| branch_name (stringclasses, 149 values) | text (stringlengths 23–89.3M) | directory_id (stringlengths 40) | languages (listlengths 1–19) | num_files (int64 1–11.8k) | repo_language (stringclasses, 38 values) | repo_name (stringlengths 6–114) | revision_id (stringlengths 40) | snapshot_id (stringlengths 40) |
|---|---|---|---|---|---|---|---|---|
refs/heads/master
|
<repo_name>MattiasAerts/Robotic-controlled-gun<file_sep>/stepper_motor_x_en_y.ino
/*
 * Control of two stepper motors with an Arduino and a joystick
 */
//Library includes
#include <AccelStepper.h>
#include <Bounce2.h>
//Arduino pin constants
const int ledEnable = 13; //the on-board LED shows us the enable state of the motors
const int pinSwEnable = 7; //the push button in the joystick module that enables or disables the control
const int pinEnable = 8; //the pins that drive the ENABLE state of the A4988 drivers are wired in series, so a single pin is enough to manage both
unsigned long debounceDelay = 10; //milliseconds for the button debounce
const int jX = A0; //analog pin that reads the X values
const int stepX = 3; //digital pin that sends the STEP signals to the X driver
const int dirX = 4; //digital pin that sends the DIRECTION signal to the X driver
long speedX, valX, mapX; //motor X management variables
const int jY = A1; //analog pin that reads the Y values
const int stepY = 5; //digital pin that sends the STEP signals to the Y driver
const int dirY = 6; //digital pin that sends the DIRECTION signal to the Y driver
long speedY, valY, mapY; //motor Y management variables
//variables used by the AccelStepper library
const int maxSpeed = 1000; //according to the library documentation this value can be set up to 4000 for an Arduino UNO
const int minSpeed = 0; //minimum motor speed
const float accelerazione = 50.0; //number of steps per second during acceleration
const int treshold = 30; //reading the potentiometers is never 100% reliable; this value helps define the region that should be treated as "stand still"
long tresholdUp, tresholdDown; //helper variables used for the task described above
boolean abilitato, muoviX, muoviY, enable; //movement management variables
Bounce btnEnable = Bounce(); //instantiate a button from the Bounce library
//instantiate the motors
AccelStepper motoreX(AccelStepper::DRIVER, stepX, dirX);
AccelStepper motoreY(AccelStepper::DRIVER, stepY, dirY);
void setup() {
//initialise values
speedX = speedY = 0;
enable = false;
//configure the pin modes
pinMode(ledEnable, OUTPUT);
pinMode(pinEnable, OUTPUT);
pinMode(pinSwEnable, INPUT_PULLUP); //the switch input has to be configured as INPUT_PULLUP
digitalWrite(ledEnable, enable);
digitalWrite(pinEnable, !enable); //the A4988 drivers disable the motor outputs when the ENABLE pin receives a HIGH signal, so the value is the opposite of the LED's
//configure the joystick button through the Bounce library
btnEnable.attach(pinSwEnable);
btnEnable.interval(debounceDelay);
//compute the band of values within which the joystick position is considered "standing still"
tresholdDown = (maxSpeed / 2) - treshold;
tresholdUp = (maxSpeed / 2) + treshold;
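//With the values above (maxSpeed = 1000, treshold = 30) this gives a dead zone of
//tresholdDown = 470 .. tresholdUp = 530: mapped joystick readings inside that band are
//treated as "stand still", readings below it drive the motor one way and readings above
//it drive it the other way.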
//set the motor parameters
motoreX.setMaxSpeed(maxSpeed);
motoreX.setSpeed(minSpeed);
motoreX.setAcceleration(accelerazione);
motoreY.setMaxSpeed(maxSpeed);
motoreY.setSpeed(minSpeed);
motoreY.setAcceleration(accelerazione);
}
void loop() {
//run the check/read function of the button that determines the enable state
checkEnable();
digitalWrite(ledEnable, enable); //show the enable state on the LED on pin 13
digitalWrite(pinEnable, !enable); //write the opposite value to the ENABLE pins of the drivers
//read the analog values coming from the joystick potentiometers
valX = analogRead(jX);
valY = analogRead(jY);
//map the readings onto the range between the minimum and maximum speed
mapX = map(valX, 0, 1023, minSpeed, maxSpeed);
mapY = map(valY, 0, 1023, minSpeed, maxSpeed);
//run the motor command function
pilotaMotori(mapX, mapY);
}
void pilotaMotori(long mapX, long mapY) {
if (mapX <= tresholdDown) {
//x moves backwards
speedX = -map(mapX, tresholdDown, minSpeed, minSpeed, maxSpeed);
muoviX = true;
} else if (mapX >= tresholdUp) {
//x moves forwards
speedX = map(mapX, maxSpeed, tresholdUp, maxSpeed, minSpeed);
muoviX = true;
} else {
//x stands still
speedX = 0;
muoviX = false;
}
if (mapY <= tresholdDown) {
//y moves down
speedY = -map(mapY, tresholdDown, minSpeed, minSpeed, maxSpeed);
muoviY = true;
} else if (mapY >= tresholdUp) {
//y moves up
speedY = map(mapY, maxSpeed, tresholdUp, maxSpeed, minSpeed);
muoviY = true;
} else {
//y stands still
speedY = 0;
muoviY = false;
}
if (muoviX) {
motoreX.setSpeed(speedX);
motoreX.run();
} else {
motoreX.stop();
}
if (muoviY) {
motoreY.setSpeed(speedY);
motoreY.run();
} else {
motoreY.stop();
}
}
void checkEnable() {
btnEnable.update();
if (btnEnable.fell()) {
enable = !enable;
}
}
//https://www.lombardoandrea.com/motori-passo-passo-arduino-joystick/ (Italian)
<file_sep>/README.md
# Robotic controlled gun
Build a robotically controlled gun with a barrel that can fire a 6 mm pellet or dart in a direction chosen with a joystick.
The pellet is fired with compressed air at a pressure I set myself (compressed-air reservoir).
A laser indicates where the projectile will land.
The direction is driven by stepper motors connected to a linear axis.
The stepper motors are driven with a joystick.
The joystick can rotate the platform and tilt it relative to the horizontal x-axis (45° maximum).
<file_sep>/iets.md
```cpp
/*
* Besturing van twee stappenmotoren met Arduino en een joystick
*/
//Opname van bibliotheken
#include <AccelStepper.h>
#include <Bounce2.h>
//definitie van de constanten van de Arduino-pinnen
const int ledEnable = 13; //de led aan boord zal ons de activeringsstatus van de motoren laten zien
const int pinSwEnable = 7; //de knop in de joystickmodule die het besturingselement in- of uitschakelt
const int pinEnable = 8; //de pinnen die de ENABLE-status van de A4988-stuurprogramma's regelen,
// zijn in serie geschakeld, dus er is slechts één pin nodig om beide te beheren
unsigned long debounceDelay = 10; //milliseconden voor de knop debonuce
const int jX = A0; //analoge pen die de waarden voor de X leest
const int stepX = 3; //digitale pen die de STEP-signalen naar de X-driver stuurt
const int dirX = 4; //digitale pen die het DIRECTION-signaal naar de X-driver stuurt
long speedX, valX, mapX; //motor X management variabelen
const int jY = A1; //analoge pen die de waarden voor Y leest
const int stepY = 10; //digitale pen die de STEP-signalen naar de Y-driver stuurt
const int dirY = 11; //digitale pen die het DIRECTION-signaal naar de Y-driver stuurt
long speedY, valY, mapY; //Y variabelen voor beheer van motorbewegingen
//variabelen gebruikt door de AccelStepper-bibliotheek
const int maxSpeed = 1000; //volgens de documentatie van de bibliotheek kan deze waarde worden
//ingesteld op 4000 voor een Arduino UNO
const int minSpeed = 0; //minimum motortoerental
const float accelerazione = 50.0; //aantal stappen per seconde bij acceleratie
const int treshold =75; //het lezen van de potentiometers is nooit 100% betrouwbaar, deze waarde
//helpt om het punt te bepalen dat moet worden beschouwd als "Blijf stil" in de bewegingen
long tresholdUp, tresholdDown; //servicevariabelen om de hierboven beschreven taak te volbrengen
boolean abilitato, muoviX, muoviY, enable; //variabelen voor bewegingsbeheer
Bounce btnEnable = Bounce(); //een knop van de Bounce-bibliotheek instantiëren
//de motoren aansteken
AccelStepper motoreX(AccelStepper::DRIVER, stepX, dirX);
AccelStepper motoreY(AccelStepper::DRIVER, stepY, dirY);
void setup() {
//initialiseer waarden
speedX = speedY = 0;
enable = false;
//definitie van de modaliteiten van de pinnen
pinMode(ledEnable, OUTPUT);
pinMode(pinEnable, OUTPUT);
pinMode(pinSwEnable, INPUT_PULLUP); //de invoer van de schakelaar moet worden ingesteld als INPUT_PULLUP
digitalWrite(ledEnable, enable);
digitalWrite(pinEnable, !enable); //De A4988-drivers schakelen de commando's naar de motor uit als op de
//ENABLE-pin een HOOG signaal wordt ontvangen, daarom is de waarde tegengesteld aan die van de LED
//configureer de joystickknop met behulp van de Bounce-bibliotheek
btnEnable.attach(pinSwEnable);
btnEnable.interval(debounceDelay);
//berekent afstandswaarden waarbinnen de positie van de joystick als "stilstaand" kan worden beschouwd
tresholdDown = (maxSpeed / 2) - treshold;
tresholdUp = (maxSpeed / 2) + treshold;
//stel de motorparameters in
motoreX.setMaxSpeed(maxSpeed);
motoreX.setSpeed(minSpeed);
motoreX.setAcceleration(accelerazione);
motoreY.setMaxSpeed(maxSpeed);
motoreY.setSpeed(minSpeed);
motoreY.setAcceleration(accelerazione);
}
void loop() {
//voer de controle- en leesfunctie uit van de knop die de activeringsstatus bepaalt
checkEnable();
digitalWrite(ledEnable, enable); //toont de activeringsstatus via de LED op pin 13
digitalWrite(pinEnable, !enable); //stel de tegenovergestelde waarde in op de ENABLE-pinnen van de stuurprogramma's
//voer een analoge uitlezing uit van de waarden die afkomstig zijn van de joystick-potentiometers
valX = analogRead(jX);
valY = analogRead(jY);
//brengt de waarden in kaart die worden gelezen volgens de maximale en de hoogste snelheid
mapX = map(valX, 0, 1023, minSpeed, maxSpeed);
mapY = map(valY, 0, 1023, minSpeed, maxSpeed);
//voer de motorcommandofunctie uit
pilotaMotori(mapX, mapY);
}
void pilotaMotori(long mapX, long mapY) {
if (mapX <= tresholdDown) {
//x gaat terug
speedX = -map(mapX, tresholdDown, minSpeed, minSpeed, maxSpeed);
muoviX = true;
} else if (mapX >= tresholdUp) {
//x gaat door
speedX = map(mapX, maxSpeed, tresholdUp, maxSpeed, minSpeed);
muoviX = true;
} else {
//x staat stil
speedX = 0;
muoviX = false;
}
if (mapY <= tresholdDown) {
//y daalt
speedY = -map(mapY, tresholdDown, minSpeed, minSpeed, maxSpeed);
muoviY = true;
} else if (mapY >= tresholdUp) {
//y gaat omhoog
speedY = map(mapY, maxSpeed, tresholdUp, maxSpeed, minSpeed);
muoviY = true;
} else {
//y staat stil
speedY = 0;
muoviY = false;
}
if (muoviX) {
motoreX.setSpeed(speedX);
motoreX.run();
} else {
motoreX.stop();
}
if (muoviY) {
motoreY.setSpeed(speedY);
motoreY.run();
} else {
motoreY.stop();
}
}
void checkEnable() {
btnEnable.update();
if (btnEnable.fell()) {
enable = !enable;
}
}
```
|
f6e1d6db3142575fe987443c909a0166bd01f616
|
[
"Markdown",
"C++"
] | 3
|
C++
|
MattiasAerts/Robotic-controlled-gun
|
119550113313afa3afb6eb739f0e8cdd3a8173f9
|
892a9ef13558fdadae3ded05351df124f370caea
|
refs/heads/master
|
<file_sep>from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
class User(db.Model):
uid = db.Column(db.Integer, primary_key=True, autoincrement=True)
username = db.Column(db.Unicode(32), unique=True, nullable=False)
information = db.Column(db.Unicode(1024))
def __init__(self, username, information):
self.username = username
self.information = information
class State(db.Model):
sid = db.Column(db.Integer, primary_key=True, autoincrement=True)
information = db.Column(db.Unicode(1024))
def __init__(self, information):
self.information = information
<file_sep>from flask import Flask
from flask_sqlalchemy import SQLAlchemy
import os
import sys
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///test.db'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True
db = SQLAlchemy()
db.init_app(app)
from app.models import *
with app.app_context():
db.create_all()
from app import echoserver
#hello
<file_sep>certifi==2017.7.27.1
chardet==3.0.4
click==6.7
Flask==0.12.2
Flask-SQLAlchemy==2.2
gunicorn==19.7.1
idna==2.5
itsdangerous==0.24
Jinja2==2.9.6
MarkupSafe==1.0
requests==2.18.3
SQLAlchemy==1.1.12
urllib3==1.22
Werkzeug==0.12.2
<file_sep>from flask import Flask, request
from app import app
from app.models import *
import json
import requests
import sys
import os
# This needs to be filled with the Page Access Token that will be provided
# by the Facebook App that will be created.
PAT = '<KEY>'
temp_sender = "hi"
temp_user = "hi"
temp_message = "hi"
@app.route('/', methods=['GET'])
def handle_verification():
if request.args.get('hub.verify_token', '') == '<PASSWORD>my_password_<PASSWORD>':
return request.args.get('hub.challenge', '')
else:
return 'Error, wrong validation token'
@app.route('/', methods=['POST'])
def handle_messages():
payload = request.get_data()
global temp_sender
global temp_message
global temp_user
current_state = State.query.filter_by(sid = 1).first()
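# The State table acts as a one-slot conversation state machine: messaging_events() stores a
# State row ("store_user", "list_user", "edit_user", ...) when a command arrives, and the
# branches below consume the text of the *next* incoming message for that state and then
# delete or replace the row.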
if current_state:
if current_state.information == "store_user":
data = json.loads(payload)
message_events = data["entry"][0]["messaging"]
for event in message_events:
if "message" in event:
temp_sender = event["sender"]["id"]
temp_user = event["message"]["text"]
db.session.delete(current_state)
db.session.commit()
new_state = State("add_user")
db.session.add(new_state)
db.session.commit()
send_message(PAT, temp_sender, "What information would you like to store?".encode('unicode_escape'))
return "ok"
if current_state.information == "add_user":
data = json.loads(payload)
message_events = data["entry"][0]["messaging"]
for event in message_events:
if "message" in event:
temp_sender = event["sender"]["id"]
temp_message = event["message"]["text"]
add_user_info(current_state)
db.session.delete(current_state)
db.session.commit()
return "ok"
if current_state.information == "list_user":
data = json.loads(payload)
message_events = data["entry"][0]["messaging"]
for event in message_events:
if "message" in event:
temp_sender = event["sender"]["id"]
temp_message = event["message"]["text"]
list_user_info(current_state)
return "ok"
if current_state.information == "edit_user":
data = json.loads(payload)
message_events = data["entry"][0]["messaging"]
for event in message_events:
if "message" in event:
temp_sender = event["sender"]["id"]
temp_user = event["message"]["text"]
db.session.delete(current_state)
db.session.commit()
new_state = State("edit_user_info")
db.session.add(new_state)
db.session.commit()
send_message(PAT, temp_sender, "What new information would you like to store".encode('unicode_escape'))
return "ok"
if current_state.information == "edit_user_info":
data = json.loads(payload)
message_events = data["entry"][0]["messaging"]
for event in message_events:
if "message" in event:
temp_sender = event["sender"]["id"]
temp_message = event["message"]["text"]
edit_user_info(current_state)
db.session.delete(current_state)
db.session.commit()
return "ok"
else:
messaging_events(payload)
return "ok"
def messaging_events(payload):
"""Generate tuples of (sender_id, message_text) from the
provided payload.
"""
global global_flag
data = json.loads(payload)
message_events = data["entry"][0]["messaging"]
for event in message_events:
if "message" in event:
if "Add" in event["message"]["text"]:
# ret_message = add_user_info(event["sender"]["id"])
new_state = State("store_user")
db.session.add(new_state)
db.session.commit()
send_message(PAT, event["sender"]["id"],"Full name of new entry".encode('unicode_escape'))
elif "List" in event["message"]["text"]:
# ret_message = list_user_info(event["sender"]["id"])
new_state = State("list_user")
db.session.add(new_state)
db.session.commit()
send_message(PAT, event["sender"]["id"], "Full name of user".encode('unicode_escape'))
elif "Edit" in event["message"]["text"]:
new_state = State("edit_user")
db.session.add(new_state)
db.session.commit()
send_message(PAT, event["sender"]["id"], "Name of user to edit".encode('unicode_escape'))
else:
send_message(PAT, event["sender"]["id"], "Not a recognized command".encode('unicode_escape'))
def add_user_info(curr_state):
global temp_sender
global temp_message
global temp_user
user = User.query.filter_by(username = temp_user).first()
if (user):
db.session.delete(curr_state)
db.session.commit()
send_message(PAT, temp_sender, "User already exists".encode('unicode_escape'))
return
new_user = User(temp_user, temp_message)
db.session.add(new_user)
db.session.commit()
db.session.delete(curr_state)
db.session.commit()
send_message(PAT, temp_sender, "Success".encode('unicode_escape'))
def list_user_info(curr_state):
global temp_sender
global temp_message
global information
user = User.query.filter_by(username = temp_message).first()
if user:
send_message(PAT, temp_sender, user.information.encode("unicode_escape"))
else:
send_message(PAT, temp_sender, "No such user".encode("unicode_escape"))
db.session.delete(curr_state)
db.session.commit()
def edit_user_info(curr_state):
global temp_sender
global temp_message
global temp_user
user = User.query.filter_by(username = temp_user).first()
if (user):
user.information = temp_message
send_message(PAT, temp_sender, "Success".encode('unicode_escape'))
return
db.session.delete(curr_state)
db.session.commit()
send_message(PAT, temp_sender, "User does not exist".encode('unicode_escape'))
return
def send_message(token, recipient, text):
"""Send the message text to recipient with id recipient.
"""
r = requests.post("https://graph.facebook.com/v2.6/me/messages",
params={"access_token": token},
data=json.dumps({
"recipient": {"id": recipient},
"message": {"text": text.decode('unicode_escape')}
}),
headers={'Content-type': 'application/json'})
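# Added sketch (not part of the original repo): a minimal local check of the GET verification
# handler above. The host and port are assumptions (Flask's defaults) and the verify-token
# placeholder is copied verbatim from handle_verification().
if __name__ == "__main__":
    resp = requests.get(
        "http://localhost:5000/",
        params={"hub.verify_token": "<PASSWORD>my_password_<PASSWORD>", "hub.challenge": "12345"},
    )
    print(resp.text)  # the challenge is echoed back when the token matches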
|
10b0d8a08c2ec9c02a0b3d2dc386da9c44135734
|
[
"Python",
"Text"
] | 4
|
Python
|
Mang2015/chatbot
|
66dcc74126cc28f4fecd8782bf315eb2d328c7ce
|
2fd58d2f96e6522682c845b7c3db0e51141d5ccc
|
refs/heads/master
|
<file_sep>import random
import math
from LectorDescriptores import LectorDescriptores
from DataPoint import DataPoint
from Centroid import Centroid
NUM_CLUSTERS = 3
TOTAL_DATA = 7
A_SEED = 0
B_SEED = 71
C_SEED = 141
BIG_NUMBER = math.pow(10, 10)
# SAMPLES = [[1.0, 1.0], [1.5, 2.0], [3.0, 4.0], [5.0, 7.0], [3.5, 5.0], [4.5, 5.0], [3.5, 4.5], [6.0, 8.0]]
data = []
# centroids = [200]
centroid_A = list()
centroid_B = list()
centroid_C = list()
lector = LectorDescriptores()
#dataSet = list()
dataSet = lector.cargarDatos()
def initialize_centroids():
global centroid_A, centroid_B, centroid_C
centroid_A = Centroid(dataSet[A_SEED].getNombreImagen(), dataSet[A_SEED].getVectorMomentosHu())
centroid_B = Centroid(dataSet[B_SEED].getNombreImagen(), dataSet[B_SEED].getVectorMomentosHu())
centroid_C = Centroid(dataSet[C_SEED].getNombreImagen(), dataSet[C_SEED].getVectorMomentosHu())
print("Centroids initialized at:")
print("(", centroid_A.getNombreImagen() , ", ", centroid_A.getVectorMomentosHu() , ")")
print("(", centroid_B.getNombreImagen() , ", ", centroid_B.getVectorMomentosHu() , ")")
print("(", centroid_C.getNombreImagen() , ", ", centroid_C.getVectorMomentosHu() , ")")
print()
return
initialize_centroids()
"""
def initialize_centroids():
centroids.append(Centroid(SAMPLES[LOWEST_SAMPLE_POINT][0], SAMPLES[LOWEST_SAMPLE_POINT][1]))
centroids.append(Centroid(SAMPLES[HIGHEST_SAMPLE_POINT][0], SAMPLES[HIGHEST_SAMPLE_POINT][1]))
print("Centroids initialized at:")
print("(", centroids[0].get_x(), ", ", centroids[0].get_y(), ")")
print("(", centroids[1].get_x(), ", ", centroids[1].get_y(), ")")
print()
return
def initialize_datapoints():
for i in range(TOTAL_DATA):
newPoint = DataPoint(SAMPLES[i][0], SAMPLES[i][1])
if(i == LOWEST_SAMPLE_POINT):
newPoint.set_cluster(0)
elif(i == HIGHEST_SAMPLE_POINT):
newPoint.set_cluster(1)
else:
newPoint.set_cluster(None)
data.append(newPoint)
return
def get_distance(dataPointX, dataPointY, centroidX, centroidY):
return math.sqrt(math.pow((centroidY - dataPointY), 2) + math.pow((centroidX - dataPointX), 2))
def recalculate_centroids():
totalX = 0
totalY = 0
totalInCluster = 0
for j in range(NUM_CLUSTERS):
for k in range(len(data)):
if(data[k].get_cluster() == j):
totalX += data[k].get_x()
totalY += data[k].get_y()
totalInCluster += 1
if(totalInCluster > 0):
centroids[j].set_x(totalX / totalInCluster)
centroids[j].set_y(totalY / totalInCluster)
return
def update_clusters():
isStillMoving = 0
for i in range(TOTAL_DATA):
bestMinimum = BIG_NUMBER
currentCluster = 0
for j in range(NUM_CLUSTERS):
distance = get_distance(data[i].get_x(), data[i].get_y(), centroids[j].get_x(), centroids[j].get_y())
if(distance < bestMinimum):
bestMinimum = distance
currentCluster = j
data[i].set_cluster(currentCluster)
if(data[i].get_cluster() is None or data[i].get_cluster() != currentCluster):
data[i].set_cluster(currentCluster)
isStillMoving = 1
return isStillMoving
def perform_kmeans():
isStillMoving = 1
initialize_centroids()
initialize_datapoints()
while(isStillMoving):
recalculate_centroids()
isStillMoving = update_clusters()
return
def print_results():
for i in range(NUM_CLUSTERS):
print("Cluster ", i, " includes:")
for j in range(TOTAL_DATA):
if(data[j].get_cluster() == i):
print("(", data[j].get_x(), ", ", data[j].get_y(), ")")
print()
return
perform_kmeans()
print_results()
"""<file_sep># Algoritmo Kmeans
K-means algorithm using a dataset from the [UCI](https://archive.ics.uci.edu/ml/machine-learning-databases/00236/seeds_dataset.txt) repository
To run
```bash
python main.py
```
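The commented-out reference code in `main.py` spells out the classic k-means loop: assign every point to its nearest centroid, recompute each centroid as the mean of its cluster, and repeat until no assignment changes. A self-contained sketch of that loop is shown below; it is only an illustration (it reuses the 2-D `SAMPLES` points from the commented block rather than the Hu-moment descriptors loaded by `LectorDescriptores`, and the function names are not part of this repo):

```python
import math
import random

def kmeans(points, k, iterations=100):
    # choose k distinct points as the initial centroids
    centroids = random.sample(points, k)
    assignment = [None] * len(points)
    for _ in range(iterations):
        moved = False
        # assignment step: attach every point to its nearest centroid
        for i, p in enumerate(points):
            best = min(range(k), key=lambda j: math.dist(p, centroids[j]))
            if assignment[i] != best:
                assignment[i] = best
                moved = True
        # update step: every centroid becomes the mean of its members
        for j in range(k):
            members = [p for i, p in enumerate(points) if assignment[i] == j]
            if members:
                centroids[j] = tuple(sum(c) / len(members) for c in zip(*members))
        if not moved:  # nothing changed, so the clustering has converged
            break
    return centroids, assignment

if __name__ == "__main__":
    samples = [(1.0, 1.0), (1.5, 2.0), (3.0, 4.0), (5.0, 7.0),
               (3.5, 5.0), (4.5, 5.0), (3.5, 4.5), (6.0, 8.0)]
    centroids, assignment = kmeans(samples, k=2)
    for j, c in enumerate(centroids):
        members = [p for i, p in enumerate(samples) if assignment[i] == j]
        print("Cluster", j, "centroid", c, "members", members)
```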
|
04d6462e059da23b47cb6bb02fcbfc651ead8533
|
[
"Markdown",
"Python"
] | 2
|
Python
|
weylermaldonado/kmean
|
034824a42382824237a227284f62f561a6123625
|
8ee8eed6a51067a851dbb76f9114ed1c5806ba70
|
refs/heads/main
|
<file_sep>/*Calcular y presentar cuantas cifras tiene un determinado valor numerico introducido por teclado*/
#include <cstdlib>
#include <stdio.h>
using namespace std;
int main() {
int numero = 2;
int cifras = 1;
while (numero >= 10) {
numero /= 10;
cifras++;
}
printf("tiene &d cifras", cifras);
printf ("\n\n\n"); system("PAUSE");
return 0;
}
|
ada8eb6fac3983a9b96eb39e5c129b1d2d4165e1
|
[
"C++"
] | 1
|
C++
|
kadirBernardo/sumativa-9
|
7d36c648428f34da6d6853e3fe2ca73dcfdad706
|
d990c0f0459b8d078349121bdf5452d36808cd3c
|
refs/heads/master
|
<file_sep>using System;
using AppKit;
using Foundation;
using LocalAuthentication;
namespace SuperSecureApp
{
public partial class ViewController : NSViewController
{
public ViewController(IntPtr handle) : base(handle)
{
}
public override void ViewDidLoad()
{
base.ViewDidLoad();
UseTouchID();
}
void UseTouchID()
{
var contex = new LAContext();
NSError authError;
if (contex.CanEvaluatePolicy(LAPolicy.DeviceOwnerAuthenticationWithBiometrics, out authError))
{
replyHandler = new LAContextReplyHandler((success, error) =>
{
if (success)
{
InvokeOnMainThread(()=>ShowMessage());
}
});
// Only evaluate the policy when biometrics are actually available; otherwise replyHandler is still null.
contex.EvaluatePolicy(LAPolicy.DeviceOwnerAuthenticationWithBiometrics, "authenticate", replyHandler);
}
}
partial void SignUpClicked(NSObject sender)
{
string username = Username.StringValue;
if (string.IsNullOrEmpty(username) || string.IsNullOrEmpty(Password.StringValue))
return;
KeychainHelpers.SetPasswordForUsername(username, Password.StringValue, SecureId,
Security.SecAccessible.Always, true);
NSUserDefaults.StandardUserDefaults.SetString(username, "username");
NSUserDefaults.StandardUserDefaults.Synchronize();
}
partial void LoginClicked(NSObject sender)
{
string username = NSUserDefaults.StandardUserDefaults.StringForKey("username");
string password = KeychainHelpers.GetPasswordForUsername(Username.StringValue, SecureId, true);
if (Username.StringValue == "guest")
{
ShowMessage();
}
}
partial void TouchIDClicked(NSObject sender)
{
UseTouchID();
}
void ShowMessage(string message = "Here's your message Mr. Archer")
{
var alert = new NSAlert();
alert.MessageText = "Secret Message";
alert.InformativeText = message;
alert.AlertStyle = NSAlertStyle.Informational;
alert.BeginSheet(this.View.Window);
}
public override NSObject RepresentedObject
{
get
{
return base.RepresentedObject;
}
set
{
base.RepresentedObject = value;
// Update the view, if already loaded.
}
}
const string SecureId = "SuperSecureApp";
LAContextReplyHandler replyHandler;
}
}
|
5efff93dcbfd7d1501d9cafc4317d8bdb5ce2578
|
[
"C#"
] | 1
|
C#
|
dmsheets/SuperSecureApp
|
465e4fa39a8a8428fdeae0b6ccbb955f69c7e913
|
ea733d2072e4f2d8ea8e025162f4e6a27711620c
|
refs/heads/master
|
<repo_name>LorenK96/UITestUnity<file_sep>/UnityTest/Tests/Android/AndroidTests.cs
using NUnit.Framework;
using Xamarin.UITest;
using Xamarin.GameTestServer;
using System.Linq;
using System.Threading;
namespace UnityTest.Tests.Android
{
class AndroidTests : TestEnv
{
[TestFixture]
public class TestApp
{
UnityApp App;
[SetUp]
public void Setup()
{
IApp app = ConfigureApp.Android.ApkFile(APK_PATH).StartApp();
UnityApp.Initialize(app, PHONE_IP);
App = UnityApp.Shared;
}
[Test]
public void FailedLogin()
{
// Make sure the app has loaded for everyone
Thread.Sleep(30000);
// Wait for Login Screen
App.WaitForElement("LoginScreen");
App.ScreenShot("Login screen loaded");
// Press Login without entering any text
GameButton login = App.GetGameButtons().First(b => b.Name == "BtnLogin");
App.Tap(login);
App.ScreenShot("Tapped login");
Thread.Sleep(1000);
// Get error message
Assert.IsTrue(
App.GetGameTexts().Any(x => x.IsOnScreen && x.Name == "ErrorText" && x.Text != "")
);
}
}
}
}
|
c25ef4d19c31f70b6c6ee8e85e9c0b99dcec6ffa
|
[
"C#"
] | 1
|
C#
|
LorenK96/UITestUnity
|
5cf54069aa39fb3e3492ff219bbf157e4b157708
|
2683ea3ba2e8c69ea5cc5bcd50f88963fb894334
|
refs/heads/main
|
<file_sep><?php
require_once('vendor/autoload.php');
$f3 = Base::instance();
$f3->config('config.ini');
$f3->config('routes.ini');
/*$app->route('GET /',
function(){
echo 'Bienvenidos';
}
);
*/
/*
$app->route('GET /',
function(){
echo 'Acerca de este proyecto...';
}
);
*/
/*
$f3->route('GET /brew/@count',
function($f3) {
echo $f3->get('PARAMS.count').' bottles of beer on the wall.';
}
);
*/
$f3->run();<file_sep>[globals]
DEBUG=3
;messagehello=Message
UI=app/views/
AUTOLOAD=app/models/|app/controllers/
devdb = "mysql:host=127.0.0.1;port=3306;dbname=laravel"
devdbusername = "root"
devdbpassword = "<PASSWORD>"<file_sep>[routes]
;base routes
GET /=MainController->render
;GET /hello=MainController->sayhello<file_sep><?php
class MainController extends Controller{
function render(){
$message = new Messages($this->db);
$message->key = 'Second message';
$message->message = 'This is the second message inserted from code';
$message->save();
$messages = new Messages($this->db);
$msg = $messages->all()[0];
$this->f3->set('msg',$msg);
$template=new Template;
echo $template->render('template.htm');
}
/*
function render($f3){
$f3->set('name','world');
$template=new Template;
echo $template->render('template.htm');
}
function sayhello(){
echo 'Hello, babe!';
}*/
}
|
4d633c461e0d368c1caa980527068077f02a5020
|
[
"PHP",
"INI"
] | 4
|
PHP
|
rpariona81/myapp-fatfree
|
97d90be7195473568ec86e4f859f7aae143ae55b
|
f21bc02c04e879f153af7de6d5388ad399bb9b9e
|
refs/heads/master
|
<file_sep># 2014WeMedia H5
<file_sep>var pages = document.querySelectorAll(".container > div");
var index = 0; // current page number
var startY = 0;
var moveY = 0;
var endY = 0;
var comY = 0;
var scoll = 0;
whatpage();
qiangpiao.addEventListener('click',function(){
window.location.href = "qiangpiao.html";
})
chuanyue.addEventListener('click',function(){
window.location.href = "chuanyue.html";
})
// preload images
var imgObjs = {
arrow:"src/img/arrow.png",
page1_bg:"src/img/page1_bg.jpg",
page1_logo1:"src/img/page1_logo1.png",
page1_logo2:"src/img/page1_logo2.png",
page1_logo3:"src/img/page1_logo3.png",
page1_logo4:"src/img/page1_logo4.png",
page2_title:"src/img/page2_title.png",
page3_bg:"src/img/page3_bg.jpg",
page3_bh:"src/img/page3_bh.png",
page3_title:"src/img/page3_title.png",
page3_title1:"src/img/page3_title1.png",
page3_title2:"src/img/page3_title2.png",
page4_line1:"src/img/page4_line1.png",
page4_line2:"src/img/page4_line2.png",
page4_line3:"src/img/page4_line3.png",
page4_line4:"src/img/page4_line4.png",
page4_line5:"src/img/page4_line5.png",
page4_line6:"src/img/page4_line6.png",
page4_title:"src/img/page4_title.png",
page5_bg:"src/img/page5_bg.jpg",
page5_point:"src/img/page5_point.png",
page5_title:"src/img/page5_title.png",
page6_1_1:"src/img/page6_1_1.png",
page6_1_2:"src/img/page6_1_2.png",
page6_1_3:"src/img/page6_1_3.png",
page6_1_4:"src/img/page6_1_4.png",
page6_1_5:"src/img/page6_1_5.png",
page6_2_1:"src/img/page6_2_1.png",
page6_2_2:"src/img/page6_2_2.png",
page6_2_3:"src/img/page6_2_3.png",
page6_2_4:"src/img/page6_2_4.png",
page6_2_5:"src/img/page6_2_5.png",
page6_3_1:"src/img/page6_3_1.png",
page6_3_2:"src/img/page6_3_2.png",
page6_3_3:"src/img/page6_3_3.png",
page6_3_4:"src/img/page6_3_4.png",
page6_3_5:"src/img/page6_3_5.png",
page6_4_1:"src/img/page6_4_1.png",
page6_4_2:"src/img/page6_4_2.png",
page6_4_3:"src/img/page6_4_3.png",
page6_4_4:"src/img/page6_4_4.png",
page6_4_5:"src/img/page6_4_5.png",
page6_bg:"src/img/page6_bg.jpg",
page6_biglogo:"src/img/page6_biglogo.png",
page6_title:"src/img/page6_title.png",
page6_title2:"src/img/page6_title2.png",
page7_arrowpoint:"src/img/page7_arrowpoint.png",
page7_btn:"src/img/page7_btn.png",
page7_info:"src/img/page7_info.png",
page7_map:"src/img/page7_map.png",
page8_btn:"src/img/page8_btn.png",
page8_circle:"src/img/page8_circle.png",
page8_text:"src/img/page8_text.png",
page8_title:"src/img/page8_title.png",
}
var loaded = {}; // loaded image objects
var index = 0; // number of images loaded so far
var num = 0; // total number of images to load
for(var i in imgObjs){
num++; // total image count
}
for(var img in imgObjs){
var imgObj = new Image();
loaded[img] = imgObj;
loaded[img].src = imgObjs[img];
loaded[img].onload = function(){
index++;
var baifenbi = parseInt((index/num)*100);
document.querySelector(".loadingbox .loading div").innerHTML = baifenbi+"%";
// console.log(baifenbi) // log the loading progress
if(index >= num){
// callback once all images have loaded
document.querySelector(".loadingbox").style.display = "none";
mouseEvent();
index = 1;
whatpage();
bgm.muted = false;
}
}
}
// music toggle
var music = document.querySelector('.musicbox');
music.addEventListener('click',function(){
if(music.dataset.run == "true"){
music.dataset.run = "fasle";
bgm.muted = true;
music.style.animation = "muscistop";
}else{
music.dataset.run = "true";
bgm.muted = false;
music.style.animation = "musicrun 5s linear infinite forwards";
}
})
// swipe to switch between pages
function mouseEvent(){
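// The three touch handlers below implement the swipe transition: touchmove halves the drag
// distance and turns it into a scale factor for the outgoing page while the incoming page's
// top offset follows the same value, and touchend uses the sign of (endY - startY) to decide
// whether to finish with gotoprun() (next page) or godownrun() (previous page).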
document.addEventListener("touchstart",function(e){
startY = e.touches[0].pageY;
if(!isrun){
tips.style.display = "none";
imgsrunnnnnn();
}
})
document.addEventListener("touchmove",function(e){
e.preventDefault();
moveY = e.touches[0].pageY;
if(moveY - startY < 0 && index < 8){
comY = (moveY - startY) / 2;
var scale = (document.body.clientHeight + comY) / document.body.clientHeight;
var x = (1-scale)*document.body.clientHeight / 2;
pages[index-1].style.transform = "scale("+scale+")";
pages[index-1].style.top = - x + "px";
pages[index].style.top = (document.body.clientHeight - (1-scale)*document.body.clientHeight) + "px";
scoll = (document.body.clientHeight - (1-scale)*document.body.clientHeight);
}else if(moveY - startY > 0 && index > 1){
comY = (startY - moveY) / 2;
var scale = (document.body.clientHeight + comY) / document.body.clientHeight;
var x = (1-scale)*document.body.clientHeight / 2;
pages[index-1].style.transform = "scale("+scale+")";
pages[index-1].style.top = x + "px";
pages[index-2].style.top = -document.body.clientHeight + x*2 + "px";
scoll = x * 2;
}
})
document.addEventListener("touchend",function(e){
endY = e.changedTouches[0].pageY;
comY = (endY - startY);
if(comY < 0){
gotoprun();
}else if(comY > 0){
godownrun();
}
})
}
function gotoprun(){
if(index == 8){
return;
}
var x = 0;
var autointerval = setInterval(function(){
x-=15;
scoll-=15;
pages[index-1].style.top = x + "px";
pages[index].style.top = scoll + "px";
if(scoll <= 0){
clearInterval(autointerval);
pages[index].style.top = "0px";
pages[index-1].style.top = -document.body.clientHeight + "px";
pages[index-1].style.transform = "scale(1)";
index++;
whatpage();
}
},1)
}
function godownrun(){
if(index == 1){
return;
}
var x = -document.body.clientHeight + scoll * 2;
var autointerval = setInterval(function(){
x+=15;
scoll+=15;
pages[index-2].style.top = x + "px";
pages[index-1].style.top = scoll + "px";
if(x >= 0){
clearInterval(autointerval);
pages[index-2].style.top = "0px";
pages[index-1].style.top = document.body.clientHeight + "px";
pages[index-1].style.transform = "scale(1)";
index--;
whatpage();
}
},1)
}
// pick the entrance animation for the current page
function whatpage(){
removeall();
switch(index){
case 1:
firstshow();
break;
case 2:
secondshow();
break;
case 3:
page3show();
break;
case 4:
page4show();
break;
case 5:
page5show();
break;
case 6:
page6show();
break;
case 7:
page7show();
break;
case 8:
page8show();
break;
default:
return;
}
}
// clear the animations on every page
function removeall(){
var allimgs = document.querySelectorAll("img,li");
for(var i = 0; i < allimgs.length; i++){
allimgs[i].style.animation = "";
}
}
// page 1 animation
function firstshow(){
var firstimgs = document.querySelectorAll('.page1 > img');
var time = 0;
for(var i = 0; i < firstimgs.length - 1; i++){
firstimgs[i].style.animation = "page1imgshow .5s "+time+"s linear forwards";
time += 0.5;
}
}
// page 2 animation
function secondshow(){
var secondimgs = document.querySelectorAll('.page2 > img');
var time = 0;
for(var i = 0; i < secondimgs.length; i++){
secondimgs[i].style.animation = "page1imgshow .5s "+time+"s linear forwards";
time += 0.5;
}
}
// page 3 animation
function page3show(){
var page3imgs = document.querySelectorAll('.page3 > img');
var time = 0;
for(var i = 1; i < page3imgs.length; i++){
page3imgs[i].style.animation = "page1imgshow .5s "+time+"s linear forwards";
time += 0.5;
}
var page3lis = document.querySelectorAll('.page3 > ul > li');
var time2 = 0;
for(var i = 0; i < page3lis.length; i++){
page3lis[i].style.animation = "page3mdgo 5s "+time2+"s linear forwards";
time2 += 1.5;
}
}
// page 4 animation
function page4show(){
var page4imgs = document.querySelectorAll('.page4 > img');
var time = 0;
for(var i = 0; i < page4imgs.length; i++){
page4imgs[i].style.animation = "page1imgshow .5s "+time+"s linear forwards";
time += 0.5;
}
var page4lines = document.querySelectorAll('.page4 .lines');
var time2 = 1.2;
for(var i = 0; i <page4lines.length; i++){
page4lines[i].style.animation = "page4lineshow .2s "+time2+"s linear forwards";
time2+= 0.2;
}
}
// page 5 animation
function page5show(){
var page5imgs = document.querySelectorAll('.page5 > img');
var time = 0;
for(var i = 0; i < page5imgs.length - 1; i++){
page5imgs[i].style.animation = "page1imgshow .5s "+time+"s linear forwards";
time += 0.5;
}
document.querySelector('.page5 > img:nth-child(4)').style.animation = "4s pointshow 1.5s linear forwards infinite";
}
// page 6 animation
function page6show(){
var page6imgs = document.querySelectorAll('.page6 > img');
var time = 0;
for(var i = 0; i < page6imgs.length-2; i++){
page6imgs[i].style.animation = "page1imgshow .5s "+time+"s linear forwards";
time += 0.5;
}
var page6biglogo = document.querySelector('.page6 > img:nth-child(4)');
page6biglogo.style.animation = "page6biglogorun .7s 1.5s linear forwards"
var page6title2 = document.querySelector('.page6 > img:nth-child(5)');
page6title2.style.animation = "page1imgshow .5s 2.2s linear forwards";
var a1 = document.querySelectorAll('.page6 > div:nth-child(6) > img');
var a2 = document.querySelectorAll('.page6 > div:nth-child(7) > img');
var a3 = document.querySelectorAll('.page6 > div:nth-child(8) > img');
var a4 = document.querySelectorAll('.page6 > div:nth-child(9) > img');
var time2 = [2.5,2.6,2.7,2.8];
for(var i = 0; i < 5; i++){
a1[i].style.animation = "page6smalllogorun .5s "+time2[0]+"s linear forwards";
a2[i].style.animation = "page6smalllogorun .5s "+time2[1]+"s linear forwards";
a3[i].style.animation = "page6smalllogorun .5s "+time2[2]+"s linear forwards";
a4[i].style.animation = "page6smalllogorun .5s "+time2[3]+"s linear forwards";
for(var j = 0; j < time2.length; j++){
time2[j] += 0.1;
}
}
}
// page 7 animation
function page7show(){
var page7imgs = document.querySelectorAll('.page7 > img');
var time = 0;
for(var i = 0; i < page7imgs.length; i++){
if(i == 2){
page7imgs[i].style.animation = "mapshow .4s 1s linear forwards";
continue;
}
if(i == 5){
page7imgs[i].style.animation = "pointrun 2s 1.2s linear forwards infinite";
continue;
}
page7imgs[i].style.animation = "page1imgshow .5s "+time+"s linear forwards";
time += 0.5;
}
}
// page 8 animation
var earthruninterval = null;
function page8show(){
var page8imgs = document.querySelectorAll(".page8 > img");
var time = 0;
for(var i = 0; i < page8imgs.length; i++){
if(i == 2){
var x = 0;
setTimeout(function(){
earthruninterval = setInterval(function(){
page8imgs[2].style.opacity = x;
x += .02;
if(x >= 1){
page8imgs[2].style.opacity = 1;
clearInterval(earthruninterval);
page8imgs[2].style.animation = "earthrun 60s linear forwards infinite";
}
},3)
},1000)
continue;
}
page8imgs[i].style.animation = "page1imgshow .5s "+time+"s linear forwards";
time += 0.5;
}
}
<file_sep>
// grab all the img elements
var topimgs90 = document.querySelectorAll('.bigbox .ballbox .topdeg90');
var topimgs45 = document.querySelectorAll('.bigbox .ballbox .topdeg45');
var topimgs225 = document.querySelectorAll('.bigbox .ballbox .topdeg225');
var imgs0 = document.querySelectorAll('.bigbox .ballbox .deg0');
var bottomimgs225 = document.querySelectorAll('.bigbox .ballbox .bottomdeg225');
var bottomimgs45 = document.querySelectorAll('.bigbox .ballbox .bottomdeg45');
var bottomimgs90 = document.querySelectorAll('.bigbox .ballbox .bottomdeg90');
var tips = document.querySelector('.ballbox .tips');
var xiebian = 120;
var startdeg = [0,45,90,135,180,225,270,315];
var startdeg2 = [10,70,130,190,250,310];
var startdeg3 = [20,110,200,290];
var startdeg5 = [15];
var isrun = false;
var imgsrun = null;
imgsrunnnnnn();
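// Every icon is placed on a horizontal circle (radius derived from `xiebian`, scaled per ring)
// at a fixed height; every 10 ms each icon's angle advances by 0.1 degree and is converted with
// cos/sin into a translate3d position, which makes the whole cloud of icons appear to rotate
// like a sphere.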
function imgsrunnnnnn(){
isrun = true;
imgsrun = setInterval(function(){
// 0
for(var i = 0; i < imgs0.length; i++){
var enddeg = startdeg[i]* Math.PI/180;
var x = xiebian* Math.cos(enddeg);
var z = xiebian* Math.sin(enddeg);
imgs0[i].style.transform = "translate3d("+x+"px,"+0+"px,"+z+"px)";
imgs0[i].style.transform = "-webkit-translate3d("+x+"px,"+0+"px,"+z+"px)";
startdeg[i]+=0.1;
}
// top225
for(var i = 0; i < topimgs225.length; i++){
var enddeg = startdeg2[i]* Math.PI/180;
var x = xiebian* Math.cos(enddeg) * 0.82;
var z = xiebian* Math.sin(enddeg);
topimgs225[i].style.transform = "translate3d("+x+"px,"+(-40)+"px,"+z+"px)";
topimgs225[i].style.transform = "-webkit-translate3d("+x+"px,"+(-25)+"px,"+z+"px)";
startdeg2[i]+=0.1;
}
// top 45
for(var i = 0; i < topimgs45.length; i++){
var enddeg = startdeg3[i]* Math.PI/180;
var x = xiebian* Math.cos(enddeg) * 0.6;
var z = xiebian* Math.sin(enddeg);
topimgs45[i].style.transform = "translate3d("+x+"px,"+(-80)+"px,"+z+"px)";
topimgs45[i].style.transform = "-webkit-translate3d("+x+"px,"+(-50)+"px,"+z+"px)";
startdeg3[i]+=0.1;
}
// top90
for(var i = 0; i < topimgs90.length; i++){
var enddeg = startdeg5[i]* Math.PI/180;
var x = xiebian* Math.cos(enddeg) * 0.1;
var z = xiebian* Math.sin(enddeg) * 0.1;
topimgs90[i].style.transform = "translate3d("+x+"px,"+(-120)+"px,"+z+"px)";
topimgs90[i].style.transform = "-webkit-translate3d("+x+"px,"+(-100)+"px,"+z+"px)";
startdeg5[i]+=0.1;
}
// bottom 225
for(var i = 0; i < bottomimgs225.length; i++){
var enddeg = startdeg2[i]* Math.PI/180;
var x = xiebian* Math.cos(enddeg) * 0.82;
var z = xiebian* Math.sin(enddeg);
bottomimgs225[i].style.transform = "translate3d("+x+"px,"+40+"px,"+z+"px)";
bottomimgs225[i].style.transform = "-webkit-translate3d("+x+"px,"+25+"px,"+z+"px)";
startdeg2[i]+=0.1;
}
// bottom 45
for(var i = 0; i < bottomimgs45.length; i++){
var enddeg = startdeg3[i]* Math.PI/180;
var x = xiebian* Math.cos(enddeg) * 0.6;
var z = xiebian* Math.sin(enddeg);
bottomimgs45[i].style.transform = "translate3d("+x+"px,"+80+"px,"+z+"px)";
bottomimgs45[i].style.transform = "-webkit-translate3d("+x+"px,"+50+"px,"+z+"px)";
startdeg3[i]+=0.1;
}
// bottom 90
for(var i = 0; i < bottomimgs90.length; i++){
var enddeg = startdeg5[i]* Math.PI/180;
var x = xiebian* Math.cos(enddeg) * 0.1;
var z = xiebian* Math.sin(enddeg) * 0.1;
bottomimgs90[i].style.transform = "translate3d("+x+"px,"+120+"px,"+z+"px)";
bottomimgs90[i].style.transform = "-webkit-translate3d("+x+"px,"+100+"px,"+z+"px)";
startdeg5[i]+=0.1;
}
},10)
}
var page2imgssss = document.querySelectorAll('.ballbox img');
for(var i = 0; i < page2imgssss.length; i++){
(function(i){
page2imgssss[i].addEventListener("click",function(){
if(isrun){
tips.style.display = "block";
tips.style.transform = this.style.transform;
clearInterval(imgsrun);
isrun = false;
}
})
})(i)
}
|
c67e2c12da5ddee2f7bd6167eb97635be69e4a32
|
[
"Markdown",
"JavaScript"
] | 3
|
Markdown
|
346865538/2014WeMedia
|
2e9542ff1d5250129286666df0eac4bc957ddc14
|
7c40c80a7b6f84aaa5ebc8e5d6cd1cf6c6b6ecff
|
refs/heads/master
|
<repo_name>hublemeer/magicaespeculo<file_sep>/public/javascripts/weather.js
'use strict';
var weather = function () {
$.simpleWeather({
location: config.weather.location,
woeid: config.weather.woeid,
unit: config.weather.unit,
success: function (weather) {
var skycons = new Skycons({"color": "white"}),
skycons_forecast = new Skycons({"color": "white"}),
current_weather = '<canvas id="weather-icon" width="128" height="128"></canvas>',
sunrise = parseTimeTo24Format(weather.sunrise),
sunset = parseTimeTo24Format(weather.sunset);
current_weather += '<div id= weather_desc>' + weather.currently + '</div>';
current_weather += '<div>' + weather.wind.direction + ' ' + weather.wind.speed + ' ' + weather.units.speed + '</div>';
current_weather += '<div><canvas id=sunrise width="18" height="18"> </canvas>' + sunrise + '<canvas id=sunset width="18" height="18"> </canvas> ' + sunset + '</div>';
$("#weather_now").html(current_weather);
var date,
weekday,
forecast,
forecastValue,
forecastIcons,
i;
for (i = 0; i < weather.forecast.length; i++) {
date = moment(new Date(weather.forecast[i].date));
weekday = date.isoWeekday();
forecast = days[weekday];
forecastValue = weather.forecast[i].high + ' ' + weather.forecast[i].low;
forecastIcons = "<canvas id=weather-forecast-" + i + "> </canvas>";
$("#weather-forecast-day-" + i).html(forecast);
$("#weather-forecast-value-" + i).html(forecastValue);
$("#weather-forecast-icon-" + i).html(forecastIcons);
}
skycons_forecast.add("sunrise", getAnimationforWeatherCode());
skycons_forecast.add("sunset", getAnimationforWeatherCode(31));
for (i = 0; i < weather.forecast.length; i++) {
skycons_forecast.add("weather-forecast-" + i, getAnimationforWeatherCode(weather.forecast[i].code));
}
skycons.remove('weather-icon');
var animation = getAnimationforWeatherCode(weather.code);
skycons.add("weather-icon", animation);
skycons.play();
},
error: function (error) {
$("#weather").html('<p>' + error + '</p>');
}
});
};
$(document).ready(function () {
weather();
setInterval(weather, 600000);
});
//hack to parse simpleweather time xx:xx am | xx:xx: pm to 24h format
function parseTimeTo24Format(time) {
var temp,
res = splitTimeToComponents(time);
if (time.indexOf("am") !== -1) {
temp = parseInt(res[0], 10) + ":" + res[1];
} else {
temp = parseInt(res[0], 10) + 12 + ":" + res[1];
}
return temp;
}
function splitTimeToComponents(time) {
var temp = time.substring(0, time.indexOf(" ")),
res = temp.split(":");
res = addLeadingZerosToMin(res);
return res;
}
function addLeadingZerosToMin(time) {
if (parseInt(time[1], 10) < 10) {
//var temp = time[1];
time[1] = 0 + time[1];
}
return time;
}
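// Maps a Yahoo Weather condition code (0-46) to the name of a Skycons animation;
// unrecognised codes fall back to 'clear-day'.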
function getAnimationforWeatherCode(weathercode) {
var animation;
switch (parseInt(weathercode, 10)) {
case 0:
animation = 'sleet';
break;
case 1:
animation = 'sleet';
break;
case 2:
animation = 'sleet';
break;
case 3:
animation = 'sleet';
break;
case 4:
animation = 'sleet';
break;
case 5:
animation = 'snow';
break;
case 6:
animation = 'snow';
break;
case 7:
animation = 'snow';
break;
case 8:
animation = 'snow';
break;
case 9:
animation = 'rain';
break;
case 10:
animation = 'snow';
break;
case 11:
animation = 'rain';
break;
case 12:
animation = 'rain';
break;
case 13:
animation = 'snow';
break;
case 14:
animation = 'snow';
break;
case 15:
animation = 'snow';
break;
case 16:
animation = 'snow';
break;
case 17:
animation = 'sleet';
break;
case 18:
animation = 'sleet';
break;
case 19:
animation = 'fog';
break;
case 20:
animation = 'fog';
break;
case 21:
animation = 'fog';
break;
case 22:
animation = 'fog';
break;
case 23:
animation = 'wind';
break;
case 24:
animation = 'wind';
break;
case 25:
animation = 'cloudy';
break;
case 26:
animation = 'cloudy';
break;
case 27:
animation = 'partly-cloudy-night';
break;
case 28:
animation = 'partly-cloudy-day';
break;
case 29:
animation = 'partly-cloudy-night';
break;
case 30:
animation = 'partly-cloudy-day';
break;
case 31:
animation = 'clear-night';
break;
case 32:
animation = 'clear-day';
break;
case 33:
animation = 'clear-night';
break;
case 34:
animation = 'clear-day';
break;
case 35:
animation = 'sleet';
break;
case 36:
animation = 'clear-day';
break;
case 37:
animation = 'sleet';
break;
case 38:
animation = 'sleet';
break;
case 39:
animation = 'sleet';
break;
case 40:
animation = 'rain';
break;
case 41:
animation = 'snow';
break;
case 42:
animation = 'snow';
break;
case 43:
animation = 'snow';
break;
case 44:
animation = 'partly-cloudy-day';
break;
case 45:
animation = 'sleet';
break;
case 46:
animation = 'snow';
break;
default:
animation = 'clear-day';
}
return animation;
}
<file_sep>/public/javascripts/socketIO.js
var socket = io();
var alarmStatus = 0;
var latestWaterLeakReport;
socket.on('connect', function(data) {
console.log('connecting..');
// socket.emit('join', 'Hello World from client');
});
socket.on('mqtt', function(msg) {
var message = msg.topic.split('/');
var area = message[1];
var state = message[2];
var timestamp = Math.round((new Date()).getTime() / 1000);
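// Topics follow the pattern home/<area>/<sensor> (e.g. 'home/engineroom/watermeter'), so
// message[1] is the area and message[2] the sensor/state part; the switch below dispatches
// on the full topic string.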
let battery; //make it better
let batteryIcon;
switch (msg.topic) {
case 'home/engineroom/watermeter':
// get latest values from db to screen
watermeterData();
break;
// 433MHz door sensors
case 'home/rtl_433/sensor_1813':
case 'home/rtl_433/sensor_34238':
case 'home/rtl_433/sensor_50860':
case 'home/rtl_433/sensor_48187':
let sensorText = mqtttext.doorOpen;
if ( !msg.payload.id ) {
console.log(`unknown sensor ${JSON.stringify(msg)}`);
break;
}
const sensor = msg.payload.id.toString();
let sensorVal;
if(sensor === '1813') {
sensorVal = 'backdoor'
} else if(sensor === '34238') {
sensorVal = 'sidedoor'
} else if(sensor === '50860') {
sensorVal = 'frontdoor'
} else if(sensor === '48187') {
sensorVal = 'garagedoor'
}
if (msg.payload.cmd === 14) {
sensorText = mqtttext.doorClosed;
$(`#${sensorVal}status`).removeClass('badge-danger').addClass('badge badge-success');
} else if (msg.payload.cmd === 10) {
$(`#${sensorVal}status`).removeClass('badge-success').addClass('badge badge-danger');
} else {
console.log('unsupported cmd:', msg);
}
$(`#${sensorVal}text`).html(`${mqtttext[sensorVal]}`);
$(`#${sensorVal}status`).html(sensorText);
break;
case 'home/engineroom/waterleak':
let waterStatusMsg = mqtttext.statusOK;
// Store the report time
latestWaterLeakReport = msg.payload.time;
if (msg.payload.state === 0) {
$('#waterleakStatus').removeClass('badge-danger').removeClass('badge badge-warning').addClass('badge badge-success');
} else {
waterStatusMsg = mqtttext.waterLeakON;
$('#waterleakStatus').removeClass('badge-success').removeClass('badge badge-warning').addClass('badge badge-danger');
}
$('#waterleakText').html(mqtttext.waterLeak);
$('#waterleakStatus').html(waterStatusMsg);
checkWaterLeakLastReportTime();
break;
case 'home/alarm':
let alarmStatusMsg = mqtttext.statusOFF;
alarmStatus = msg.payload;
if (alarmStatus === 0) {
$('#alarmStatus').removeClass('badge-success').addClass('badge badge-danger');
} else {
alarmStatusMsg = mqtttext.statusON;
$('#alarmStatus').removeClass('badge-danger').addClass('badge badge-success')
}
$('#alarmText').html(mqtttext.alarmtext);
$('#alarmStatus').html(alarmStatusMsg);
break;
default:
console.log(`Error:no such MQTT topic handler in frontend UI. ${JSON.stringify(msg)}`);
break;
}
//switch on the topic
});
var checkWaterLeakLastReportTime = function () {
try {
// check if report time more than 13h
checkIfDataIsStalefrom(latestWaterLeakReport, 780);
} catch (error) {
$('#waterleakStatus').removeClass('badge-success').removeClass('badge-danger').addClass('badge badge-warning');
$('#waterleakStatus').html(mqtttext.statusNA);
}
}
var toggleAlarm = function () {
alarmStatus ^= true;
const topic = 'home/alarm';
// Send socket.io message to mqtt server side which send the actual mqtt message
socket.emit('mqtt', {'topic' : topic, 'payload' : alarmStatus})
}
$(document).ready(function () {
var d = document.getElementById("alarmStatus");
d.onclick = function () {
toggleAlarm();
};
//every hour
checkWaterLeakLastReportTime();
setInterval(checkWaterLeakLastReportTime, 3600000);
});
|
c7a533949f7faba6cef9128d9f431f075c286c32
|
[
"JavaScript"
] | 2
|
JavaScript
|
hublemeer/magicaespeculo
|
93563aaccdfd4181cb8724bf2101890d05baa75e
|
b644fc782e9162925cf8d17dbbf2ecf3cd6bbb19
|
refs/heads/master
|
<file_sep>package com.twq.parser.utils;
import com.twq.parser.utils.ParserUtils;
/*Website URL information
Contains:
rawUrl -> https://www.underarmour.cn/s-HOVR?qf=11-149&pf=&sortStr=&nav=640#NewLaunch
schema -> https
hostport(domain) -> www.underarmour.cn
path -> /s-HOVR
query -> qf=11-149&pf=&sortStr=&nav=640
fragment -> NewLaunch
*/
public class UrlInfo {
private String rawUrl;
private String schema;
private String hostport;
private String path;
private String query;
private String fragment;
//the setters are folded into the constructor, so the fields are initialised when a UrlInfo is created
public UrlInfo(String rawUrl, String schema, String hostport,
String path, String query, String fragment) {
this.rawUrl = rawUrl;
this.schema = schema;
this.hostport = hostport;
this.path = path;
this.query = query;
this.fragment = fragment;
}
//four dedicated getters: getPathQueryFragment, the URL without its query, the whole URL (raw), and the Domain (hostport)
public String getPathQueryFragment() {
if (ParserUtils.isNullOrEmptyOrDash(query) && ParserUtils.isNullOrEmptyOrDash(fragment)) {
return ParserUtils.notNull(path);
} else if (ParserUtils.isNullOrEmptyOrDash(query) && !ParserUtils.isNullOrEmptyOrDash(fragment)) {
return path + "#" + fragment;
} else if (!ParserUtils.isNullOrEmptyOrDash(query) && ParserUtils.isNullOrEmptyOrDash(fragment)) {
return path + "?" + query;
} else {
return path + "?" + query + "#" + fragment;
}
}
public String getUrlWithoutQuery() {
if (ParserUtils.isNullOrEmptyOrDash(path)) {
//eg:https://www.underarmour.cn/
return schema + "://" + hostport;
} else {
//eg:https://www.underarmour.cn/s-HOVR
return schema + "://" + hostport + path;
}
}
//business-aware accessor for rawUrl ("-" when missing)
public String getFullUrl() {
if (ParserUtils.isNullOrEmptyOrDash(rawUrl)) {
return "-";
} else {
return rawUrl;
}
}
//business-aware accessor for the hostport (domain), "-" when missing
public String getDomain() {
if (ParserUtils.isNullOrEmptyOrDash(hostport)) {
return "-";
} else {
return hostport;
}
}
public String getSchema() {
return schema;
}
public String getPath() {
return path;
}
public String getQuery() {
return query;
}
public String getFragment() {
return fragment;
}
}
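// A short usage sketch (hypothetical call site), based on the sample URL in the class comment:
//   UrlInfo info = new UrlInfo(
//       "https://www.underarmour.cn/s-HOVR?qf=11-149&pf=&sortStr=&nav=640#NewLaunch",
//       "https", "www.underarmour.cn", "/s-HOVR", "qf=11-149&pf=&sortStr=&nav=640", "NewLaunch");
//   info.getPathQueryFragment() -> "/s-HOVR?qf=11-149&pf=&sortStr=&nav=640#NewLaunch"
//   info.getUrlWithoutQuery()   -> "https://www.underarmour.cn/s-HOVR"
//   info.getDomain()            -> "www.underarmour.cn"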
<file_sep>--Initialisation script for the pre-parsing stage
--Initialisation script for the main ETL stage
<file_sep># web-traffic-analysis
#tracker: JavaScript that collects behavioural data about users visiting the site
#tracker-logServer: receives the tracker data and stores it locally
#backend: backend used to clean and parse the data and finally load it into Hive
#frontend: frontend used for data analysis and visualisation<file_sep>package com.twq.parser.dataobject;
//Empty marker interface; the builders produce concrete objects that implement it
//Implementing classes: InvalidLogObject, BaseObject
public interface ParsedDataObject {
}
|
dcda196dda76f0415589d27cc453f147f1e93b45
|
[
"Markdown",
"Java",
"SQL"
] | 4
|
Java
|
Madcue/web-traffic-analysis
|
6afc7db7a6f219fddb1555fa169bdb542aa78a11
|
029ead7af850600cef501762a71e5fa77adb88c3
|
refs/heads/master
|
<file_sep>import pandas as pd
import scipy
import scipy.cluster.hierarchy as sch
from scipy.cluster.hierarchy import fcluster
from scipy.cluster.hierarchy import dendrogram, linkage
data = pd.read_csv('/Users/jrobertson/Desktop/Heidelberg_snp_mat.txt',sep='\t',header=0,index_col=0)
data_labels = data.columns.values
distance_matrix = data.values  # .values replaces the deprecated DataFrame.as_matrix()
condensed_matrix = scipy.spatial.distance.squareform(distance_matrix)
Z = scipy.cluster.hierarchy.linkage(condensed_matrix, method='single')
dists = range(0, 101)  # SNP distance thresholds 0 through 100
clust_assignments = dict()
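# For each threshold in dists, fcluster cuts the single-linkage tree at that cophenetic distance;
# the loops below record, for every sample, its cluster label at each threshold, so the final
# output is one tab-separated row per sample with one column per distance cut.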
for dist in dists:
index = 0
clusters = fcluster(Z, dist, criterion='distance')
for id in data.columns.values:
if not id in clust_assignments:
clust_assignments[id] = list()
clust_assignments[id].append(str(clusters[index]))
index += 1
for id in clust_assignments:
print ("{}\t{}".format(id,"\t".join(clust_assignments[id])))
|
028462db2457e6a1b55077a374f47135e6ffb5f0
|
[
"Python"
] | 1
|
Python
|
jrober84/population_genetics
|
b59c3ac7dea54de5c15e80ce3ee5229eb280ec47
|
84076f3998987f412bd71439dec69e67fb49d12c
|
refs/heads/master
|
<file_sep>//<NAME>, группа 108, семинар 7, задача 22
//Создание архива: название файлов, в него включенных, размеры, содержимое
# include <stdlib.h>
# include <stdio.h>
int main()
{
char fname1[1000]; char fname2[1000]; char c; //массивы для ввода названий файлов
FILE* f = fopen("Archive.txt", "w"); //открытие (создание) файла-архива
//Выбор пользователем файлов
printf("Type the name of file1: ");
scanf("%s", &fname1);
printf("\n");
printf("Type the name of file2: ");
scanf("%s", &fname2);
printf("\n");
//Открытие файлов на чтение
FILE* f1 = fopen(fname1, "r");
FILE* f2 = fopen(fname2, "r");
//Вычисление размеров файлов
int size1 = 0;
while (!feof(f1)) {
c = fgetc(f1);
size1++;
}
int size2 = 0;
while (!feof(f2)) {
c = fgetc(f2);
size2++;
}
//Вывод информации о названии файлов и их размерах
fprintf(f, "1. %s (%d B) \n", fname1, size1);
fprintf(f, "2. %s (%d B) \n", fname2, size2);
fclose(f1); fclose(f2);
f1 = fopen(fname1, "r");
f2 = fopen(fname2, "r");
//Перенесение информации из файлов в один
while (!feof(f1)) {
c = fgetc(f1);
if (c != EOF) fputc(c, f);
}
fprintf(f, "\n----------------------\n");
while (!feof(f2)) {
c = fgetc(f2);
if (c != EOF) fputc(c, f);
}
fprintf(f, "\n----------------------\n");
fclose(f);
system("pause");
}
|
dc2364b30069977ff7b05874df814cad4a7a956e
|
[
"C++"
] | 1
|
C++
|
koil257/Task6
|
bc5bbbca015284e3abb85972bb89ec703de512ea
|
cd5c1347de1b22e0620b2ce341880253f3a25045
|
refs/heads/master
|
<file_sep>let sum=0;
let numofvalues=0;
function Targil1(mat){
for( i = 0; i< mat.length; i++){
for(j = 0; j < mat[i].length; j++){
sum+=mat[i][j];
numofvalues++
}
}
console.log(sum / numofvalues );
}
Targil1([[1,2,3,4,5],[6,7,8,9,10],[11,12,13,14,15]]);
function bubbleSort(array){
let len=array.length;
for(let j = 0; j <len; j++){
for(let i=0;i<len; i++){
if(array[i]< array[i+1]){
let temp= array[i+1];
array[i+1]= array[i];
array[i]=temp;
}
}
}
console.log(array);
}
bubbleSort([10,245,2315,545]);
// debugger
// function singleNum(arr){
// let temp=0;
// for(i = 0; i < arr.length; i++){
// for(j = i+1; j < arr.length; j++){
// if(arr[i]==arr[j]){
// temp=arr[j] ;
// arr[j]=arr[arr.length-1];
// arr[arr.length-1]=temp;
// arr.pop();
// // if(i>0){
// // i--;
// // }
// break;
// }
// }
// }
// console.log(arr);
// // return arr;
// }
// singleNum([2,2,5,7,5,7]);
// // debugger
// function useOfTargil3(arr2 , arr3){
// let arr2new=singleNum(arr2);
// let arr3new=singleNum(arr3);
// let Narray=[0];
// for(i = 0; i < arr2new.length; i++){
// for(j = 0; i < arr3new.length; j++){
// if(arr2new[i]==arr3new[j]){
// Narray.push(arr2new[i]);
// break;
// }
// }
// }
// console.log(Narray);
// // return Narray;
// }
// useOfTargil3([3,5,7,1,3,9] , [2,2,5,7,5,7]);
function primeNumber(num){
for(let i=2; i<=(num-1); i++) {
if(num%i==0){
console.log('not prime');
return false;
}
}
console.log('prime');
return true;
}
// primeNumber(10);
// debugger
let primal=0;
function Targil4(rishoniMat){
for(m = 0; m < rishoniMat.length; m++){
for(n = 0;n < rishoniMat[m].length; n++){
if(primeNumber(rishoniMat[m][n])){
primal++;
}
}
}
console.log(primal);
}
Targil4([[10,11,12,13,14],[20,21,22,23,24],[30,31,32,33,34]]);
let count =0;
function Targil5(mat){
for(i = 0; i < mat.length;i++){
for(j = 0; j < mat[i].length; j++){
if(mat[i][j] >= 'A' && mat[i][j] <= 'Z'){
count++
}
}
}
console.log(count);
}
Targil5([['d','b','R','n','u'],['S','X','a','w','y'], ['v','u','Q','p']]);
function islandAmount(mat){
let counter = 0;
for( i = 0; i <mat.length; i++){
for(j = 0; j < mat[i].length; j++){
if(mat[i][j]==1){
if(isIsland(mat,i, j , mat.length)){
counter++;
}
}
}
}
return counter;
}
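// isIsland returns true only when all 8 neighbours of a cell (with bounds checks at the edges
// of the matrix, which is assumed to be square) are 0, so islandAmount counts isolated single
// 1-cells rather than connected regions of 1s.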
function isIsland(mat, i ,j, lenghtOfMatrix){
if( (j == 0 || mat[i][j-1]==0) &&
((i == 0 || j ==0) || mat[i-1][j-1]==0) &&
(i==0 || mat[i-1][j]==0) &&
((i == 0 || j == lenghtOfMatrix-1) || mat[i-1][j+1]==0) &&
(j == lenghtOfMatrix-1 || mat[i][j+1]==0) &&
(( i == lenghtOfMatrix-1 || j == lenghtOfMatrix-1) || mat[i+1][j+1]==0) &&
(i== lenghtOfMatrix-1 || mat[i+1][j]==0) &&
((i == lenghtOfMatrix-1 || j == 0) || mat[i+1][j-1]==0)){
return true;
}
return false;
}
console.log(islandAmount([[0,0,0,1],
[0,1,0,1],
[0,0,0,0],
[0,0,1,0]]));
// debugger
function check(array){
for(i = 1; i <= 9; i++){
if(array.indexOf(i) == -1)
{
return false;
}
}
return true;
}
debugger
function allOfNums(mat){
let contain1=0;
for(i = 0; i < 9; i++){
if(check(mat[i])){
contain1++;
}
}
console.log(contain1);
}
allOfNums([[1,2,3,4,5,6,7,8,9],
[9,8,8,7,6,5,4,3,2],
[1,2,3,4,5,6,7,8,4],
[2,1,3,4,5,5,6,7,8],
[1,1,1,1,1,1,1,1,1],
[9,8,7,6,5,4,3,2,1],
[5,4,3,2,1,6,7,8,9],
[2,2,3,3,4,4,5,5,6],
[3,2,1,4,5,6,7,8,3]]);
// A function that checks whether the array contains duplicates of the numbers 1 to 9; if it does, it returns "not good", and if not, it returns "good".
// The result goes back into a variable in the main function, whose job is to check whether "good" came back 9 times.
// If "good" really did come back 9 times, the matrix is valid; otherwise it is not valid.
<file_sep>
//CR: What if they are both the same? No console log would be written.
//CR: You don't need another if statement after the first one, because if num1 is not bigger than num2 then it is smaller (or equal)
function whichNumIsBigger(num1,num2){
if(num1>num2){
console.log(num1);
}
else if(num2>num1){
console.log(num2);
}
}
let y= 6;
let z= 9;
whichNumIsBigger(z,y);
// which num is bigger
function biggerThan3Chars(str1,str2,str3,str4){
let x=0;
if(str1.length>3){
x++;
}
if(str2.length>3){
x++;
}
if(str3.length>3){
x++;
}
if(str4.length>3){
x++
}
console.log(x);
}
let name= 'doriel';
let id= '206087298';
let surename= 'aboya';
let nickname= 'princeOfBellAir';
biggerThan3Chars(name,id,surename,nickname);
//if the string is bigger than 3 chars
function thereIsAnA(){
let unknown= document.getElementById('name').value;
let n=unknown.indexOf('A');
if(n!=-1){
console.log(unknown.replace(unknown[n] , 'a'));
}
}
//change A to a
function evenOrNONegOrPos(n1){
if(n1>0 && n1%2!=0){
console.log('even,odd');
}
if(n1>0 && n1%2==0){
console.log('even, double');
}
if(n1<0 && n1%2!=0){
console.log('negative, odd');
}
if(n1<0 && n1%2==0){//CR: Again -> There is no need for the last check because it's the only option left
console.log('negative, double');
}
}
evenOrNONegOrPos(15);
// check if the number is even odd double or negative
function compareLastAndFirstLetter(str){
let n= str.length;
if(n[0]==n[n-1]){ //CR: n is a number (its a length), So how do you treat it like an array (with n[0])?
let newStr=str.slice(1,n-1);
console.log(newStr);
}
}
compareLastAndFirstLetter('dorield');
// compare between the first and the last letter of the char
function symbolsChecks(str1){
let position= str1.indexOf('*');
if(str1.indexOf('*')!=-1){ //CR: You already calculated the value in the last line and put the value in "position". Don't calculate it again; use the variable
console.log(str1.slice(0,position) + str1.slice(position+1));
}
let shtrudel=str1.indexOf('@');
if(shtrudel!=-1){
console.log('*'+ str1 +'*');
}
if(str1.indexOf('₪')!=-1){
console.log('new shekels');
}
}
symbolsChecks('d₪o@rie*l');
//remove the * from the string
//add * at the beginning and end if there is a @
//print new shekels if there is a ₪ <file_sep>
function playEasy(){ // open the game at the easy level
location.replace("game.html?playEasy");
}
function playMedium(){//open the game on the medium level
location.replace("game.html?playMedium");
}
function playHard(){ //open the game on the hard level
location.replace("game.html?playHard");
}
// function fullTheSudoku(mat){// function that insert the numbers of sudoku game into each cell of the table
// table= document.getElementById('table');
// let number=0;
// for(r = 0; r < table.rows.length; r++){
// for(c = 0;c < table.rows[r].cells.length; c++){
// table.rows[r].cells[c].innerHTML = mat[r][c];
// }
// }
// }
// fullTheSudoku([[9,1,4,7,8,2,3,6,5],
// [3,8,5,4,1,6,7,9,2],
// [7,2,6,5,9,3,4,1,8],
// [8,9,2,6,3,7,5,4,1],
// [4,6,3,2,5,1,8,7,9],
// [5,7,1,9,4,8,2,3,6],
// [1,4,8,3,2,9,6,5,7],
// [2,5,7,1,6,4,9,8,3],
// [6,3,9,8,7,5,1,2,4]]);<file_sep>import React, { Component } from 'react';
import {
BrowserRouter as Router,
Switch,
Route,
Link
} from "react-router-dom";
export default class HomePage extends Component {
constructor(props) {
super(props)
this.state = {
roomList: [],
}
}
pushRoomToArr = () =>{
let oldRoomList = this.state.roomList;
this.setState({roomList:[...oldRoomList,this.props.room]})
console.log(this.state.roomList);
}
render() {
return (
<div>
{this.state.roomList.map((element)=>{
return <div><Link to="/room"> <button style={{backgroundColor:this.props.roomcolor}}>
{element.props.roomname}'s room </button></Link></div> })}
<Link to="/add"><button>ADD</button></Link>
</div>
)
}
}
<file_sep>
debugger
function difficultyLevel(mat){
let difflevel= location.search;
let randomrunrow;
let randomruncoll;
let numberstoclear;
// var mat = 0;
let empty;
if(difflevel=='?playEasy'){
numberstoclear=20;
}
if(difflevel=='?playMedium'){
numberstoclear=40;
}
if(difflevel=='?playHard'){
numberstoclear=60;
}
while(numberstoclear>0){
randomrunrow= Math.floor(Math.random()*9);
randomruncoll= Math.floor(Math.random()*9);
// console.log(mat[randomrunrow][randomruncoll]);
if(mat[randomrunrow][randomruncoll]!=""){
mat[randomrunrow][randomruncoll]='';
numberstoclear--;
}
}
table= document.getElementById('table');
let number=0;
for(r = 0; r < table.rows.length; r++){
for(c = 0;c < table.rows[r].cells.length; c++){
table.rows[r].cells[c].innerHTML = mat[r][c];
}
}
}
// debugger
function fullTheSudoku(mat){ // function that insert the numbers of sudoku game into each cell of the table
table= document.getElementById('table');
let number=0;
for(r = 0; r < table.rows.length; r++){
for(c = 0;c < table.rows[r].cells.length; c++){
table.rows[r].cells[c].innerHTML = mat[r][c];
}
}
console.log(mat);
difficultyLevel(mat);
}
fullTheSudoku([[9,1,4,7,8,2,3,6,5],
[3,8,5,4,1,6,7,9,2],
[7,2,6,5,9,3,4,1,8],
[8,9,2,6,3,7,5,4,1],
[4,6,3,2,5,1,8,7,9],
[5,7,1,9,4,8,2,3,6],
[1,4,8,3,2,9,6,5,7],
[2,5,7,1,6,4,9,8,3],
[6,3,9,8,7,5,1,2,4]]);
difficultyLevel([[9,1,4,7,8,2,3,6,5],
[3,8,5,4,1,6,7,9,2],
[7,2,6,5,9,3,4,1,8],
[8,9,2,6,3,7,5,4,1],
[4,6,3,2,5,1,8,7,9],
[5,7,1,9,4,8,2,3,6],
[1,4,8,3,2,9,6,5,7],
[2,5,7,1,6,4,9,8,3],
[6,3,9,8,7,5,1,2,4]]);
<file_sep>import React, { Component } from 'react'
import HomePage from './components/HomePage.js';
import Header from './components/Header.js';
import AddRoom from './components/AddRoom.js';
import {
BrowserRouter as Router,
Switch,
Route,
Link
} from "react-router-dom";
import Room from './components/Room.js';
export default class App extends Component {
state={
rooms:[{roomName:'', roomColor:'', roomType:''}]
}
UpdateTheState=(r)=>{
this.setState({rooms:r}) //CR: Problem: rooms is a list and you are changing it to a single room. You need to add the room to the list, not replace the entire rooms variable
console.log(this.state.rooms.roomName); //CR: rooms is supposed to be a list of items. You can't access roomName on a list because it's not an object, it's a list
}
render() {
return (
<div className="App">
<Header/>
<Router>
<Switch>
<Route exact path="/">
<HomePage roomname={this.state.rooms.roomName} roomcolor={this.state.rooms.roomColor} room={this.state.rooms}/>
</Route>
<Route path="/add">
<AddRoom update={this.UpdateTheState}/>
</Route>
<Route path="/room">
<Room/>
</Route>
</Switch>
</Router>
</div>
);
}
}
<file_sep>// let student = {
// name: 'doriel',
// id: '123456789',
// age: 24,
// city:{
// address: 'igal alon',
// number: 92
// }
// };
// console.log(student.name);
// student.age++;
// console.log(student.age);
// console.log(student);
// student.car='skoda';
// console.log(student);
// console.log(student.city.address);
let listOfStudents=[
{name:'doriel', id: '1234'},
{name:'timna', id: '4321'},
{name:'liora', id: '2134'}];
for( i = 0; i < listOfStudents.length; i++){
console.log(listOfStudents[i].id);
}
// Write a function that receives an array of people; the function creates a new array with all the people who live in Tel Aviv and returns it.
function telAvivCitizen(arr){
let newarr=[];
for(i = 0; i < arr.length; i++){
if(arr[i].city =='tel aviv'){
newarr.push(arr[i].name) ;
// newarr=arr[i].name;
}
}
console.log(newarr);
}
telAvivCitizen([{name: 'doriel', city: 'tel aviv'},{name: 'timna', city: 'tel aviv'},{name: 'omer', city: 'qiryat gat'}]);
function randomNumber(){
let num= Math.floor(Math.random()*10);
document.getElementById('showme').innerHTML=num;
}
|
6662f5d4901a39f1c34dae2160c6068feff9a0d5
|
[
"JavaScript"
] | 7
|
JavaScript
|
omer-checkmarx/StudyCourse
|
f9dedbb60143748720c654c3ef00538472420fdf
|
3e7d70009c881d4d7d9b78a936ba120bde1c4524
|
refs/heads/master
|
<repo_name>jackiehoon/coderbyte<file_sep>/FirstFactorial.rb
def FirstFactorial(num)
k=1
1.upto(num) do |x|
k = x*k
end
num=k
# code goes here
return num
end
<file_sep>/ABCheck.rb
def ABCheck(str)
str=str.split(//).to_a
k=false
a=0
str.each do |x|
if x=="a"
if str[a+4] == "b"
k=true
end
end
a+=1
end
# code goes here
return k
end
<file_sep>/PowersofTwo.rb
def PowersofTwo(num)
while 1
if num%2 !=0
hot=false
return hot
elsif num == 2
hot = true
return hot
else
num=num/2
end
end
# code goes here
return hot
end
<file_sep>/WordCount.rb
def WordCount(str)
str=str.split
str=str.length
# code goes here
return str
end
<file_sep>/ThirdGreatest.rb
def ThirdGreatest(strArr)
k=Array.new
i=0
strArr.each do |x|
k[i]=x.split(//).to_a
i+=1
end
j=0
l=Array.new
k.each do |x|
l[j]=x.length
j+=1
end
l[l.index(l.max)]= 0
l[l.index(l.max)]=0
w=strArr[l.index(l.max)]
# code goes here
return w
end
<file_sep>/MultiplicativePersistence.rb
def MultiplicativePersistence(num)
sum=1
k=0
while num>=10
num=num.to_s.split(//)
num.each{|x| sum=sum*x.to_i}
num=sum
k+=1
sum=1
end
# code goes here
return k
# code goes here
return num
end
<file_sep>/VowelCount.rb
def VowelCount(str)
str=str.split(//).to_a
sum=0
str.each do |x|
if x=="A"|| x=="E"|| x=="I"|| x=="O"|| x=="U"|| x=="a"|| x=="i"|| x=="o"|| x=="u"|| x=="e"
sum+=1
end
end
# code goes here
return sum
end
<file_sep>/CheckNums.rb
def CheckNums(num1,num2)
if num1>num2
num1=false
elsif num1<num2
num1=true
else
num1=-1
end
# code goes here
return num1
end
<file_sep>/LetterChanges.rb
def LetterChanges(str)
b = str.bytes.to_a
k=0
j = Array.new
s=0
t=0
b.each do |x|
if (x.to_i <= 90 && x.to_i >= 65 || x.to_i >=97 && x.to_i <=122)
j[s]= x.succ
else
j[s]=x.to_i
end
s+=1
end
j.each do |x|
if x ==nil
j[t] = " "
else
j[t] = x.chr
end
if j[t] == "a" || j[t] == "e" || j[t] == "i" || j[t] == "o" || j[t] == "u"
j[t]=j[t].capitalize
end
t+=1
end
j=j.join
# code goes here
return j
end
<file_sep>/SimpleAdding.rb
def SimpleAdding(num)
k=0
1.upto(num) do |x|
k+=x
end
num=k
# code goes here
return num
end
<file_sep>/Palindrome.rb
def Palindrome(str)
str=str.delete(" ")
if str == str.reverse
str = true
else
str= false
end
# code goes here
return str
end
<file_sep>/AdditivePersistence.rb
def AdditivePersistence(num)
sum=0
k=0
while num>=10
num=num.to_s.split(//)
num.each{|x| sum+=x.to_i}
num=sum
k+=1
sum=0
end
# code goes here
return k
end
<file_sep>/TimeConvert.rb
def TimeConvert(num)
a=num/60
b=num-a*60
num=a.to_s+":"+b.to_s
# code goes here
return num
end
|
0ee9c8a5b79dcc3cea0d63b335c9c255c948c92b
|
[
"Ruby"
] | 13
|
Ruby
|
jackiehoon/coderbyte
|
dfad885880bc88c1f6b47ef41f8c80fa3272be43
|
74232c026cb56b0d96f9ee631f6f0e4cd8eb88b6
|
refs/heads/main
|
<file_sep># DataEntrySystem
It is a complete example of the Repository pattern with .NET Core and EF.
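
The C# sources themselves are not included in this snippet, so the block below is only a minimal sketch of the idea behind the Repository pattern, written in Go purely for illustration; every type and method name in it is made up and is not the project's actual API. The application depends on a small repository interface, and the concrete storage (an EF `DbContext` in the .NET Core version, a plain in-memory map here) stays hidden behind it.

```go
package main

import "fmt"

// Item stands in for an EF-style entity (illustrative only).
type Item struct {
	ID   int
	Name string
}

// ItemRepository is the abstraction the rest of the application depends on.
type ItemRepository interface {
	GetByID(id int) (Item, bool)
	Add(item Item)
}

// inMemoryItemRepository is a trivial implementation; a real project would
// wrap a database context here instead of a map.
type inMemoryItemRepository struct {
	items map[int]Item
}

func newInMemoryItemRepository() *inMemoryItemRepository {
	return &inMemoryItemRepository{items: make(map[int]Item)}
}

func (r *inMemoryItemRepository) GetByID(id int) (Item, bool) {
	item, ok := r.items[id]
	return item, ok
}

func (r *inMemoryItemRepository) Add(item Item) {
	r.items[item.ID] = item
}

func main() {
	// Callers only see the interface, so the storage backend can be
	// swapped or mocked without touching this code.
	var repo ItemRepository = newInMemoryItemRepository()
	repo.Add(Item{ID: 1, Name: "first entry"})
	if item, ok := repo.GetByID(1); ok {
		fmt.Println(item.Name)
	}
}
```

The payoff of the pattern is that callers never touch the persistence layer directly, which keeps data access swappable and easy to test.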
|
675c3b2930c3412c67e79b67146c04f379c1977f
|
[
"Markdown"
] | 1
|
Markdown
|
kalusinghrao07/DataEntrySystem
|
f4a1e59a199e3107f0c2a094cd7e038a7e8ba27e
|
1eb8169f9718d9f4b23d4a33eebf6910e257f2cd
|
refs/heads/master
|
<repo_name>penguinn/prpc<file_sep>/common.go
package prpc
import (
"sync"
"reflect"
"log"
"errors"
"io"
"strings"
)
type methodType struct {
sync.Mutex // protects counters
method reflect.Method
ArgType reflect.Type
ReplyType reflect.Type
numCalls uint
}
type Common struct {
mu sync.RWMutex
serviceMap map[string]*service
hedMutex sync.Mutex
header Header
mutex sync.Mutex // protects following
seq uint64
pending map[uint64]*Call
closing bool // user has called Close
shutdown bool // server has told us to stop
Free
}
// Register publishes in the server the set of methods of the
// receiver value that satisfy the following conditions:
// - exported method of exported type
// - two arguments, both of exported type
// - the second argument is a pointer
// - one return value, of type error
// It returns an error if the receiver is not an exported type or has
// no suitable methods. It also logs the error using package log.
// The client accesses each method using a string of the form "Type.Method",
// where Type is the receiver's concrete type.
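// As a rough sketch of a registrable type (the names below are illustrative
// only; the package documentation in server.go and the Front/Sjy types under
// example/ show the same idea with protobuf argument types):
//
//	type Echo struct{}
//
//	type EchoArgs struct{ Text string }
//	type EchoReply struct{ Text string }
//
//	// Exported method, two exported argument types, second one a pointer,
//	// single error return value - so it satisfies the rules above.
//	func (e *Echo) Say(args *EchoArgs, reply *EchoReply) error {
//		reply.Text = args.Text
//		return nil
//	}
//
//	// common.Register(new(Echo))              // exposed as "Echo.Say"
//	// common.RegisterName("Voice", new(Echo)) // exposed as "Voice.Say"
//
// Whether the arguments can actually be transported also depends on the codec
// in use; the protobuf codec under codec/ expects the argument and reply
// values to implement proto.Message.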
func (common *Common) Register(rcvr interface{}) error {
return common.register(rcvr, "", false)
}
// RegisterName is like Register but uses the provided name for the type
// instead of the receiver's concrete type.
func (common *Common) RegisterName(name string, rcvr interface{}) error {
return common.register(rcvr, name, true)
}
func (common *Common) register(rcvr interface{}, name string, useName bool) error {
common.mu.Lock()
defer common.mu.Unlock()
if common.serviceMap == nil {
common.serviceMap = make(map[string]*service)
}
s := new(service)
s.typ = reflect.TypeOf(rcvr)
s.rcvr = reflect.ValueOf(rcvr)
sname := reflect.Indirect(s.rcvr).Type().Name()
if useName {
sname = name
}
if sname == "" {
s := "rpc.Register: no service name for type " + s.typ.String()
log.Print(s)
return errors.New(s)
}
if !isExported(sname) && !useName {
s := "rpc.Register: type " + sname + " is not exported"
log.Print(s)
return errors.New(s)
}
if _, present := common.serviceMap[sname]; present {
return errors.New("rpc: service already defined: " + sname)
}
s.name = sname
// Install the methods
s.method = suitableMethods(s.typ, true)
if len(s.method) == 0 {
str := ""
// To help the user, see if a pointer receiver would work.
method := suitableMethods(reflect.PtrTo(s.typ), false)
if len(method) != 0 {
str = "rpc.Register: type " + sname + " has no exported methods of suitable type (hint: pass a pointer to value of that type)"
} else {
str = "rpc.Register: type " + sname + " has no exported methods of suitable type"
}
log.Print(str)
return errors.New(str)
}
common.serviceMap[s.name] = s
return nil
}
// suitableMethods returns suitable Rpc methods of typ, it will report
// error using log if reportErr is true.
func suitableMethods(typ reflect.Type, reportErr bool) map[string]*methodType {
methods := make(map[string]*methodType)
for m := 0; m < typ.NumMethod(); m++ {
method := typ.Method(m)
mtype := method.Type
mname := method.Name
// Method must be exported.
if method.PkgPath != "" {
continue
}
// Method needs three ins: receiver, *args, *reply.
if mtype.NumIn() != 3 {
if reportErr {
log.Println("method", mname, "has wrong number of ins:", mtype.NumIn())
}
continue
}
// First arg need not be a pointer.
argType := mtype.In(1)
if !isExportedOrBuiltinType(argType) {
if reportErr {
log.Println(mname, "argument type not exported:", argType)
}
continue
}
// Second arg must be a pointer.
replyType := mtype.In(2)
if replyType.Kind() != reflect.Ptr {
if reportErr {
log.Println("method", mname, "reply type not a pointer:", replyType)
}
continue
}
// Reply type must be exported.
if !isExportedOrBuiltinType(replyType) {
if reportErr {
log.Println("method", mname, "reply type not exported:", replyType)
}
continue
}
// Method needs one out.
if mtype.NumOut() != 1 {
if reportErr {
log.Println("method", mname, "has wrong number of outs:", mtype.NumOut())
}
continue
}
// The return type of the method must be error.
if returnType := mtype.Out(0); returnType != typeOfError {
if reportErr {
log.Println("method", mname, "returns", returnType.String(), "not error")
}
continue
}
methods[mname] = &methodType{method: method, ArgType: argType, ReplyType: replyType}
}
return methods
}
func(common *Common) ReadLoop(codec Codec)(err error){
var header Header
for err == nil {
header = Header{}
err := codec.ReadHeader(&header)
if err != nil {
log.Println("[error] read header error:", err)
break
}
if header.IsResponse {
log.Println("Response")
log.Println("response.ServiceMethod ", header.ServiceMethod)
seq := header.Seq
common.mutex.Lock()
call := common.pending[seq]
delete(common.pending, seq)
common.mutex.Unlock()
switch {
case call == nil:
// We've got no pending call. That usually means that
// WriteRequest partially failed, and call was already
// removed; response is a server telling us about an
// error reading request body. We should still attempt
// to read error body, but there's no one to give it to.
err = codec.ReadBody(nil)
if err != nil {
err = errors.New("reading error body: " + err.Error())
}
case header.Error != "":
// We've got an error response. Give this to the request;
// any subsequent requests will get the ReadResponseBody
// error if there is one.
call.Error = ServerError(header.Error)
err = codec.ReadBody(nil)
if err != nil {
err = errors.New("reading error body: " + err.Error())
}
call.done()
default:
err = codec.ReadBody(call.Reply)
if err != nil {
call.Error = errors.New("reading body " + err.Error())
}
call.done()
}
} else {
log.Println("Request")
sending := new(sync.Mutex)
service, mtype, argv, replyv, keepReading, err := common.decodeReqHeader(&header, codec)
log.Println(3)
if err != nil {
if !keepReading {
log.Println(1)
return err
}
// send a response if we actually managed to read a header.
if &header != nil {
common.sendResponse(sending, &header, invalidRequest, codec, err.Error())
common.freeRequest(&header)
}
return err
}
go service.dispose(common.Free, sending, mtype, &header, argv, replyv, codec)
}
}
common.hedMutex.Lock()
//defer client.hedMutex.Unlock()
common.mutex.Lock()
//defer client.mutex.Unlock()
common.shutdown = true
closing := common.closing
if err == io.EOF {
if closing {
err = ErrShutdown
} else {
err = io.ErrUnexpectedEOF
}
}
for _, call := range common.pending {
call.Error = err
call.done()
}
common.mutex.Unlock()
common.hedMutex.Unlock()
if err != io.EOF && !closing {
log.Println("rpc: server protocol error:", err)
}
return nil
}
func (common *Common) decodeReqHeader(header *Header, codec Codec) (service *service, mtype *methodType, argv, replyv reflect.Value, keepReading bool, err error) {
service, mtype, keepReading, err = common.decodeRequestHeader(header)
if err != nil {
if !keepReading {
return
}
// discard body
codec.ReadBody(nil)
return
}
// Decode the argument value.
argIsValue := false // if true, need to indirect before calling.
if mtype.ArgType.Kind() == reflect.Ptr {
argv = reflect.New(mtype.ArgType.Elem())
} else {
argv = reflect.New(mtype.ArgType)
argIsValue = true
}
// argv guaranteed to be a pointer now.
if err = codec.ReadBody(argv.Interface()); err != nil {
return
}
if argIsValue {
argv = argv.Elem()
}
replyv = reflect.New(mtype.ReplyType.Elem())
return
}
func (common *Common) decodeRequestHeader(req *Header) (service *service, mtype *methodType, keepReading bool, err error) {
// Grab the request header.
//req = server.free.getRequest()
//
//req = header
// We read the header successfully. If we see an error now,
// we can still recover and move on to the next request.
keepReading = true
dot := strings.LastIndex(req.ServiceMethod, ".")
if dot < 0 {
err = errors.New("rpc: service/method request ill-formed: " + req.ServiceMethod)
return
}
serviceName := req.ServiceMethod[:dot]
methodName := req.ServiceMethod[dot+1:]
// Look up the request.
common.mu.RLock()
service = common.serviceMap[serviceName]
common.mu.RUnlock()
if service == nil {
err = errors.New("rpc: can't find service " + req.ServiceMethod)
return
}
mtype = service.method[methodName]
if mtype == nil {
err = errors.New("rpc: can't find method " + req.ServiceMethod)
}
return
}<file_sep>/README.md
# prpc
bothway rpc
<file_sep>/codec/conn_test.go
package codec
import (
"bufio"
"bytes"
"crypto/rand"
"io"
"reflect"
"testing"
"github.com/penguinn/prpc/codec/internal"
)
var (
testMessage internal.Struct
testData [defaultBufferSize + 8]byte
)
func init() {
if _, err := io.ReadFull(rand.Reader, testData[:]); err != nil {
panic("not enough entropy")
}
testMessage.Method = "test.service.method"
testMessage.Seq = 1<<64 - 1
testMessage.Bucket = make([][]byte, 16)
for i := 0; i < 16; i++ {
testMessage.Bucket[i] = testData[:]
}
testMessage.Data = testData[:]
}
func TestWriteReadFrame(t *testing.T) {
t.Parallel()
buf := &bytes.Buffer{}
enc := NewEncoder(buf)
if err := enc.writeFrame(testData[:]); err != nil {
t.Fatalf("write frame: %v", err)
}
data, err := readFull(bufio.NewReader(buf), nil)
if err != nil {
t.Fatalf("read frame: %v", err)
}
if bytes.Compare(testData[:], data) != 0 {
t.Fatalf("expected frame %q, got %q", testData[:], data)
}
}
func TestEncodeDecode(t *testing.T) {
t.Parallel()
req := testMessage
resp := internal.Struct{}
buf := &bytes.Buffer{}
enc := NewEncoder(buf)
dec := NewDecoder(buf)
if err := enc.Encode(&req); err != nil {
t.Fatalf("encode request: %v", err)
}
if err := dec.Decode(&resp); err != nil {
t.Fatalf("decode request: %v", err)
}
if !reflect.DeepEqual(req, resp) {
t.Fatalf("encode/decode: expected %#v, got %#v", req, resp)
}
}
<file_sep>/service.go
package prpc
import (
"sync"
"reflect"
)
type Conn struct {
Codec Codec
Data map[string]interface{}
}
type Header struct{
IsResponse bool
ServiceMethod string
Seq uint64
Error string
next *Header
}
type Codec interface{
ReadHeader(*Header) (error)
ReadBody(interface{}) error
Write(*Header, interface{}) error
Close() error
}
type service struct {
name string // name of service
rcvr reflect.Value // receiver of methods for the service
typ reflect.Type // type of the receiver
method map[string]*methodType // registered methods
}
func (s *service) Dispose(server *Server, sending *sync.Mutex, mtype *methodType, req *Header, argv, replyv reflect.Value, codec Codec) {
s.dispose(server.Free, sending, mtype, req, argv, replyv, codec)
}
func (s *service) dispose(free Free, sending *sync.Mutex, mtype *methodType, req *Header, argv, replyv reflect.Value, codec Codec) {
mtype.Lock()
mtype.numCalls++
mtype.Unlock()
function := mtype.method.Func
// Invoke the method, providing a new value for the reply.
returnValues := function.Call([]reflect.Value{s.rcvr, argv, replyv})
// The return value for the method is an error.
errInter := returnValues[0].Interface()
errmsg := ""
if errInter != nil {
errmsg = errInter.(error).Error()
}
free.sendResponse(sending, req, replyv.Interface(), codec, errmsg)
free.freeRequest(req)
}
<file_sep>/free.go
package prpc
import (
"sync"
"log"
)
type Free struct {
reqLock sync.Mutex // protects freeReq
freeReq *Header
respLock sync.Mutex // protects freeResp
freeResp *Header
}
func (free *Free) getRequest() *Header {
free.reqLock.Lock()
defer free.reqLock.Unlock()
req := free.freeReq
if req == nil {
req = new(Header)
} else {
free.freeReq = req.next
*req = Header{}
}
return req
}
func (free *Free) FreeRequest(req *Header) {
free.freeRequest(req)
}
func (free *Free) freeRequest(req *Header) {
free.reqLock.Lock()
defer free.reqLock.Unlock()
req.next = free.freeReq
free.freeReq = req
}
func (free *Free) getResponse() *Header {
free.respLock.Lock()
defer free.respLock.Unlock()
resp := free.freeResp
if resp == nil {
resp = new(Header)
} else {
free.freeResp = resp.next
*resp = Header{}
}
return resp
}
func (free *Free) freeResponse(resp *Header) {
free.respLock.Lock()
defer free.respLock.Unlock()
resp.next = free.freeResp
free.freeResp = resp
}
func (free *Free) SendResponse(sending *sync.Mutex, req *Header, reply interface{}, codec Codec, errmsg string) {
free.sendResponse(sending, req, reply, codec, errmsg)
}
func (free *Free) sendResponse(sending *sync.Mutex, req *Header, reply interface{}, codec Codec, errmsg string) {
resp := free.getResponse()
// Encode the response header
resp.ServiceMethod = req.ServiceMethod
if errmsg != "" {
resp.Error = errmsg
reply = invalidRequest
}
resp.Seq = req.Seq
resp.IsResponse = true
sending.Lock()
err := codec.Write(resp, reply)
if debugLog && err != nil {
log.Println("rpc: writing response:", err)
}
sending.Unlock()
free.freeResponse(resp)
}<file_sep>/call.go
package prpc
import(
"log"
)
// Call represents an active RPC.
type Call struct {
ServiceMethod string // The name of the service and method to call.
Args interface{} // The argument to the function (*struct).
Reply interface{} // The reply from the function (*struct).
Error error // After completion, the error status.
Done chan *Call // Strobes when call is complete.
}
func (call *Call) done() {
select {
case call.Done <- call:
// ok
default:
// We don't want to block here. It is the caller's responsibility to make
// sure the channel has enough buffer space. See comment in Go().
if debugLog {
log.Println("rpc: discarding Call reply due to insufficient Done chan capacity")
}
}
}
<file_sep>/client.go
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package prpc
import (
//"bufio"
"errors"
"log"
)
// ServerError represents an error that has been returned from
// the remote side of the RPC connection.
type ServerError string
func (e ServerError) Error() string {
return string(e)
}
var ErrShutdown = errors.New("connection is shut down")
// Client represents an RPC Client.
// There may be multiple outstanding Calls associated
// with a single Client, and a Client may be used by
// multiple goroutines simultaneously.
type Client struct {
codec Codec
Common
//CallbackMapLock sync.RWMutex
//CallbackMap map[string]func(*Client, Codec, Header) error
}
// NewClient returns a new Client to handle requests to the
// set of services at the other end of the connection.
// It adds a buffer to the write side of the connection so
// the header and payload are sent as a unit.
//func NewClient(conn io.ReadWriteCloser) *Client {
// encBuf := bufio.NewWriter(conn)
// client := &gobClientCodec{conn, gob.NewDecoder(conn), gob.NewEncoder(encBuf), encBuf}
// return NewClientWithCodec(client)
//}
// NewClientWithCodec is like NewClient but uses the specified
// codec to encode requests and decode responses.
func NewClientWithCodec(codec Codec) *Client {
client := &Client{
codec: codec,
Common:Common{
pending: make(map[uint64]*Call),
serviceMap: make(map[string]*service),
},
}
go client.ReadLoop(codec)
return client
}
// Call invokes the named function, waits for it to complete, and returns its error status.
func (client *Client) Call(serviceMethod string, args interface{}, reply interface{}) error {
call := <-client.Go(serviceMethod, args, reply, make(chan *Call, 1)).Done
return call.Error
}
// Go invokes the function asynchronously. It returns the Call structure representing
// the invocation. The done channel will signal when the call is complete by returning
// the same Call object. If done is nil, Go will allocate a new channel.
// If non-nil, done must be buffered or Go will deliberately crash.
func (client *Client) Go(serviceMethod string, args interface{}, reply interface{}, done chan *Call) *Call {
call := new(Call)
call.ServiceMethod = serviceMethod
call.Args = args
call.Reply = reply
if done == nil {
done = make(chan *Call, 10) // buffered.
} else {
// If caller passes done != nil, it must arrange that
// done has enough buffer for the number of simultaneous
// RPCs that will be using that channel. If the channel
// is totally unbuffered, it's best not to run at all.
if cap(done) == 0 {
log.Panic("rpc: done channel is unbuffered")
}
}
call.Done = done
client.send(call)
return call
}
func (client *Client) send(call *Call) {
client.hedMutex.Lock()
defer client.hedMutex.Unlock()
// Register this call.
client.mutex.Lock()
if client.shutdown || client.closing {
call.Error = ErrShutdown
client.mutex.Unlock()
call.done()
return
}
seq := client.seq
client.seq++
client.pending[seq] = call
client.mutex.Unlock()
// Encode and send the request.
client.header.Seq = seq
client.header.ServiceMethod = call.ServiceMethod
err := client.codec.Write(&client.header, call.Args)
if err != nil {
client.mutex.Lock()
call = client.pending[seq]
delete(client.pending, seq)
client.mutex.Unlock()
if call != nil {
call.Error = err
call.done()
}
}
}
//func (client *Client) ReadLoop() (err error){
// var header Header
// for err == nil {
// header = Header{}
// log.Println(1)
// err := client.codec.ReadHeader(&header)
// if err != nil {
// log.Println("[error] read header error:", err)
// break
// }
// if header.IsResponse {
// log.Println("Response")
// log.Println("response.ServiceMethod ", header.ServiceMethod)
// seq := header.Seq
// client.mutex.Lock()
// call := client.pending[seq]
// delete(client.pending, seq)
// client.mutex.Unlock()
//
// switch {
// case call == nil:
// // We've got no pending call. That usually means that
// // WriteRequest partially failed, and call was already
// // removed; response is a client telling us about an
// // error reading request body. We should still attempt
// // to read error body, but there's no one to give it to.
// err = client.codec.ReadBody(nil)
// if err != nil {
// err = errors.New("reading error body: " + err.Error())
// }
// case header.Error != "":
// // We've got an error response. Give this to the request;
// // any subsequent requests will get the ReadResponseBody
// // error if there is one.
// call.Error = ServerError(header.Error)
// err = client.codec.ReadBody(nil)
// if err != nil {
// err = errors.New("reading error body: " + err.Error())
// }
// call.done()
// default:
// err = client.codec.ReadBody(call.Reply)
// if err != nil {
// call.Error = errors.New("reading body " + err.Error())
// }
// call.done()
// }
// }else{
// log.Println("Request")
// sending := new(sync.Mutex)
// service, mtype, argv, replyv, keepReading, err := client.docodeReqHeader(&header)
// if err != nil {
// if !keepReading {
// return err
// }
// // send a response if we actually managed to read a header.
// if &header != nil {
// client.sendResponse(sending, &header, invalidRequest, client.codec, err.Error())
// client.freeRequest(&header)
// }
// return err
// }
// go service.dispose(client.Free, sending, mtype, &header, argv, replyv, client.codec)
// }
// }
// // Terminate pending calls.
// client.hedMutex.Lock()
// //defer client.hedMutex.Unlock()
// client.mutex.Lock()
// //defer client.mutex.Unlock()
// client.shutdown = true
// closing := client.closing
// if err == io.EOF {
// if closing {
// err = ErrShutdown
// } else {
// err = io.ErrUnexpectedEOF
// }
// }
// for _, call := range client.pending {
// call.Error = err
// call.done()
// }
// client.mutex.Unlock()
// client.hedMutex.Unlock()
// if err != io.EOF && !closing {
// log.Println("rpc: client protocol error:", err)
// }
// return err
//}
//func (client *Client) ReadRequest(codec Codec) (service *service, mtype *methodType, req *Header, argv, replyv reflect.Value, keepReading bool, err error) {
// return client.readRequest(codec)
//}
//func (client *Client) docodeReqHeader(header *Header) (service *service, mtype *methodType, argv, replyv reflect.Value, keepReading bool, err error) {
// service, mtype, keepReading, err = client.decodeRequestHeader(header)
// if err != nil {
// if !keepReading {
// return
// }
// // discard body
// client.codec.ReadBody(nil)
// return
// }
//
// // Decode the argument value.
// argIsValue := false // if true, need to indirect before calling.
// if mtype.ArgType.Kind() == reflect.Ptr {
// argv = reflect.New(mtype.ArgType.Elem())
// } else {
// argv = reflect.New(mtype.ArgType)
// argIsValue = true
// }
// // argv guaranteed to be a pointer now.
// if err = client.codec.ReadBody(argv.Interface()); err != nil {
// return
// }
// if argIsValue {
// argv = argv.Elem()
// }
//
// replyv = reflect.New(mtype.ReplyType.Elem())
// return
//}
//
//func (client *Client) decodeRequestHeader(req *Header) (service *service, mtype *methodType, keepReading bool, err error) {
// // Grab the request header.
// //req = client.free.getRequest()
// //
// //req = header
//
// // We read the header successfully. If we see an error now,
// // we can still recover and move on to the next request.
// keepReading = true
//
// dot := strings.LastIndex(req.ServiceMethod, ".")
// if dot < 0 {
// err = errors.New("rpc: service/method request ill-formed: " + req.ServiceMethod)
// return
// }
// serviceName := req.ServiceMethod[:dot]
// methodName := req.ServiceMethod[dot+1:]
//
// // Look up the request.
// client.mu.RLock()
// service = client.serviceMap[serviceName]
// client.mu.RUnlock()
// if service == nil {
// err = errors.New("rpc: can't find service " + req.ServiceMethod)
// return
// }
// mtype = service.method[methodName]
// if mtype == nil {
// err = errors.New("rpc: can't find method " + req.ServiceMethod)
// }
// return
//}
func (client *Client) Close() error {
client.mutex.Lock()
if client.closing {
client.mutex.Unlock()
return ErrShutdown
}
client.closing = true
client.mutex.Unlock()
return client.codec.Close()
}
<file_sep>/example/server_test.go
package example
import (
"log"
"time"
"net"
"testing"
"github.com/penguinn/prpc/example/proto"
"github.com/penguinn/prpc"
"github.com/penguinn/prpc/codec"
)
type Front struct {
}
func (t *Front) Mul(args *proto.ProtoArgs, reply *proto.ProtoReply) error {
reply.C = args.A * args.B
return nil
}
func Test_Server(t *testing.T) {
server := prpc.NewServer()
server.RegisterName("Front", new(Front))
listener, err := net.Listen("tcp", ":8081")
if err != nil{
log.Println("[error] Listen tcp error:", err)
}
for{
conn, err := listener.Accept()
if err != nil{
log.Println("[error] Accept tcp error:", err)
}
codecTemp := codec.NewCodec(conn)
go server.ReadLoop(codecTemp)
time.Sleep(time.Second*2)
args := new(proto.AddArgs)
args.A = 7
args.B = 9
reply := new(proto.AddReply)
server.Call("Sjy.Add",codecTemp, args, reply)
log.Println("##########",reply)
}
log.Println(1111)
}
<file_sep>/example/client_test.go
package example
import (
"log"
"net"
"testing"
"sync"
"github.com/penguinn/prpc/example/proto"
"github.com/penguinn/prpc"
"github.com/penguinn/prpc/codec"
)
type Sjy struct {
}
func(p *Sjy) Add(args *proto.AddArgs, reply *proto.AddReply) error{
reply.C = args.A + args.B
return nil
}
func Test_Client(t *testing.T) {
var wg sync.WaitGroup
conn, err := net.Dial("tcp", ":8081")
if err != nil{
log.Println("[error] dial tcp error:", err)
}
codecTemp := codec.NewCodec(conn)
client := prpc.NewClientWithCodec(codecTemp)
client.RegisterName("Sjy", new(Sjy))
args := new(proto.ProtoArgs)
args.A = 5
args.B = 6
reply := new(proto.ProtoReply)
err = client.Call("Front.Mul", args, reply)
if err != nil {
log.Println(err)
}
log.Println("######",reply)
wg.Add(1)
wg.Wait()
}
<file_sep>/codec/codec.go
package codec
import (
"bufio"
"fmt"
"io"
"sync"
"github.com/penguinn/prpc"
"github.com/golang/protobuf/proto"
"github.com/penguinn/prpc/codec/wirepb"
)
const defaultBufferSize = 4 * 1024
type serverCodec struct {
mu sync.Mutex // exclusive writer lock
resp wirepb.ResponseHeader
enc *Encoder
w *bufio.Writer
req wirepb.RequestHeader
dec *Decoder
c io.Closer
}
type codec struct{
mu sync.Mutex // exclusive writer lock
resp wirepb.ResponseHeader
enc *Encoder
w *bufio.Writer
req wirepb.RequestHeader
dec *Decoder
c io.Closer
}
func NewCodec(rwc io.ReadWriteCloser) prpc.Codec {
w := bufio.NewWriterSize(rwc, defaultBufferSize)
r := bufio.NewReaderSize(rwc, defaultBufferSize)
return &codec{
enc: NewEncoder(w),
w: w,
dec: NewDecoder(r),
c: rwc,
}
}
func(c *codec) Write(header *prpc.Header, body interface{}) error{
c.mu.Lock()
defer c.mu.Unlock()
if header.IsResponse{
c.resp.Method = header.ServiceMethod
c.resp.Seq = header.Seq
c.resp.Error = header.Error
c.resp.IsResponse = header.IsResponse
err := encode(c.enc, &c.resp)
if err != nil {
return err
}
if err = encode(c.enc, body); err != nil {
return err
}
err = c.w.Flush()
return err
}else {
c.req.Method = header.ServiceMethod
c.req.Seq = header.Seq
c.req.IsResponse = header.IsResponse
err := encode(c.enc, &c.req)
if err != nil {
return err
}
if err = encode(c.enc, body); err != nil {
return err
}
err = c.w.Flush()
return err
}
}
func(c *codec) ReadHeader(header *prpc.Header) (error){
c.resp.Reset()
if err := c.dec.Decode(&c.resp); err != nil {
return err
}
header.ServiceMethod = c.resp.Method
header.Seq = c.resp.Seq
header.Error = c.resp.Error
header.IsResponse = c.resp.IsResponse
return nil
}
func(c *codec) ReadBody(body interface{}) error{
if pb, ok := body.(proto.Message); ok {
return c.dec.Decode(pb)
}
return fmt.Errorf("%T does not implement proto.Message", body)
}
func(c *codec) Close() error{
return c.c.Close()
}
func encode(enc *Encoder, m interface{}) (err error) {
if pb, ok := m.(proto.Message); ok {
return enc.Encode(pb)
}
return fmt.Errorf("%T does not implement proto.Message", m)
}<file_sep>/server.go
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
/*
Package rpc provides access to the exported methods of an object across a
network or other I/O connection. A server registers an object, making it visible
as a service with the name of the type of the object. After registration, exported
methods of the object will be accessible remotely. A server may register multiple
objects (services) of different types but it is an error to register multiple
objects of the same type.
Only methods that satisfy these criteria will be made available for remote access;
other methods will be ignored:
- the method's type is exported.
- the method is exported.
- the method has two arguments, both exported (or builtin) types.
- the method's second argument is a pointer.
- the method has return type error.
In effect, the method must look schematically like
func (t *T) MethodName(argType T1, replyType *T2) error
where T, T1 and T2 can be marshaled by encoding/gob.
These requirements apply even if a different codec is used.
(In the future, these requirements may soften for custom codecs.)
The method's first argument represents the arguments provided by the caller; the
second argument represents the result parameters to be returned to the caller.
The method's return value, if non-nil, is passed back as a string that the client
sees as if created by errors.New. If an error is returned, the reply parameter
will not be sent back to the client.
The server may handle requests on a single connection by calling ServeConn. More
typically it will create a network listener and call Accept or, for an HTTP
listener, HandleHTTP and http.Serve.
A client wishing to use the service establishes a connection and then invokes
NewClient on the connection. The convenience function Dial (DialHTTP) performs
both steps for a raw network connection (an HTTP connection). The resulting
Client object has two methods, Call and Go, that specify the service and method to
call, a pointer containing the arguments, and a pointer to receive the result
parameters.
The Call method waits for the remote call to complete while the Go method
launches the call asynchronously and signals completion using the Call
structure's Done channel.
Unless an explicit codec is set up, package encoding/gob is used to
transport the data.
Here is a simple example. A server wishes to export an object of type Arith:
package server
type Args struct {
A, B int
}
type Quotient struct {
Quo, Rem int
}
type Arith int
func (t *Arith) Multiply(args *Args, reply *int) error {
*reply = args.A * args.B
return nil
}
func (t *Arith) Divide(args *Args, quo *Quotient) error {
if args.B == 0 {
return errors.New("divide by zero")
}
quo.Quo = args.A / args.B
quo.Rem = args.A % args.B
return nil
}
The server calls (for HTTP service):
arith := new(Arith)
rpc.Register(arith)
rpc.HandleHTTP()
l, e := net.Listen("tcp", ":1234")
if e != nil {
log.Fatal("listen error:", e)
}
go http.Serve(l, nil)
At this point, clients can see a service "Arith" with methods "Arith.Multiply" and
"Arith.Divide". To invoke one, a client first dials the server:
client, err := rpc.DialHTTP("tcp", serverAddress + ":1234")
if err != nil {
log.Fatal("dialing:", err)
}
Then it can make a remote call:
// Synchronous call
args := &server.Args{7,8}
var reply int
err = client.Call("Arith.Multiply", args, &reply)
if err != nil {
log.Fatal("arith error:", err)
}
fmt.Printf("Arith: %d*%d=%d", args.A, args.B, reply)
or
// Asynchronous call
quotient := new(Quotient)
divCall := client.Go("Arith.Divide", args, quotient, nil)
replyCall := <-divCall.Done // will be equal to divCall
// check errors, print, etc.
A server implementation will often provide a simple, type-safe wrapper for the
client.
*/
package prpc
import (
//"bufio"
"log"
//"net"
"reflect"
"unicode"
"unicode/utf8"
)
const (
// Defaults used by HandleHTTP
DefaultRPCPath = "/_goRPC_"
DefaultDebugPath = "/debug/rpc"
)
// Precompute the reflect type for error. Can't use error directly
// because Typeof takes an empty interface value. This is annoying.
var typeOfError = reflect.TypeOf((*error)(nil)).Elem()
var debugLog = false
// Server represents an RPC Server.
type Server struct {
Common
}
// NewServer returns a new Server.
func NewServer() *Server {
return &Server{Common:Common{serviceMap: make(map[string]*service), pending:make(map[uint64]*Call)}}
}
//func NewServerWithCodec(codec Codec) *Server{
// return &Server{serviceMap:make(map[string]*service), codec:codec}
//}
// DefaultServer is the default instance of *Server.
var DefaultServer = NewServer()
// Is this an exported - upper case - name?
func isExported(name string) bool {
rune, _ := utf8.DecodeRuneInString(name)
return unicode.IsUpper(rune)
}
// Is this type exported or a builtin?
func isExportedOrBuiltinType(t reflect.Type) bool {
for t.Kind() == reflect.Ptr {
t = t.Elem()
}
// PkgPath will be non-empty even for an exported type,
// so we need to check the type name as well.
return isExported(t.Name()) || t.PkgPath() == ""
}
// A value sent as a placeholder for the server's response value when the server
// receives an invalid request. It is never decoded by the client since the Response
// contains an error when it is used.
var invalidRequest = struct{}{}
func (m *methodType) NumCalls() (n uint) {
m.Lock()
n = m.numCalls
m.Unlock()
return n
}
func (server *Server) Go(serviceMethod string, codec Codec, args interface{}, reply interface{}, done chan *Call) *Call {
call := new(Call)
call.ServiceMethod = serviceMethod
call.Args = args
call.Reply = reply
if done == nil {
done = make(chan *Call, 10) // buffered.
} else {
// If caller passes done != nil, it must arrange that
// done has enough buffer for the number of simultaneous
// RPCs that will be using that channel. If the channel
// is totally unbuffered, it's best not to run at all.
if cap(done) == 0 {
log.Panic("rpc: done channel is unbuffered")
}
}
call.Done = done
server.send(call, codec)
return call
}
// Call invokes the named function, waits for it to complete, and returns its error status.
func (server *Server) Call(serviceMethod string, codec Codec, args interface{}, reply interface{}) error {
call := <-server.Go(serviceMethod, codec, args, reply, make(chan *Call, 1)).Done
return call.Error
}
func (server *Server) send(call *Call, codec Codec) {
server.hedMutex.Lock()
defer server.hedMutex.Unlock()
// Register this call.
server.mutex.Lock()
if server.shutdown || server.closing {
call.Error = ErrShutdown
server.mutex.Unlock()
call.done()
return
}
seq := server.seq
server.seq++
server.pending[seq] = call
server.mutex.Unlock()
// Encode and send the request.
server.header.Seq = seq
server.header.ServiceMethod = call.ServiceMethod
server.header.IsResponse = false
err := codec.Write(&server.header, call.Args)
if err != nil {
server.mutex.Lock()
call = server.pending[seq]
delete(server.pending, seq)
server.mutex.Unlock()
if call != nil {
call.Error = err
call.done()
}
}
}
|
2e10705ac4b6991114b68d876ea0d08f4194d698
|
[
"Markdown",
"Go"
] | 11
|
Go
|
penguinn/prpc
|
177e541b586ba2a08e61def09c9cd286f3f961d6
|
7ab392e5d8a800cdb780c2b5395b4f44792ecaf7
|
refs/heads/master
|
<file_sep>let fluffyBunny = document.querySelector('.paragraph');
let listUl = document.querySelector('ul');
fluffyBunny.addEventListener('mouseover', () => {
//change it to uppercase
fluffyBunny.textContent = fluffyBunny.textContent.toUpperCase();
});
fluffyBunny.addEventListener('mouseout', () => {
//change it to lowercase
fluffyBunny.textContent = fluffyBunny.textContent.toLowerCase();
});
listUl.addEventListener('click', (event)=>{
//click the up button
//move the list item up
//we are targeting an element with the tagName as button
if(event.target.tagName == 'BUTTON'){
//if that targeted button has a class name of up select it
if(event.target.className == 'up'){
let li = event.target.parentNode;
let prevLi = li.previousElementSibling;
let ul = li.parentNode;
ul.insertBefore(li, prevLi);
}
//bonus
//click remove button
//remove the list item selected
}
});
<file_sep>let invitees = [];
class Invitee {
constructor (name, preconfirmed = false, shouldUpdateDisplay) {
this.name = name;
this.confirmed = preconfirmed;
this.add(shouldUpdateDisplay);
}
add (shouldUpdateDisplay) {
invitees.push(this);
if (shouldUpdateDisplay !== false) {
updateDisplay();
}
}
remove () {
let index = invitees.indexOf(this);
if (index >= 0) {
invitees.splice(index, 1);
}
updateDisplay();
}
changeName (newName) {
this.name = newName;
}
changeStatus (newStatus) {
this.confirmed = newStatus;
}
}
function saveToPersistentStorage () {
localStorage.setItem('invitees', JSON.stringify(invitees));
}
function getFromPersistentStorage () {
let inviteeList = localStorage.getItem('invitees');
if (!inviteeList) {
inviteeList = [];
} else {
inviteeList = JSON.parse(inviteeList);
}
let results = [];
inviteeList.forEach((invitee) => {
results.push(new Invitee(invitee.name, false, false));
});
return results;
}
invitees = getFromPersistentStorage();
const form = document.getElementById('registrar');
const input = form.querySelector('input');
const submitButton = form.querySelector('button');
const mainDiv = document.querySelector('.main');
const ul = document.getElementById('invitedList');
const div = document.createElement('div');
const filterLabel = document.createElement('label');
const filterCheckBox = document.createElement('input');
function updateDisplay () {
saveToPersistentStorage();
// Remove Elements
while (ul.firstChild) {
ul.removeChild(ul.firstChild);
}
// Create New Elements
invitees.forEach((invitee) => {
ul.appendChild(createInviteeLI(invitee));
});
}
function createInviteeLI (inviteeObject) {
// Create Invitee List Item
const invitee = document.createElement('li');
// Create Name Element
const nameEl = document.createElement('h4');
nameEl.innerText = inviteeObject.name;
invitee.appendChild(nameEl);
// Create Label
const confirmedLabel = document.createElement('label');
confirmedLabel.innerText = 'Confirmed';
// Create and Append Checkbox to Label
const confirmedCB = document.createElement('input');
confirmedCB.type = "checkbox";
confirmedCB.name = 'cb-' + inviteeObject.name;
confirmedLabel.appendChild(confirmedCB);
// Append Label to Invitee List Item
invitee.appendChild(confirmedLabel);
const removeButton = document.createElement('button')
removeButton.innerText = 'Remove';
removeButton.addEventListener('click', () => { inviteeObject.remove() });
invitee.appendChild(removeButton);
return invitee;
}
function removeInviteeLI (e) {
// Grab which LI the button is part of
const clickTarget = e.target;
const li = clickTarget.parentNode;
const ul = li.parentNode;
// Remove the li from the ul
ul.removeChild(li);
}
document.addEventListener('DOMContentLoaded', () => {
updateDisplay();
//Click submit
submitButton.addEventListener('click', (e)=>{
// WAIT DON'T DO WHAT YOU NORMALLY DO
e.preventDefault();
//take what's inside input box
let inviteeName = input.value;
//display it in invitedList ul
new Invitee(inviteeName);
});
});
<file_sep>document.addEventListener("DOMContentLoaded", function(){
var repoNames = [];
fetch('https://api.github.com/users/SparksD2145/repos')
.then(function(response){
return response.json();
})
.then(function(myJson){
var list = document.getElementById("list");
myJson.forEach(function(item){
repoNames.push(item.name);
list.innerHTML += "<li>" + item.name + "</li>";
})
});
fetch('https://api.github.com/users/SparksD2145')
.then(function (response) {
return response.json();
})
.catch(function () {
document.getElementById('user').innerHTML = '<h1 style="color: red;">ERROR LOADING USER DATA</h1>';
})
.then(function (responseJson) {
var userJson = JSON.stringify(responseJson, null, 4);
var template = `
<h1>${responseJson.login}</h1>
<img width="100" src="${responseJson.avatar_url}" />
<br /><pre>${userJson}</pre>
`;
document.getElementById('user').innerHTML = template;
var test = document.getElementById('test');
test.href = responseJson.html_url;
test.innerHTML = responseJson.html_url;
});
})
<file_sep>//The Feed your Dog Web App
//Variables:
//Initial Amount
let scoobySnacks = 100;
//Timer
let time = "6pm";
//Bowl
let foodLeftInBowl = 0;
//Quantity released each time
let amountReleased = 5;
//Dog
let dog;
//Amount eaten by dog
let amountEaten;
//Check Bowl
function checkBowl(){
if(foodLeftInBowl <= 10 && foodLeftInBowl >= 0) {
//We add dat food fam
foodLeftInBowl += amountReleased;
//Subtract amount released from ScoobySnacks
scoobySnacks = scoobySnacks - amountReleased;
console.log(foodLeftInBowl);
console.log(scoobySnacks);
}
else{
return false;
}
}
function amountEatenByDog(){
//amount in the bowl
//randomly eat a certain amount 1-4
//console.log to check amount eaten
}
//Function that releases the food based on time of day
function whatTime() {
let initialQuestion = prompt("What time is it?");
if (initialQuestion == time) {
//true
//Check the bowl
checkBowl();
//Dog eat
}
else {
//false
alert("Sorry homie you can't eat");
}
}
//calling the function
//eliminate this and create button on front end
whatTime();
|
f9d8ce55f15c590b42a9d2d3bcf5b56607b94c0d
|
[
"JavaScript"
] | 4
|
JavaScript
|
FCC-Dallas/JavaScript_Class_Projects
|
aae422cf51546863fba5ab592caffc37a4a9423a
|
93e5e47531e2cdd55ec87f009701a7206c602916
|
refs/heads/master
|
<file_sep>#test comment
# Grid-Mapping
Test programs for grid object identification and mapping
#test comment 2 - gd<file_sep>import AstarAlgo
c=1
b_object = []
temp_object = []
output_1 = [] # first required output
output_2 = {}
def first_out(board): # extract all the obstacles' and objects' positions
c = 1
for block in board:
if block != ('NoShape',):
x = c % 10
y = c / 10
output_1.append((x, y))
temp_object.append(block)
block += (c,)
b_object.append(block)
c += 1
def do_obstacle(board, tmap):
c = 1
for block in board:
if block == ('black', '4-sided', 3136.0):
x = c % 10
y = c / 10
tmap[y][x] = 1
c += 1
return tmap
def do_path(board):
for block in board:
if block[0:2] != ('black', '4-sided',):
xA = block[3] % 10
yA = block[3] / 10
output_2[(xA, yA)] = [] # required dictionary, but it's only updating the keys
<file_sep>import cv2
from Integrated import *
from AstarAlgo import *
'''
Program for extracting grid boxes from the grid,
and identify the shape, area and color of the object
'''
def check_task(img_task,gray_task,x):
counter = ''
img_extract_size = 60
img_extract_adjust = 0
check_value = []
str_temp = ""
for column in range(0, x):
for row in range(0, x):
i1 = column * img_extract_size + img_extract_adjust
i2 = (column + 1) * img_extract_size - img_extract_adjust
j1 = row * img_extract_size + img_extract_adjust
j2 = (row + 1) * img_extract_size - img_extract_adjust
img = img_task[i1:i2, j1:j2]
gray = gray_task[i1:i2, j1:j2]
color_value = img[30, 30]
if (color_value[0] < 10 and color_value[1] < 10 and color_value[2] > 240):
str_temp = ("red",)
elif (color_value[0] > 240 and color_value[1] < 10 and color_value[2] < 10):
str_temp = ("blue",)
elif (color_value[0] < 10 and color_value[1] > 240 and color_value[2] < 10):
str_temp = ("green",)
elif (color_value[0] < 10 and color_value[1] > 240 and color_value[2] > 240):
str_temp = ("yellow",)
elif (color_value[0] < 10 and color_value[1] < 10 and color_value[2] < 10):
str_temp = ("black",)
ret, thresh = cv2.threshold(gray, 160, 255, 1)
contours, h = cv2.findContours(thresh, 1, 2)
if len(contours) == 0:
str_temp = ("NoShape",)
else:
for cnt in contours:
if counter == str(i1) + str(j1):
break
approx = cv2.approxPolyDP(cnt, 0.0107 * cv2.arcLength(cnt, True), True)
if len(approx) == 3:
str_temp += ("Triangle",)
elif len(approx) == 4:
str_temp += ("4-sided",)
elif len(approx) > 10:
str_temp += ("Circle",)
cv2.imshow("win" + counter, thresh)
counter = str(i1) + str(j1)
str_temp +=(cv2.contourArea(cnt),)
check_value.append(str_temp)
return check_value
img_base_1 = cv2.imread('test_image3.jpg')
gray_base_1 = cv2.imread('test_image3.jpg', 0)
board_values = check_task(img_base_1, gray_base_1, 10)
#print b_object
first_out(board_values)
print output_1
print b_object
the_map = do_obstacle(board_values, the_map)
#do_path(b_object, the_map)
find_path(2, 8, 6, 5)
#printMap()
do_path(b_object)
print output_2.keys()
k = cv2.waitKey(0) & 0xFF
if k == 27:
cv2.destroyAllWindows()
<file_sep>'''
* Team Id : eYRC-LM#2719
* Author List : <NAME>, <NAME>, <NAME>
* Filename: Integrated_2.py
* Theme: Launch a Module
* Functions:
* updatePriority(), nextdistance(), estimate(), pathfind(), find_path(), printMap()
* check_task(), first_out(),do_obstacle(), do_path(), xyz()
* Global Variables: c, b_object,output_1,output_2,the_map,row, img_base_1, gray_base_1, board_values, key_val
'''
import cv2
from heapq import heappush, heappop # for priority queue
import math
import time
#variables declaration
c = 1 #counter variable
b_object = []
temp_object = []
output_1 = [] # first required output (list)
output_2 = {} # second required ouput (dictionary)
directions = 4 # number of possible directions to move on the map
if directions == 4:
dx = [1, 0, -1, 0]
dy = [0, 1, 0, -1]
elif directions == 8:
dx = [1, 1, 0, -1, -1, -1, 0, 1]
dy = [0, 1, 1, 1, 0, -1, -1, -1]
# map matrix
n = 10 # horizontal size
m = 10 # vertical size
the_map = []
row = [0] * n
for i in range(m):
the_map.append(list(row))
# extract a list of colour, shape and area from the image for all objects and obstacles
def check_task(img_task,gray_task,x):
counter = ''
img_extract_size = 60
img_extract_adjust = 0
check_value = []
str_temp = ""
for column in range(0, x):
for row in range(0, x):
i1 = column * img_extract_size + img_extract_adjust
i2 = (column + 1) * img_extract_size - img_extract_adjust
j1 = row * img_extract_size + img_extract_adjust
j2 = (row + 1) * img_extract_size - img_extract_adjust
img = img_task[i1:i2, j1:j2]
gray = gray_task[i1:i2, j1:j2]
color_value = img[30, 30]
if (color_value[0] < 10 and color_value[1] < 10 and color_value[2] > 240):
str_temp = ("red",)
elif (color_value[0] > 240 and color_value[1] < 10 and color_value[2] < 10):
str_temp = ("blue",)
elif (color_value[0] < 10 and color_value[1] > 240 and color_value[2] < 10):
str_temp = ("green",)
elif (color_value[0] < 10 and color_value[1] > 240 and color_value[2] > 240):
str_temp = ("yellow",)
elif (color_value[0] < 10 and color_value[1] < 10 and color_value[2] < 10):
str_temp = ("black",)
ret, thresh = cv2.threshold(gray, 160, 255, 1)
contours, h = cv2.findContours(thresh, 1, 2)
if len(contours) == 0:
str_temp = ("NoShape",)
else:
for cnt in contours:
if counter == str(i1) + str(j1):
break
approx = cv2.approxPolyDP(cnt, 0.0107 * cv2.arcLength(cnt, True), True)
if len(approx) == 3:
str_temp += ("Triangle",)
elif len(approx) == 4:
str_temp += ("4-sided",)
elif len(approx) > 10:
str_temp += ("Circle",)
cv2.imshow("win" + counter, thresh)
counter = str(i1) + str(j1)
str_temp +=(cv2.contourArea(cnt),)
check_value.append(str_temp)
return check_value
#node creation
class node:
# current position
xPos = 0
yPos = 0
# total distance already travelled to reach the node
distance = 0
# priority = distance + remaining distance estimate
priority = 0 # smaller: higher priority
def __init__(self, xPos, yPos, distance, priority):
self.xPos = xPos
self.yPos = yPos
self.distance = distance
self.priority = priority
def __lt__(self, other): # for priority queue
return self.priority < other.priority
def updatePriority(self, xDest, yDest):
self.priority = self.distance + self.estimate(xDest, yDest) * 10 # A*
# give better priority to going straight instead of diagonally
def nextdistance(self, i): # i: direction
if i % 2 == 0:
self.distance += 10
else:
self.distance += 14
# Estimation function for the remaining distance to the goal.
def estimate(self, xDest, yDest):
xd = xDest - self.xPos
yd = yDest - self.yPos
# Euclidian Distance
d = math.sqrt(xd * xd + yd * yd)
# Manhattan distance
# d = abs(xd) + abs(yd)
# Chebyshev distance
# d = max(abs(xd), abs(yd))
return (d)
# A-star algorithm.
# Path returned will be a string of digits of directions.
def pathFind(the_map, directions, dx, dy, xStart, yStart, xFinish, yFinish):
closed_nodes_map = [] # map of closed (tried-out) nodes
open_nodes_map = [] # map of open (not-yet-tried) nodes
dir_map = [] # map of directions
row = [0] * n
for i in range(m): # create 2d arrays
closed_nodes_map.append(list(row))
open_nodes_map.append(list(row))
dir_map.append(list(row))
pq = [[], []] # priority queues of open (not-yet-tried) nodes
pqi = 0 # priority queue index
# create the start node and push into list of open nodes
n0 = node(xStart, yStart, 0, 0)
n0.updatePriority(xFinish, yFinish)
heappush(pq[pqi], n0)
open_nodes_map[yStart][xStart] = n0.priority # mark it on the open nodes map
# A* search
while len(pq[pqi]) > 0:
# get the current node w/ the highest priority
# from the list of open nodes
n1 = pq[pqi][0] # top node
n0 = node(n1.xPos, n1.yPos, n1.distance, n1.priority)
x = n0.xPos
y = n0.yPos
heappop(pq[pqi]) # remove the node from the open list
open_nodes_map[y][x] = 0
# mark it on the closed nodes map
closed_nodes_map[y][x] = 1
# quit searching when the goal state is reached
# if n0.estimate(xFinish, yFinish) == 0:
if x == xFinish and y == yFinish:
# generate the path from finish to start
# by following the directions
path = ''
while not (x == xStart and y == yStart):
j = dir_map[y][x]
c = str((j + directions / 2) % directions)
path = c + path
x += dx[j]
y += dy[j]
return path
# generate moves (child nodes) in all possible directions
for i in range(directions):
xdx = x + dx[i]
ydy = y + dy[i]
if not (xdx < 0 or xdx > n - 1 or ydy < 0 or ydy > m - 1
or the_map[ydy][xdx] == 1 or closed_nodes_map[ydy][xdx] == 1):
# generate a child node
m0 = node(xdx, ydy, n0.distance, n0.priority)
m0.nextdistance(i)
m0.updatePriority(xFinish, yFinish)
# if it is not in the open list then add into that
if open_nodes_map[ydy][xdx] == 0:
open_nodes_map[ydy][xdx] = m0.priority
heappush(pq[pqi], m0)
# mark its parent node direction
dir_map[ydy][xdx] = (i + directions / 2) % directions
elif open_nodes_map[ydy][xdx] > m0.priority:
# update the priority info
open_nodes_map[ydy][xdx] = m0.priority
# update the parent direction info
dir_map[ydy][xdx] = (i + directions / 2) % directions
# replace the node
# by emptying one pq to the other one
# except the node to be replaced will be ignored
# and the new node will be pushed in instead
while not (pq[pqi][0].xPos == xdx and pq[pqi][0].yPos == ydy):
heappush(pq[1 - pqi], pq[pqi][0])
heappop(pq[pqi])
heappop(pq[pqi]) # remove the wanted node
# empty the larger size pq to the smaller one
if len(pq[pqi]) > len(pq[1 - pqi]):
pqi = 1 - pqi
while len(pq[pqi]) > 0:
heappush(pq[1 - pqi], pq[pqi][0])
heappop(pq[pqi])
pqi = 1 - pqi
heappush(pq[pqi], m0) # add the better node instead
return '' # no route found
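# Minimal sketch (not part of the original submission): pathFind() returns the route as a
# string of direction digits; with directions == 4, '0' = +x, '1' = +y, '2' = -x, '3' = -y.
# The helper below is only defined, never called, so behaviour is unchanged.
def _pathfind_demo():
    empty_map = [[0] * n for _ in range(m)]  # obstacle-free 10x10 grid
    demo_route = pathFind(empty_map, directions, dx, dy, 0, 0, 3, 0)
    print 'demo route (expected "000" on an empty map):', demo_route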
#path finding function
def find_path((xA, yA), temp):
#(xA, yA, xB, yB) = (0, 0, 0, 0)
xB = temp[0][0]
yB = temp[0][1]
print 'Map Size (X,Y): ', n, m
print 'Start: ', xA, yA
print 'Finish: ', xB, yB
t = time.time()
route = pathFind(the_map, directions, dx, dy, xA, yA, xB, yB)
print 'Time to generate the route (s): ', time.time() - t
print 'Route:', route
print len(route)
route1 = xyz(xA, yA, route)
output_2[(xA, yA)].append(route1)
output_2[(xA, yA)].append(len(route1))
# mark the route on the map
if len(route) > 0:
x = xA
y = yA
the_map[y][x] = 2
for i in range(len(route)):
j = int(route[i])
x += dx[j]
y += dy[j]
the_map[y][x] = 3
the_map[y][x] = 4
# display the map with the route
def printMap():
print 'Map:'
for y in range(m):
for x in range(n):
xy = the_map[y][x]
if xy == 0:
print '.', # space
elif xy == 1:
print 'O', # obstacle
elif xy == 2:
print 'S', # start
elif xy == 3:
print 'R', # route
elif xy == 4:
print 'F', # finish
print
# extract all the obstacles' and objects' positions
def first_out(board):
c = 1
for block in board:
if block != ('NoShape',):
x = c % 10
y = c / 10
output_1.append((x, y))
temp_object.append(block)
block += (c,)
b_object.append(block)
c += 1
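# Illustrative note (not part of the original submission): the counter c walks the 100
# cells in the order produced by check_task(), so with Python 2 integer division a block
# at c = 23 is recorded as (x, y) = (23 % 10, 23 / 10) = (3, 2).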
# present obstacles on map
def do_obstacle(board, tmap):
c = 1
for block in board:
if block == ('black', '4-sided', 3136.0):
x = c % 10
y = c / 10
tmap[y][x] = 1
c += 1
return tmap
# takes the b_object as input and
# updates the required dictionary with keys only (starting positions)
def do_path(board):
for block in board:
if block[0:2] != ('black', '4-sided',):
for block2 in board:
if block != block2 and block[0:3] == block2[0:3]:
output_2[(block[3] % 10, block[3] / 10)] = [(block2[3] % 10, block2[3] / 10)]
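# Illustrative note (not part of the original submission): do_path() keys the dictionary
# by an object's own cell and stores its matching partner's cell, e.g. if a red Triangle
# of the same area sits at counter values 23 and 67 the entry becomes
#   output_2[(3, 2)] = [(7, 6)]
# find_path() later appends the route coordinates and the route length to that list.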
def xyz(xA,yA,route1):
path_temp = []
xA1 = xA
yA1 = yA
for i in route1:
if i == '0': # right
xA1 += 1
elif i == '1': # down
yA1 += 1
elif i == '2': # left
xA1 -= 1
elif i == '3': # up
yA1 -= 1
path_temp.append((xA1,yA1))
return path_temp
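# Illustrative note (not part of the original submission): xyz() turns a route string back
# into grid coordinates, e.g.
#   xyz(2, 3, '011')  ->  [(3, 3), (3, 4), (3, 5)]
# (one '0' step right, then two '1' steps down from the start cell (2, 3)).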
# MAIN
img_base_1 = cv2.imread('test_image3.jpg')
gray_base_1 = cv2.imread('test_image3.jpg', 0)
board_values = check_task(img_base_1, gray_base_1, 10)
first_out(board_values) # list of all objects
print output_1
the_map = do_obstacle(board_values, the_map)
do_path(b_object)
key_val = list(output_2.keys())
for i in range(len(key_val)):
find_path(key_val[i], output_2[key_val[i]])
printMap()
print output_2
k = cv2.waitKey(0) & 0xFF
if k == 27:
cv2.destroyAllWindows()
<file_sep># -*- coding: utf-8 -*-
'''
**************************************************************************
* IMAGE PROCESSING (e-Yantra 2016)
* ================================
* This software is intended to teach image processing concepts
*
* Author: e-Yantra Project, Department of Computer Science
* and Engineering, Indian Institute of Technology Bombay.
*
* Software released under Creative Commons CC BY-NC-SA
*
* For legal information refer to:
* http://creativecommons.org/licenses/by-nc-sa/4.0/legalcode
*
*
* This software is made available on an “AS IS WHERE IS BASIS”.
* Licensee/end user indemnifies and will keep e-Yantra indemnified from
* any and all claim(s) that emanate from the use of the Software or
* breach of the terms of this agreement.
*
* e-Yantra - An MHRD project under National Mission on Education using
* ICT(NMEICT)
*
* ---------------------------------------------------
* Theme: Launch a Module
* Filename: task2_main.py
* Version: 1.0.0
* Date: November 28, 2016
* How to run this file: python task2_main.py
* Author: e-Yantra Project, Department of Computer Science and Engineering, Indian Institute of Technology Bombay.
* ---------------------------------------------------
* ====================== GENERAL Instruction =======================
* 1. Check for "DO NOT EDIT" tags - make sure you do not change function name of main().
* 2. Return should be a list named occupied_grids and a dictionary named planned_path.
* 3. Do not keep unnecessary print statements or imshow() calls in the final submission that you submit
* 4. Do not change the file name
* 5. Your program will be tested through a code test suite and graded based on the number of test cases passed
**************************************************************************
'''
import cv2
import numpy as np
# ******* WRITE YOUR FUNCTION, VARIABLES etc HERE
'''
* Team Id : eYRC-LM#2719
* Author List : <NAME>, <NAME>, <NAME>
* Filename: Integrated_2.py
* Theme: Launch a Module
* Functions:
* updatePriority(), nextdistance(), estimate(), pathfind(), find_path(), printMap()
* check_task(), first_out(),do_obstacle(), do_path(), xyz()
* Global Variables: c, b_object,output_1,output_2,the_map,row, img_base_1, gray_base_1, board_values, key_val
'''
import cv2
from heapq import heappush, heappop # for priority queue
import math
import time
# variables declaration
c = 1 # counter variable
b_object = []
temp_object = []
output_1 = [] # first required output (list)
output_2 = {} # second required output (dictionary)
directions = 4 # number of possible directions to move on the map
if directions == 4:
dx = [1, 0, -1, 0]
dy = [0, 1, 0, -1]
elif directions == 8:
dx = [1, 1, 0, -1, -1, -1, 0, 1]
dy = [0, 1, 1, 1, 0, -1, -1, -1]
# map matrix
n = 10 # horizontal size
m = 10 # vertical size
the_map = []
row = [0] * n
for i in range(m):
the_map.append(list(row))
# extract a list of colour, shape and area from the image for all objects and obstacles
def check_task(img_task, gray_task, x):
counter = ''
img_extract_size = 60
img_extract_adjust = 0
check_value = []
str_temp = ""
for column in range(0, x):
for row in range(0, x):
i1 = column * img_extract_size + img_extract_adjust
i2 = (column + 1) * img_extract_size - img_extract_adjust
j1 = row * img_extract_size + img_extract_adjust
j2 = (row + 1) * img_extract_size - img_extract_adjust
img = img_task[i1:i2, j1:j2]
gray = gray_task[i1:i2, j1:j2]
color_value = img[30, 30]
if (color_value[0] < 10 and color_value[1] < 10 and color_value[2] > 240):
str_temp = ("red",)
elif (color_value[0] > 240 and color_value[1] < 10 and color_value[2] < 10):
str_temp = ("blue",)
elif (color_value[0] < 10 and color_value[1] > 240 and color_value[2] < 10):
str_temp = ("green",)
elif (color_value[0] < 10 and color_value[1] > 240 and color_value[2] > 240):
str_temp = ("yellow",)
elif (color_value[0] < 10 and color_value[1] < 10 and color_value[2] < 10):
str_temp = ("black",)
ret, thresh = cv2.threshold(gray, 160, 255, 1)
contours, h = cv2.findContours(thresh, 1, 2)
if len(contours) == 0:
str_temp = ("NoShape",)
else:
for cnt in contours:
if counter == str(i1) + str(j1):
break
approx = cv2.approxPolyDP(cnt, 0.0107 * cv2.arcLength(cnt, True), True)
if len(approx) == 3:
str_temp += ("Triangle",)
elif len(approx) == 4:
str_temp += ("4-sided",)
elif len(approx) > 10:
str_temp += ("Circle",)
cv2.imshow("win" + counter, thresh)
counter = str(i1) + str(j1)
str_temp += (cv2.contourArea(cnt),)
check_value.append(str_temp)
return check_value
# node creation
class node:
# current position
xPos = 0
yPos = 0
# total distance already travelled to reach the node
distance = 0
# priority = distance + remaining distance estimate
priority = 0 # smaller: higher priority
def __init__(self, xPos, yPos, distance, priority):
self.xPos = xPos
self.yPos = yPos
self.distance = distance
self.priority = priority
def __lt__(self, other): # for priority queue
return self.priority < other.priority
def updatePriority(self, xDest, yDest):
self.priority = self.distance + self.estimate(xDest, yDest) * 10 # A*
# give better priority to going straight instead of diagonally
def nextdistance(self, i): # i: direction
if i % 2 == 0:
self.distance += 10
else:
self.distance += 14
# Estimation function for the remaining distance to the goal.
def estimate(self, xDest, yDest):
xd = xDest - self.xPos
yd = yDest - self.yPos
# Euclidean distance
d = math.sqrt(xd * xd + yd * yd)
# Manhattan distance
# d = abs(xd) + abs(yd)
# Chebyshev distance
# d = max(abs(xd), abs(yd))
return (d)
# A-star algorithm.
# Path returned will be a string of digits of directions.
def pathFind(the_map, directions, dx, dy, xStart, yStart, xFinish, yFinish):
closed_nodes_map = [] # map of closed (tried-out) nodes
open_nodes_map = [] # map of open (not-yet-tried) nodes
dir_map = [] # map of directions
row = [0] * n
for i in range(m): # create 2d arrays
closed_nodes_map.append(list(row))
open_nodes_map.append(list(row))
dir_map.append(list(row))
pq = [[], []] # priority queues of open (not-yet-tried) nodes
pqi = 0 # priority queue index
# create the start node and push into list of open nodes
n0 = node(xStart, yStart, 0, 0)
n0.updatePriority(xFinish, yFinish)
heappush(pq[pqi], n0)
open_nodes_map[yStart][xStart] = n0.priority # mark it on the open nodes map
# A* search
while len(pq[pqi]) > 0:
# get the current node w/ the highest priority
# from the list of open nodes
n1 = pq[pqi][0] # top node
n0 = node(n1.xPos, n1.yPos, n1.distance, n1.priority)
x = n0.xPos
y = n0.yPos
heappop(pq[pqi]) # remove the node from the open list
open_nodes_map[y][x] = 0
# mark it on the closed nodes map
closed_nodes_map[y][x] = 1
# quit searching when the goal state is reached
# if n0.estimate(xFinish, yFinish) == 0:
if x == xFinish and y == yFinish:
# generate the path from finish to start
# by following the directions
path = ''
while not (x == xStart and y == yStart):
j = dir_map[y][x]
c = str((j + directions / 2) % directions)
path = c + path
x += dx[j]
y += dy[j]
return path
# generate moves (child nodes) in all possible directions
for i in range(directions):
xdx = x + dx[i]
ydy = y + dy[i]
if not (xdx < 0 or xdx > n - 1 or ydy < 0 or ydy > m - 1
or the_map[ydy][xdx] == 1 or closed_nodes_map[ydy][xdx] == 1):
# generate a child node
m0 = node(xdx, ydy, n0.distance, n0.priority)
m0.nextdistance(i)
m0.updatePriority(xFinish, yFinish)
# if it is not in the open list then add into that
if open_nodes_map[ydy][xdx] == 0:
open_nodes_map[ydy][xdx] = m0.priority
heappush(pq[pqi], m0)
# mark its parent node direction
dir_map[ydy][xdx] = (i + directions / 2) % directions
elif open_nodes_map[ydy][xdx] > m0.priority:
# update the priority info
open_nodes_map[ydy][xdx] = m0.priority
# update the parent direction info
dir_map[ydy][xdx] = (i + directions / 2) % directions
# replace the node
# by emptying one pq to the other one
# except the node to be replaced will be ignored
# and the new node will be pushed in instead
while not (pq[pqi][0].xPos == xdx and pq[pqi][0].yPos == ydy):
heappush(pq[1 - pqi], pq[pqi][0])
heappop(pq[pqi])
heappop(pq[pqi]) # remove the wanted node
# empty the larger size pq to the smaller one
if len(pq[pqi]) > len(pq[1 - pqi]):
pqi = 1 - pqi
while len(pq[pqi]) > 0:
heappush(pq[1 - pqi], pq[pqi][0])
heappop(pq[pqi])
pqi = 1 - pqi
heappush(pq[pqi], m0) # add the better node instead
return '' # no route found
# path finding function
def find_path((xA, yA), temp):
# (xA, yA, xB, yB) = (0, 0, 0, 0)
xB = temp[0][0]
yB = temp[0][1]
print 'Map Size (X,Y): ', n, m
print 'Start: ', xA, yA
print 'Finish: ', xB, yB
t = time.time()
route = pathFind(the_map, directions, dx, dy, xA, yA, xB, yB)
print 'Route : ',
for i in route:
if i == '0': # right
print 'right ',
elif i == '1': # down
print 'down ',
elif i == '2': # left
print 'left ',
elif i == '3': # up
print 'up ',
print
print 'Length =',len(route)
print
print
route1 = xyz(xA, yA, route)
output_2[(xA, yA)].append(route1)
output_2[(xA, yA)].append(len(route1))
# mark the route on the map
if len(route) > 0:
x = xA
y = yA
the_map[y][x] = 2
for i in range(len(route)):
j = int(route[i])
x += dx[j]
y += dy[j]
the_map[y][x] = 3
the_map[y][x] = 4
# display the map with the route
def printMap():
print 'Map:'
for y in range(m):
for x in range(n):
xy = the_map[y][x]
if xy == 0:
print '.', # space
elif xy == 1:
print 'O', # obstacle
elif xy == 2:
print 'S', # start
elif xy == 3:
print 'R', # route
elif xy == 4:
print 'F', # finish
print
# extract all the obstacles' and objects' positions
def first_out(board):
c = 1
for block in board:
if block != ('NoShape',):
x = c % 10
y = c / 10
output_1.append((x, y))
temp_object.append(block)
block += (c,)
b_object.append(block)
c += 1
# present obstacles on map
def do_obstacle(board, tmap):
c = 1
for block in board:
if block == ('black', '4-sided', 3136.0):
x = c % 10
y = c / 10
tmap[y][x] = 1
c += 1
return tmap
# takes the b_object as input and
# updates the required dictionary with keys only (starting positions)
def do_path(board):
for block in board:
if block[0:2] != ('black', '4-sided',):
for block2 in board:
if block != block2 and block[0:3] == block2[0:3]:
output_2[(block[3] % 10, block[3] / 10 + 1)] = [(block2[3] % 10, block2[3] / 10 + 1)]
# else:
# output_2[(block[3] % 10, block[3] / 10 + 1)] = ['NO MATCH']
def xyz(xA, yA, route1):
path_temp = []
xA1 = xA
yA1 = yA
for i in route1:
if i == '0': # right
xA1 += 1
elif i == '1': # down
yA1 += 1
elif i == '2': # left
xA1 -= 1
elif i == '3': # up
yA1 -= 1
path_temp.append((xA1, yA1))
return path_temp
def main(image_filename):
"""
This function is the main program which takes image of test_images as argument.
Team is expected to insert their part of code as required to solve the given
task (function calls etc).
***DO NOT EDIT THE FUNCTION NAME. Leave it as main****
Function name: main()
******DO NOT EDIT name of these argument*******
Input argument: image_filename
Return:
1 - List of tuples which is the coordinates for occupied grid. See Task2_Description for detail.
2 - Dictionary with information of path. See Task2_Description for detail.
"""
occupied_grids = [] # List to store coordinates of occupied grid -- DO NOT CHANGE VARIABLE NAME
planned_path = {} # Dictionary to store information regarding path planning -- DO NOT CHANGE VARIABLE NAME
##### WRITE YOUR CODE HERE - STARTS
img_base_1 = cv2.imread(image_filename)
gray_base_1 = cv2.imread(image_filename, 0)
board_values = check_task(img_base_1, gray_base_1, 10)
first_out(board_values) # list of all objects
print output_1
    do_obstacle(board_values, the_map)  # mark obstacles on the shared map before planning (assumed omission; mirrors the standalone script above)
    do_path(b_object)
key_val = list(output_2.keys())
for i in range(len(key_val)):
if output_2[key_val[i]] == ['NO MATCH']:
output_2[key_val[i]].append([])
output_2[key_val[i]].append(0)
else:
find_path(key_val[i], output_2[key_val[i]])
# cv2.imshow("board_filepath - press Esc to close",cv2.imread(board_filepath)) - For check - remove
# cv2.imshow("container_filepath - press Esc to close",cv2.imread(container_filepath))
    # Expose the computed results through the required names (assumed mapping:
    # output_1 holds the occupied grids, output_2 the planned paths)
    occupied_grids = output_1
    planned_path = output_2
    # #### NO EDIT AFTER THIS
# DO NOT EDIT
# return Expected output, which is a list of tuples. See Task1_Description for detail.
return occupied_grids, planned_path
'''
Below part of the program will run whenever this file (task2_main.py) is run directly from the terminal/IDLE prompt.
'''
if __name__ == '__main__':
# change filename to check for other images
image_filename = "test_images/test_image3.jpg"
main(image_filename)
cv2.waitKey(0)
cv2.destroyAllWindows()
<repo_name>royaghorashi62/work-sample<file_sep>/Dissertation/C++.cpp
//Gerrymandering // FORMING WISCONSIN POLITICAL DISTRICTS
#include <cmath>
#include <vector>
#include <cstdlib>//needed for rand()
#include <ctime>//needed for time()
#include <math.h>//needed for ceil()
#include <cstdlib>
//#include<stdio.h>
#include <ilconcert/iloenv.h>
#include <ilcplex/ilocplex.h>//needed for CPLEX
#include <fstream>
#include <algorithm>
#include <iterator>
#include <iostream>
#include <array>
ILOSTLBEGIN
#define RC_EPS 1.0e-6
static void readData1 (const char* filename, IloInt& noDistricts,IloNumArray& subPop);
static void readData2 (const char* filename, IloNumArray& democrats );
static void readData3 (const char* filename, IloNumArray& republicans );
static void readData4 (const char* filename, IloNumArray& area );
static void readData5 (const char* filename, IloNumArray& perimeter );
static void readData6 (const char* filename, IloNumArray& voter );
static void readData7 (const char* filename, IloNumArray& amount );
static void readData8 (const char* filename, IloNumArray2& bLength );
static void readData9 (const char* filename, IloNumArray2& adjacency );
static void readData10 (const char* filename, IloNumArray& black );
static void readData11 (const char* filename, IloNumArray& hispanic );
static void readData12 (const char* filename, IloNumArray& address );
static void readData13 (const char* filename, IloNumArray& senAddress );
static void readData14 (const char* filename, IloNumArray2& split );
// Definition of vectors for checking contiguity
static vector<int> traversed_nodes;
static vector<int> districts_to_check_contiguity;
static vector<int> subs_in_district;
int traverseSubs(int node, IloNumArray2 adjacency);
int listsizeHP;
int listsizeLP;
int listsizeHG;
bool detailed_output=false;
bool very_detailed_output=false;
bool initial_output=false;
bool edges_output=false;
bool neighbor_output=false;
bool contiguety_output=false;
bool evaluation_output=false;
bool sortinrev(const pair<int,int> &a, const pair<int,int> &b)
{
return (a.first > b.first);
}
/// MAIN PROGRAM ///
int main(int argc, char **argv)
{
unsigned long int startTime=time(NULL);//random is system time in sec.
unsigned long int random= time(NULL);//fixed system time
//unsigned long int random= 1572734411;//fixed system time
//cout<<"Our random number seed = "<<random<<endl;
srand(random);//uses random to set the seed for random # generation
IloEnv env;
unsigned long int initialtime=0;
long int myTime0 = time(NULL);
try
{
IloInt i, j, k;
IloInt noDistricts;
IloNum Vote;
IloNum Pbar;
IloNumArray subPop(env);
IloNumArray democrats(env);
IloNumArray2 bLength(env);
IloNumArray amount(env);
IloNumArray area(env);
IloNumArray perimeter(env);
IloNumArray voter(env);
IloNumArray republicans(env);
IloNumArray2 adjacency(env);
IloNumArray corners(env);
IloNumArray black(env);
IloNumArray hispanic(env);
IloNumArray address(env);
IloNumArray senAddress(env);
IloNumArray2 split(env);
//Weights
IloNum w1, w2, w3, w4;
w1 = 1;
w2 = 0.05;
w3 = 0;
w4 = 0;
// Simulated Annealing Method:///////////////////////////////////////////////
//(1) The following parameters relate to the simulated annealling procedure.
const int NUM_ITERATIONS = 100;
const double TEMP_FACTOR = 0.997; //0.9 //0.99 //0.999 //0.995
const double START_TEMPERATURE = 0.01; //0.01; //1000 //400 //50 // 300 // 100 //200
//(2) Use a simulated annealing neighborhood search procedure to improve the initial solution.
double acceptanceProbability;
double temperature = START_TEMPERATURE;
double FinalTemp = 0.01; //0.1 // If needed
long int steps = 0;
long int iterations = 0;
bool accept;
bool reject;
bool given;
int noOfGeneration=0;
int noOfFeasibleSolution=0;
long int bestIteration = 0;
long int bestStep = 0;
double populationDeviation=0.02; //districts' population deviation
int noInitialGeneration=5; //no of initial maps
long int algorithmRunTime=60;
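// Illustrative note (not from the original code): with geometric cooling the temperature
// after k outer iterations is roughly START_TEMPERATURE * TEMP_FACTOR^k, e.g.
// 0.01 * 0.997^100 ~ 0.0074.  A move that worsens the penalty by d is then typically
// accepted with probability exp(-d / temperature); the exact acceptance rule applied
// later in the search and these starting values are the author's tuning choices.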
// Vectors for calculating districts penalty
int n;
vector < double > distPop;
vector < double > distPerimeter;
vector < double > distDem;
vector < double > distVote;
vector < double > distArea;
vector < double > distFPop;
vector < double > distFPerimeter;
vector < double > distFDem;
vector < double > distFVote;
vector < double > distFArea;
vector < double > Penalty;
vector < double > PenaltyP;
vector < double > PenaltyV;
vector < double > PenaltyCom;
vector < double > PenaltyR;
vector < double > OriginalPenalty;
vector < double > OriginalPenaltyP;
vector < double > OriginalPenaltyV;
vector < double > OriginalPenaltyCom;
vector < double > OriginalPenaltyR;
vector < double > OriginalFPenalty;
vector < double > OriginalFPenaltyP;
vector < double > OriginalFPenaltyV;
vector < double > OriginalFPenaltyCom;
vector < double > OriginalFPenaltyR;
vector < double > assignedMicroadjacency;
vector < int > dependentWards;
vector <vector <int >> accompanyingWards; // to store wards with only one neighbor
vector <vector <int >> splitWards; // to store split wards
vector <vector <int >> neighbors; // to store split wards
vector < double > insideWards;
vector < double > myNotContiguousWards;
///////////////////////////////////////////////////////////////////////////
// Variables for normalization
double meanP=0;
double meanV=0;
double meanVE=0;
double meanC=0;
double meanR=0;
double Sd1=0;
double Sd2=0;
double Sd3=0;
double SdP;
double SdV;
double SdVE;
double SdC;
double SdR;
///////////////////////////////////////////////////////////////////////////////
//reading data from files
if ( argc > 1 )
readData1 (argv[1], noDistricts, subPop);
else
readData1("C:\\Users\\royag\\Documents\\MyDissFiles\\WisconsinData_Processing\\BeChecked\\2018data\\PopCor1.txt", noDistricts, subPop);
if ( argc > 1 )
readData2 (argv[1], democrats);
else
readData2("C:\\Users\\royag\\Documents\\MyDissFiles\\WisconsinData_Processing\\BeChecked\\2018data\\DemocratsN.txt", democrats);
/*
if ( argc > 1 )
readData3 (argv[1], republicans);
else
readData3("C:\\Users\\royag\\Dropbox\\UWM\\Dissertation\\Gerrymandering\\Project 4\\WisconsinData\\Version1\\FinalReadableData\\Republicans.txt", republicans);
*/
if ( argc > 1 )
readData4 (argv[1], area);
else
readData4("C:\\Users\\royag\\Documents\\MyDissFiles\\WisconsinData_Processing\\BeChecked\\2018data\\Area.txt", area);
if ( argc > 1 )
readData5 (argv[1], perimeter);
else
readData5("C:\\Users\\royag\\Documents\\MyDissFiles\\WisconsinData_Processing\\BeChecked\\2018data\\Perimeter.txt", perimeter);
if ( argc > 1 )
readData6 (argv[1], voter);
else
readData6("C:\\Users\\royag\\Documents\\MyDissFiles\\WisconsinData_Processing\\BeChecked\\2018data\\VotersN.txt", voter);
if ( argc > 1 )
readData7 (argv[1], amount);
else
readData7("C:\\Users\\royag\\Documents\\MyDissFiles\\WisconsinData_Processing\\BeChecked\\2018data\\amount.txt", amount);
if ( argc > 1 )
readData8 (argv[1],bLength);
else
readData8("C:\\Users\\royag\\Documents\\MyDissFiles\\WisconsinData_Processing\\BeChecked\\2018data\\length.txt", bLength);
if ( argc > 1 )
readData9 (argv[1],adjacency);
else
readData9("C:\\Users\\royag\\Documents\\MyDissFiles\\WisconsinData_Processing\\BeChecked\\2018data\\adjacent.txt", adjacency);
if ( argc > 1 )
readData10 (argv[1],black);
else
readData10("C:\\Users\\royag\\Documents\\MyDissFiles\\WisconsinData_Processing\\BeChecked\\2018data\\BlackN.txt", black);
if ( argc > 1 )
readData11 (argv[1],hispanic);
else
readData11("C:\\Users\\royag\\Documents\\MyDissFiles\\WisconsinData_Processing\\BeChecked\\2018data\\HispanicN.txt", hispanic);
if ( argc > 1 )
readData12 (argv[1],address);
else
readData12("C:\\Users\\royag\\Documents\\MyDissFiles\\WisconsinData_Processing\\BeChecked\\2018data\\RepAddress.txt", address);
if ( argc > 1 )
readData13 (argv[1],senAddress);
else
readData13("C:\\Users\\royag\\Documents\\MyDissFiles\\WisconsinData_Processing\\BeChecked\\2018data\\SenAddress.txt", senAddress);
if ( argc > 1 )
readData14 (argv[1],split);
else
readData14("C:\\Users\\royag\\Documents\\MyDissFiles\\WisconsinData_Processing\\BeChecked\\2018data\\Splt9.txt", split);
// find the number of micro_districts
IloInt numSubs = amount.getSize()-1;
cout << "numSubs=" << numSubs << endl;
//cout << "democrats=" << democrats << endl;
/* ////////////////////Data Correction for splt6, 7, 10////////////////////
adjacency [4761][4664]=0;
adjacency [4761][4665]=0;
adjacency [4761][4688]=0;
adjacency [4761][4735]=0;
adjacency [4761][4771]=0;
adjacency [4761][4774]=0;
adjacency [5818][5883]=0;
adjacency [5818][4797]=0;
adjacency [4664][4761]=0;
adjacency [4665][4761]=0;
adjacency [4688][4761]=0;
adjacency [4735][4761]=0;
adjacency [4771][4761]=0;
adjacency [4774][4761]=0;
adjacency [5883][5818]=0;
adjacency [4797][5818]=0;
bLength [4761][4664]=0;
bLength [4761][4665]=0;
bLength [4761][4688]=0;
bLength [4761][4735]=0;
bLength [4761][4771]=0;
bLength [4761][4774]=0;
bLength [5818][5883]=0;
bLength [5818][4797]=0;
bLength [4664][4761]=0;
bLength [4665][4761]=0;
bLength [4688][4761]=0;
bLength [4735][4761]=0;
bLength [4771][4761]=0;
bLength [4774][4761]=0;
bLength [5883][5818]=0;
bLength [4797][5818]=0;
//*////////////////////////////////////
//Correction of neighbors
adjacency [3368][4311]=0;
adjacency [4311][3368]=0;
bLength [3368][4311]=0;
bLength [4311][3368]=0;
///////////////////////////////////////
// Find average population, average number of microdistricts in each district, and the Democrats' statewide vote share
double GapD; // Democrats gap
double GapR; // Republicans gap
double sumDGap;
double sumRGap;
int Dem; // # of dem seats
int Rep; // # of rep seats
double VD=0; // Total Democrats
double VR; // Total Republicans
double TVoter=0; // Total voter
double Pw; // Average population for each ward(micro district)
double Pm=0; // micro_district population
for(int m=0; m< numSubs ; m++)
Pm =Pm+subPop[m];
Pbar= Pm / noDistricts; // Average Population (Per district)
//n = numSubs / noDistricts;
Pw=Pm/numSubs; // Average population per micro-district(ward)
for(int m=0; m< numSubs ; m++){
//republicans[m]=voter[m]-democrats[m];
VD=VD+democrats[m]; // Total Democrats
TVoter=TVoter+voter[m]; //Total voters
}
VR=TVoter-VD; // Total Republicans
Vote = VD / TVoter; // percentage of Democrats in state
double AvgNumDemocratsPerDistrict= VD/noDistricts; // Average Democrats per district
cout << "percentage of Democrats in state =" << Vote << endl;
cout << "AveragePopPerDistrict=" << Pbar << endl;
cout << "AvgNumDemocratsPerDistrict=" << AvgNumDemocratsPerDistrict << endl;
cout << "Pm=" << Pm << endl;
cout << "Total voters in state" << TVoter << endl;
//Create an output file for displaying the final results.
ofstream myOutputFile;
char excelOutputFileName[] = {'f','i','l','e','0','0','0','.','t','x','t','\0'};
for (i=0;i<=10;i++)
cout << excelOutputFileName[i];
cout << endl;
myOutputFile.open(excelOutputFileName);
if (myOutputFile.fail())
{
cerr << "File called " << excelOutputFileName << " could not be opened." << endl;
return 1;
}
/*
ofstream myOutputFile1;
char excelOutputFile1Name[] = {'f','i','l','e','1','1','1','.','t','x','t','\0'};
for (i=0;i<=10;i++)
cout << excelOutputFile1Name[i];
cout << endl;
myOutputFile1.open(excelOutputFile1Name);
if (myOutputFile1.fail())
{
cerr << "File called " << excelOutputFile1Name << " could not be opened." << endl;
return 1;
}
ofstream myOutputFile2;
char excelOutputFile2Name[] = {'f','i','l','e','2','2','2','.','t','x','t','\0'};
for (i=0;i<=10;i++)
cout << excelOutputFile2Name[i];
cout << endl;
myOutputFile2.open(excelOutputFile2Name);
if (myOutputFile2.fail())
{
cerr << "File called " << excelOutputFile2Name << " could not be opened." << endl;
return 1;
}
ofstream myOutputFile3;
char excelOutputFile3Name[] = {'f','i','l','e','3','3','3','.','t','x','t','\0'};
for (i=0;i<=10;i++)
cout << excelOutputFile3Name[i];
cout << endl;
myOutputFile3.open(excelOutputFile3Name);
if (myOutputFile3.fail())
{
cerr << "File called " << excelOutputFile3Name << " could not be opened." << endl;
return 1;
}
*/
myOutputFile <<"Our random number seed = "<<random<<endl;
//---------------------------------------------------------------------------------------------------------------------
//
// Generate the initial feasible solution
// first index refers to rows and second index refers to columns
int initial [6977][99];
int binitial [6977][99];
double firstPenalty; // Weighted Normalized total penalty
double firstPenaltyP; // Weighted Normalized total Pop penalty
double firstPenaltyV; // Weighted Normalized total Political fairness penalty
double firstPenaltyCom; // Weighted Normalized total compactness penalty
double firstPenaltyR; // Weighted Normalized total Residential penalty
double OriginalFirstPenalty; // unWeighted Normalized total penalty
double OriginalFirstPenaltyP; //unWeighted Normalized total Pop penalty
double OriginalFirstPenaltyV; // unWeighted Normalized total Political fairness penalty
double OriginalFirstPenaltyCom; // unWeighted Normalized total compactness penalty
double OriginalFirstPenaltyR; // unWeighted Normalized total Residential penalty
double beforeNormalFirstPenalty;//Unweighted not Normalized total penalty
double beforeNormalFirstPop; //unWeighted not Normalized total Pop penalty
double beforeNormalFirstVote; // unWeighted not Normalized total Political fairness penalty
double beforeNormalFirstCom; // unWeighted not Normalized total compactness penalty
double beforeNormalFirstRes; // unWeighted not Normalized total Residential penalty
//double beforeNormalFirstVote1;
//2 dim vectors for generating initial districts
static vector<int> district_seeds;
vector<vector<int>> district_nodes;
//2 dim vectors for district containts and edges
vector<vector<int>> corners_nodes;
vector<vector<int>> district_wards;
vector<vector<int>> Tempdistrict_wards;
vector<vector<int>> best_district_wards;
//2 dime vectors for storing the district of each words;
vector<vector<int>> ward_districts;
vector<vector<int>> Tempward_districts;
//2 dime vectors for neighbors of final best district
vector<vector<int>> neighbor_districts;
//vectors for storing districts' population, democrats' votes , Area and perimeter
vector<double> Popvect;
vector<double> Demvect;
vector<double> Repvect;
vector<double> Areavect;
vector<double> Perivect;
vector<double> RGapvect;
vector<double> DGapvect;
//Temporary vectors for storing districts' population, dem votes , Area and perimeter
vector<double> TPopvect;
vector<double> TDemvect;
vector<double> TRepvect;
vector<double> TAreavect;
vector<double> TPerivect;
vector<double> TRGapvect;
vector<double> TDGapvect;
vector<double> Address;
for (int i=0; i< noDistricts ; i++){
Address.push_back(address[i]);
}
vector<double> SenAddress;
for (int i=0; i< 33 ; i++){
SenAddress.push_back(senAddress[i]);
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////
//Find the wards with only one neighbor////////
///////////////////////////////////////////////
int W;
int cc;
accompanyingWards.resize(numSubs);
for(i=0; i<numSubs; i++){
W=0;
for(j=0; j<numSubs; j++){
W=W+adjacency[i][j];
if(adjacency[i][j]==1){
cc=j;
}
}
if (W==1){
accompanyingWards[cc].push_back(i);
//cout << "cc=" << cc << endl;
}
}
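// Illustrative note (not from the original code): accompanyingWards is indexed by the
// adjacent/enclosing ward.  If ward 42's only neighbour is ward 7, the loop above finds
// W == 1 with cc == 7 and pushes 42 onto accompanyingWards[7], so single-neighbour wards
// always travel with the ward they hang off of.  (The ward numbers here are made up.)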
if (detailed_output==false){
int e=0;
for(i=0; i<numSubs; i++){
e=e+accompanyingWards[i].size();
}
cout << "accompanyingWards.size()=" << e << endl;
}
///////////////////////////////////////////////////////
//Find the wards which are inside another ward (one neighbor)////////
///////////////////////////////////////////////////////
for (int i=0; i <accompanyingWards.size(); i++){
if (accompanyingWards[i].size() > 0){
if(ceil(perimeter[accompanyingWards[i][0]])==ceil(bLength[i][accompanyingWards[i][0]])){
insideWards.push_back(accompanyingWards[i][0]);
}
//else{
//myOutputFile3 << "to be checked" << accompanyingWards[i][0] << endl;
//}
}
}
if (detailed_output==false){
cout << "insideWards.size()=" << insideWards.size() << endl;
}
////////////////////////////////////////////////////////////
// store split wards (excluding those already considered as inside wards)
/////////////////////////////////////////////////////////////
vector<vector<int>> MySplit;
splitWards.resize(numSubs);
MySplit.resize(numSubs);
for(i=0; i<numSubs; i++){
if(std::find(insideWards.begin(), insideWards.end(),i) == insideWards.end()){
for(j=0; j<numSubs; j++){
if(std::find(insideWards.begin(), insideWards.end(), j) == insideWards.end()){
if(split[i][j]==1){
MySplit[i].push_back(j);
}
}
else{
continue;
}
}
}
else{
continue;
}
}
///////////////////////////////////////////
/*
for(i=0; i<numSubs; i++)
for(j=0; j<MySplit[i].size(); j++){
if (i==4634){
cout << "4434 and " << j << "and" << MySplit[i][j]<< endl;
}
}
*/
/////////////////Test//////////////////////////////////
int e=0;
for(i=0; i<numSubs; i++)
for(j=0; j<MySplit[i].size(); j++){
for (k=0; k< insideWards.size() ; k++) {
if(std::find(splitWards[i].begin(), splitWards[i].end(),insideWards[k]) == splitWards[i].end()){
continue;
}
else{
cout << insideWards[k]<< endl;
e=e+1;
}
}
}
cout << "myinside=" << e << endl;
//////////////////////////////////////////////////////////
///////////////////////////////////////////////////////
////Store a chain of wards which move together
/////////////////////////////////////////////////////////
for (int i=0; i<numSubs ; i++){
if(MySplit[i].size()>0){
int cs=0;
int cf=MySplit[i].size();
for (int j=0; j < MySplit[i].size(); j++) {
splitWards[i].push_back( MySplit[i][j] );
}
for (int k=cs; k<cf ;k++) {
//cout << giving_node_split[i] << endl;
//cout << "splitWards[giving_node_split[i]].size()"<< splitWards[giving_node_split[i]].size() << endl;
int h = splitWards[i][k];
if (MySplit[h].size() > 0) {
for (int jj=0 ; jj < MySplit[h].size(); jj++){
if(std::find(splitWards[i].begin(), splitWards[i].end(), MySplit[h][jj]) == splitWards[i].end() && MySplit[h][jj]!=i ){
splitWards[i].push_back(MySplit[h][jj]);
}
}
} else{
continue;
}
cs=cf;
cf=splitWards[i].size();
}
}
}
//*
//for(i=0; i<numSubs; i++){
for(j=0; j<splitWards[1234].size(); j++){
cout <<"split1234" <<j << splitWards[1234][j]<< endl;
}
// }
//*/
e=0;
for(i=0; i<numSubs; i++){
e=e+splitWards[i].size();
}
cout << "splitWards.size()=" << e << endl;
////////////////////Add accompanying wards with one neighbor that are not inside another ward to the split chain///////////
for (int i=0; i <accompanyingWards.size(); i++){
if (accompanyingWards[i].size() > 0){
if(ceil(perimeter[accompanyingWards[i][0]])!=ceil(bLength[i][accompanyingWards[i][0]])){
accompanyingWards[i].erase(std::remove(begin(accompanyingWards[i]), end(accompanyingWards[i]), accompanyingWards[i][0]), end(accompanyingWards[i]));
if(std::find(splitWards[i].begin(), splitWards[i].end(),accompanyingWards[i][0]) == splitWards[i].end()){
splitWards[accompanyingWards[i][0]].push_back(i);
}
if(std::find(splitWards[accompanyingWards[i][0]].begin(), splitWards[accompanyingWards[i][0]].end(),i) == splitWards[accompanyingWards[i][0]].end()){
splitWards[accompanyingWards[i][0]].push_back(i);
}
}
}
}
//////////////////////////////////////////////////////////////////
/*
for(i=0; i<numSubs; i++){
for(j=0; j<splitWards[i].size(); j++){
cout <<i <<"split" <<j << splitWards[i][j]<< endl;
}
}
*/
e=0;
for(i=0; i<numSubs; i++){
e=e+splitWards[i].size();
}
cout << "splitWards.size()=" << e << endl;
/////////////////////////////////////////////////
// generate 100 initial **********************//
////////////////////////////////////////////////
int iniGenNumber = 0; // Number of initial districts
double bestbefnormpen = 999999999999999999;
// Vectors for mean and standard deviations(population, political fairness and compactness) for 100 initials
vector<double> listOfmeanP , listOfmeanV, listOfmeanVE , listOfmeanC , listOfmeanR , listOfSdP , listOfSdV, listOfSdC, listOfSdR ,listOfInia ,listOfInib ;
do {
firstPenalty=0; // Weighted Normalized total penalty
firstPenaltyP=0; // Weighted Normalized total Pop penalty
firstPenaltyV=0; // Weighted Normalized total Political fairness penalty
firstPenaltyCom=0; // Weighted Normalized total compactness penalty
firstPenaltyR=0;
OriginalFirstPenalty=0; // unWeighted Normalized total penalty
OriginalFirstPenaltyP=0; //unWeighted Normalized total Pop penalty
OriginalFirstPenaltyV=0; // unWeighted Normalized total Political fairness penalty
OriginalFirstPenaltyCom=0; // unWeighted Normalized total compactness penalty
OriginalFirstPenaltyR=0; // unWeighted Normalized total compactness penalty
beforeNormalFirstPenalty=0;//Unweighted not Normalized total penalty
beforeNormalFirstPop=0; //unWeighted Normalized total Pop penalty
beforeNormalFirstVote=0; // unWeighted Normalized total Political fairness penalty
beforeNormalFirstCom=0; // unWeighted Normalized total compactness penalty
beforeNormalFirstRes=0;
//double beforeNormalFirstVote1;
iniGenNumber= iniGenNumber+1;
// put the elements of initial matrix equal zero
for (i=0; i < numSubs; i++)
for(j=0; j<noDistricts; j++)
initial[i][j]=0;
// -- START: dynamic initialization of districts based on traversing
for (int ii = 0; ii < noDistricts; ii++) {
if (initial_output==true){
cout << "sub no " << ii;
//myOutputFile1 << "sub no " << ii;
}
vector<int> tmp_vec;
int tmp_seed;
do {
tmp_seed = rand() % numSubs;
} while (std::find(district_seeds.begin(), district_seeds.end(), tmp_seed) != district_seeds.end() || splitWards[tmp_seed].size() > 0 || accompanyingWards[tmp_seed].size()>0);
district_seeds.push_back(tmp_seed);
initial[tmp_seed][ii] = 1;
tmp_vec.push_back(tmp_seed); // create a single-member vector using the new seed
district_nodes.push_back(tmp_vec); // put the temp vector in the 2D vector
vector<int>().swap(tmp_vec); // free the temp vector to avoid memory hog
}
if (initial_output==true){
//myOutputFile1 << "\n\n District_seeds: ";
cout << "\n\n District_seeds: ";
for (int i = 0; i < district_nodes.size(); i++) {
for (int j = 0; j < district_nodes[i].size(); j++) {
//myOutputFile1 << district_nodes[i][j] << ", ";
cout << district_nodes[i][j] << ", ";
}
}
}
bool microdists_left;
do {
microdists_left = false;
// district_seeds_temp.clear();
for (int i = 0; i < district_nodes.size(); i++) {
if ( district_nodes[i].size() == 0 ) {
if (initial_output==true){
//myOutputFile1 << "\nDist. " << i << " has run out of bLength to grow.";
cout << "\nDist. " << i << " has run out of bLength to grow.";
}
continue; // no free neighbors left to grab, go to next seed
} else {
microdists_left = true;
}
vector<int> temp_neighbor_candidate;
for (int k = 0; k < numSubs; k++) {
// found a neighbor sub?
if (adjacency[district_nodes[i].back()][k] == 1) {
if (initial_output==true){
//myOutputFile1 << "\n Neighbor for node " << district_nodes[i].back() << ": node " << k;
cout << "\n Neighbor for node " << district_nodes[i].back() << ": node " << k;
}
// now is it unclaimed? i is index, *i is sub number
bool already_taken = false;
for (int m = 0; m < noDistricts; m++) {
if (initial[k][m] == 1) {
// check to see if K is already taken by one of the districts
if (initial_output==true){
//myOutputFile1 << " - already belongs to dist. " << m;
cout << " - already belongs to dist. " << m;
}
already_taken = true;
break;
}
}
if (already_taken == true) {
continue;
} else {
// push back all neighbors of last element of the vector
temp_neighbor_candidate.push_back(k);
}
}
}
if (initial_output==true){
//myOutputFile1 << "temp_neighbor_candidate.size()=" << temp_neighbor_candidate.size() << endl;
cout << "temp_neighbor_candidate.size()=" << temp_neighbor_candidate.size() << endl;
}
// check to see if the last element of vector did end up having at least one neighbor (above code)?
if ( temp_neighbor_candidate.size() > 0 ) {
// if yes, now pick one from the list of available neighbors
int chosen_node = temp_neighbor_candidate[ rand() % temp_neighbor_candidate.size() ];
// add the neighbor to the district
initial[chosen_node][i] = 1;
//add the neighbor to single vector that we are using for traverse
district_nodes[i].push_back(chosen_node);
// also push back any glued wards
if(splitWards[chosen_node].size() > 0){
for (int j =0; j< splitWards[chosen_node].size(); ++j) {
//myOutputFile1 << j <<"----" <<splitWards[chosen_node][j] << endl;
initial[splitWards[chosen_node][j]][i] = 1;
district_nodes[i].push_back(splitWards[chosen_node][j]);
//myOutputFile1 << "\nnode " << splitWards[chosen_node][j] << " now assigned to dist. " << i << " GLUED";
//myOutputFile1 << " -- initial[" << splitWards[chosen_node][j] << "][" << i << "] = 1";
}
}
// also push back any inside wards
if(accompanyingWards[chosen_node].size() > 0){
for (int j =0; j< accompanyingWards[chosen_node].size(); ++j) {
//myOutputFile1 << j <<"----" <<accompanyingWards[chosen_node][j] << endl;
initial[accompanyingWards[chosen_node][j]][i] = 1;
district_nodes[i].push_back(accompanyingWards[chosen_node][j]);
//myOutputFile1 << "\nnode " << accompanyingWards[chosen_node][j] << " now assigned to dist. " << i << " GLUED";
//myOutputFile1 << " -- initial[" << accompanyingWards[chosen_node][j] << "][" << i << "] = 1";
}
}
if (initial_output==true){
//myOutputFile1 << "\nnode " << chosen_node << " now assigned to dist. " << i;
//myOutputFile1 << " -- initial[" << chosen_node << "][" << i << "] = 1";
cout << "\nnode " << chosen_node << " now assigned to dist. " << i;
cout << " -- initial[" << chosen_node << "][" << i << "] = 1";
}
} else {
if (initial_output==true){
//myOutputFile1 << "\nnode " << district_nodes[i].back() << " is a dead end! removing from traverse list. " << i;
cout << "\nnode " << district_nodes[i].back() << " is a dead end! removing from traverse list. " << i;
}
// if we couldn't find a neighbor for the last element of the vector, delete it
district_nodes[i].pop_back();
}
}
//cout << " \n=================================================== ";
} while (microdists_left == true);
// memory cleanup?
for (int ii = 0; ii < noDistricts; ii++) {
district_nodes[ii].clear();
vector<int>().swap(district_nodes[ii]);
}
vector<vector<int>>().swap(district_nodes);
vector<int>().swap(district_seeds);
// -- END: dynamic initialization of districts based on traversing
///////////Test/////////////////////////////////////
if (initial_output==true){
int sum;
for (int i=0; i< numSubs; i++) {
sum=0;
for (int j=0; j<noDistricts ; j++)
{
sum= sum + initial [i][j] ;
}
if (sum < 1) {
listOfInia.push_back(i);
}
}
//myOutputFile1 << "listOfInia.size "<<listOfInia.size() << ":" ;
//myOutputFile1 << "unassigned nodes" << endl;
cout << "listOfInia.size "<<listOfInia.size() << ":" ;
cout << "unassigned nodes" << endl;
for (auto ii=listOfInia.begin() ; ii< listOfInia.end() ; ii++){
//myOutputFile1 << *ii << "," ;
}
//myOutputFile1 << "neighbors=" << endl;
cout << "neighbors=" << endl;
for (auto ii=listOfInia.begin() ; ii< listOfInia.end() ; ii++){
for (int j=0; j< numSubs; j++) {
if (adjacency[*ii][j]==1){
listOfInib.push_back(j);
//myOutputFile1 << "neighbor["<<*ii << " ]:" << j << "," ;
cout << "neighbor["<<*ii << " ]:" << j << "," ;
}
}
listOfInib.clear();
}
////////////////////////////////////
// Print Current Districts
/*
for (i=0;i<numSubs;i++)
{
for (j=0;j<noDistricts;j++)
{
myOutputFile << initial[i][j] << ",";
}
myOutputFile << endl;
}
//*/
/////////////Test/////////////////
int cn =0;
int Tcn =0;
for (i=0;i<noDistricts;i++)
{
cn=0;
for (j=0;j<numSubs;j++){
if ( initial [j][i] ==1) {
cn=cn+1;
}
}
//myOutputFile1 << " Count col ["<< i << "]=" << cn << endl ;
cout << " Count col ["<< i << "]=" << cn << endl ;
Tcn = Tcn+cn;
}
//myOutputFile1 << "Tcn "<< Tcn << endl;
cout << "Tcn "<< Tcn << endl;
}
////////////////////////////////////////////////////////////////////////////////////////////
// find the first Penalty for each generated district
distPop.resize(numSubs+1);
distVote.resize(numSubs+1);
distDem.resize(numSubs+1);
distPerimeter.resize(numSubs+1);
distArea.resize(numSubs+1);
//distDGap.resize(numSubs+1);
//distRGap.resize(numSubs+1);
Rep=0; // # of Rep seats
Dem=0; // # of Dem seats
double P; // district population
double D; // democrats population per district
double R; // Republicans population per district
double Temp; // for perimeter
double sum; // for area
double G; //50 % +1 of district population
GapD=0; // Democrats gap
GapR=0; //Republicans gap;
sumDGap=0;
sumRGap=0;
int Res=0;
int r;
for (int i = 0; i < noDistricts; i++)
{
r=0;
// computing distPop[i]
P = 0; // Population of each district
D = 0; // Democrats of each district
R = 0; // Republicans of each district
sum=0; // for area
Temp=0; // for perimeter
for (int j = 0; j < numSubs ; j++ )
{
int c=0;
// Penalty for population
P += (subPop[j]*initial[j][i]);
//Penalty for deviation of democrate vote
D += (democrats[j]*initial[j][i]);
R += ((voter[j]-democrats[j])*initial[j][i]);
//// Penalty for compactness
//finding the perimeter of each district
if (initial[j][i]>0.99)
{
for (int k=0; k<numSubs; k++)
{
if ( initial [k][i]==1 && adjacency[j][k]> 0.99)
{
c += bLength[j][k];
}
}
}
// finding perimeter of each district
Temp = Temp + perimeter[j]-c;
// finding area of each district
sum = sum + area[j] * initial[j][i];
// Representatives' residency
if (initial[j][i]>0.99)
{
for (auto ii=Address.begin() ; ii < Address.end() ; ii++){
if (j==*ii){
r=r+1;
}
}
}
}
// Efficiency Gap (based on second definition of political fairness)
/*
G = ceil (0.50* (R+D));
if (D >= G) {
GapR = R ;
GapD = D - G;
}
else {
GapD = D ;
GapR = R - G;
}
//*/
// Parties' Seats (based on third definition of political fairness)
//*
if (R > D){
Rep=Rep+1;
}
else{
Dem=Dem+1;
}
//*/
//cout << " GapD , GapR" << GapD << ","<< GapR<< endl;
//(1)Pop penalty for each district
distPop[i] = abs(P-Pbar);
//distPop[i] = abs(pow((P-Pbar),2));
//distPop[i] = abs(pow((P-Pbar),3));
//(2)Political fairness penalty for each district
// based on first definition of political fairness
//distDem[i] = abs(D-AvgNumDemocratsPerDistrict);
// based on second definition of political fairness(Efficiency gap)
// sumDGap += GapD; //Total Democrats' Gap (state wide)
// sumRGap += GapR; //Total Republicans' Gap (state wide)
//(3)Lack of compactness(Perimeter and Area)
distPerimeter[i]=Temp;
distArea[i]=sum;
//(4)
if(r>0){
Res=Res+(r-1);
}
}
if (initial_output==true){
for (i = 0; i < noDistricts; i++)
//myOutputFile1<< "distPop" << i << "=" << distPop[i]<<endl;
cout << "distPop" << i << "=" << distPop[i]<<endl;
//for (i = 0; i < noDistricts ; i++)
// myOutputFile1 << "distDem" << i << "=" << distDem[i]<<endl;
// cout << "distDem" << i << "=" << distDem[i]<<endl;
for (i = 0; i < noDistricts; i++)
//myOutputFile1<< "distPerimeter" << i << "=" << distPerimeter[i]<<endl;
cout << "distPerimeter" << i << "=" << distPerimeter[i]<<endl;
for (i = 0; i < noDistricts; i++)
//myOutputFile1<< "distArea" << i << "=" << distArea[i]<<endl;
cout << "distArea" << i << "=" << distArea[i]<<endl;
for (i = 0; i < noDistricts; i++)
//myOutputFile1<< "Compactness" << i << "=" << pow(distPerimeter[i],2)/distArea[i]<<endl;
cout << "Compactness" << i << "=" << pow(distPerimeter[i],2)/distArea[i]<<endl;
}
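// Illustrative note (not from the original code): the compactness score used above is
// perimeter^2 / area, so a perfectly square district of side s scores (4s)^2 / s^2 = 16,
// a circle scores 4*pi ~ 12.57 (the theoretical minimum), and long, thin districts score
// much higher; meanC therefore penalises non-compact shapes.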
// find mean and standard deviation for political fairness penalty and population
meanP=0;
meanV=0;
meanC=0;
meanR=0;
for (int i = 0; i < noDistricts; i++)
{
meanP += (distPop[i]);
//meanV += (distDem[i]);
meanC += pow(distPerimeter[i],2)/distArea[i]; //based on the second approach
}
meanP /= noDistricts;
//meanV /= noDistricts;
meanC /= noDistricts;
meanR =Res;
//For Efficiency Gap (Second definition of political fairness)
//meanV = abs((sumDGap)/VD-(sumRGap)/VR); // Can be changed
//based on Third definition of political fairness(# of parties seats)
meanV = abs(Rep - ceil ((1-Vote)*noDistricts)); // Or meanV = abs(Dem - ceil ((Vote)*noDistricts));
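// Illustrative note (not from the original code): under this third definition the penalty
// is the gap between seats won and the proportional target.  For example, if Republicans
// hold 47% of the statewide vote and there are 99 districts, ceil(0.47 * 99) = 47; a map
// that elects 60 Republicans then contributes |60 - 47| = 13 to meanV.  (The 99-district
// figure is the Wisconsin Assembly size and is used here only as an assumed illustration.)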
listOfmeanP.push_back(meanP);
listOfmeanV.push_back(meanV);
listOfmeanC.push_back(meanC);
listOfmeanR.push_back(meanR);
for( int i = 0; i < noDistricts; i++ )
{
Sd1 += pow((distPop[i] - meanP), 2);
//Sd2 += pow((distDem[i] - meanV), 2);
// Compactness equal to perimeter square over area
Sd3 += pow(((pow((distPerimeter[i]),2)/distArea[i]) - meanC),2);
}
Sd1 /= noDistricts;
//Sd2 /= noDistricts;
Sd3 /= noDistricts;
SdP = sqrt(Sd1);
//SdV = sqrt(Sd2);
SdC = sqrt(Sd3);
listOfSdP.push_back(SdP);
//listOfSdV.push_back(SdV);
listOfSdC.push_back(SdC);
} while (iniGenNumber < noInitialGeneration); // for generating 100 initials
////////////////////////////////////////////////////////////////////////////////////////////////
// average mean and standard deviation of 100 initials
double TempMeanP=0;
for (auto ii=listOfmeanP.begin() ; ii< listOfmeanP.end() ; ii++){
TempMeanP += *ii;
}
meanP= TempMeanP/ listOfmeanP.size() ;
//*
double TempMeanV=0;
for (auto ii=listOfmeanV.begin() ; ii< listOfmeanV.end() ; ii++){
TempMeanV += *ii;
}
meanV= TempMeanV/ listOfmeanV.size() ;
//*/
double TempMeanC=0;
for (auto ii=listOfmeanC.begin() ; ii< listOfmeanC.end() ; ii++){
TempMeanC += *ii;
//cout << "TempMeanC=" << TempMeanC << endl;
}
meanC= TempMeanC / listOfmeanC.size() ;
double TempMeanR=0;
for (auto ii=listOfmeanR.begin() ; ii< listOfmeanR.end() ; ii++){
TempMeanR += *ii;
//cout << "TempMeanC=" << TempMeanC << endl;
}
meanR= TempMeanR / listOfmeanR.size() ;
double TempSdP=0;
for (auto ii=listOfSdP.begin() ; ii< listOfSdP.end() ; ii++){
TempSdP += *ii;
}
SdP= TempSdP/ listOfSdP.size() ;
/*
double TempSdV=0;
for (auto ii=listOfSdV.begin() ; ii< listOfSdV.end() ; ii++){
TempSdV += *ii;
}
SdV= TempSdV/ listOfSdV.size() ;
//*/
//*
double TempSdV=0;
for (auto ii=listOfmeanV.begin() ; ii< listOfmeanV.end() ; ii++){
TempSdV += pow((*ii - meanV), 2); // *ii is the stored mean itself, not an index
}
SdV =TempSdV/listOfmeanV.size();
//*/
double TempSdC=0;
for (auto ii=listOfSdC.begin() ; ii< listOfSdC.end() ; ii++){
TempSdC += *ii;
}
SdC= TempSdC/ listOfSdC.size() ;
//*
double TempSdR=0;
for (auto ii=listOfmeanR.begin() ; ii< listOfmeanR.end() ; ii++){
TempSdR += pow((*ii - meanR), 2); // *ii is the stored mean itself, not an index
}
SdR =TempSdR/listOfmeanR.size();
//*/
cout << "meanP , meanV , meanC , meanR = " << meanP << "," << meanV << "," << meanC << "," << meanR << endl;
cout << "SdP , SdV , SdC , SdR = " << SdP << "," << SdV << "," << SdC << "," << SdR << endl;
if (initial_output==true){
//myOutputFile1 << "meanP , meanV, meanC = " <<meanP << "," << meanV << "," << meanC << endl;
cout << "meanP , meanV, meanC = " <<meanP << "," << meanV << "," << meanC << endl;
//myOutputFile1 << "SdP , SdV, SdC = " << SdP << "," << SdV << "," << SdC << SdR << endl;
cout << "SdP , SdV , SdC , SdR = " << SdP << "," << SdV << "," << SdC << "," << SdR << endl;
}
/////////////////////////////////////////////////////////////////////////////////////////////////////////
iniGenNumber=0;
do {
//cout << "bestbefnormpe=" << bestbefnormpen << endl;
firstPenalty=0; // Weighted Normalized total penalty
firstPenaltyP=0; // Weighted Normalized total Pop penalty
firstPenaltyV=0; // Weighted Normalized total Political fairness penalty
firstPenaltyCom=0; // Weighted Normalized total compactness penalty
firstPenaltyR=0;
OriginalFirstPenalty=0; // unWeighted Normalized total penalty
OriginalFirstPenaltyP=0; //unWeighted Normalized total Pop penalty
OriginalFirstPenaltyV=0; // unWeighted Normalized total Political fairness penalty
OriginalFirstPenaltyCom=0; // unWeighted Normalized total compactness penalty
OriginalFirstPenaltyR=0;
beforeNormalFirstPenalty=0;//Unweighted not Normalized total penalty
beforeNormalFirstPop=0; //unWeighted Normalized total Pop penalty
beforeNormalFirstVote=0; // unWeighted Normalized total Political fairness penalty
beforeNormalFirstCom=0; // unWeighted Normalized total compactness penalty
beforeNormalFirstRes=0;
//double beforeNormalFirstVote1;
iniGenNumber= iniGenNumber+1;
// put the elements of initial matrix equal zero
for (i=0; i < numSubs; i++)
for(j=0; j<noDistricts; j++)
initial[i][j]=0;
// -- START: dynamic initialization of districts based on traversing
for (int ii = 0; ii < noDistricts; ii++) {
if (initial_output==true){
cout << "sub no " << ii;
//myOutputFile1 << "sub no " << ii;
}
vector<int> tmp_vec;
int tmp_seed;
do {
tmp_seed = rand() % numSubs;
} while (std::find(district_seeds.begin(), district_seeds.end(), tmp_seed) != district_seeds.end() || splitWards[tmp_seed].size() > 0 || accompanyingWards[tmp_seed].size()>0);
district_seeds.push_back(tmp_seed);
initial[tmp_seed][ii] = 1;
tmp_vec.push_back(tmp_seed); // create a single-member vector using the new seed
district_nodes.push_back(tmp_vec); // put the temp vector in the 2D vector
vector<int>().swap(tmp_vec); // free the temp vector to avoid memory hog
}
if (initial_output==true){
//myOutputFile1 << "\n\n District_seeds: ";
cout << "\n\n District_seeds: ";
for (int i = 0; i < district_nodes.size(); i++) {
for (int j = 0; j < district_nodes[i].size(); j++) {
//myOutputFile1 << district_nodes[i][j] << ", ";
cout << district_nodes[i][j] << ", ";
}
}
}
bool microdists_left;
do {
microdists_left = false;
// district_seeds_temp.clear();
for (int i = 0; i < district_nodes.size(); i++) {
if ( district_nodes[i].size() == 0 ) {
if (initial_output==true){
//myOutputFile1 << "\nDist. " << i << " has run out of bLength to grow.";
//cout << "\nDist. " << i << " has run out of bLength to grow.";
}
continue; // no free neighbors left to grab, go to next seed
} else {
microdists_left = true;
}
vector<int> temp_neighbor_candidate;
for (int k = 0; k < numSubs; k++) {
// found a neighbor sub?
if (adjacency[district_nodes[i].back()][k] == 1) {
if (initial_output==true){
//myOutputFile1 << "\n Neighbor for node " << district_nodes[i].back() << ": node " << k;
//cout << "\n Neighbor for node " << district_nodes[i].back() << ": node " << k;
}
// now is it unclaimed? i is index, *i is sub number
bool already_taken = false;
for (int m = 0; m < noDistricts; m++) {
if (initial[k][m] == 1) {
// check to see if the node is already taken by one of the districts
if (initial_output==true){
//myOutputFile1 << " - already belongs to dist. " << m;
//cout << " - already belongs to dist. " << m;
}
already_taken = true;
break;
}
}
if (already_taken == true) {
continue;
} else {
// push back all neighbors of last element of the vector
temp_neighbor_candidate.push_back(k);
}
}
}
if (initial_output==true){
//myOutputFile1 << "temp_neighbor_candidate.size()=" << temp_neighbor_candidate.size() << endl;
//cout << "temp_neighbor_candidate.size()=" << temp_neighbor_candidate.size() << endl;
}
// check to see if the last element of vector has at least one neighbor?
if ( temp_neighbor_candidate.size() > 0 ) {
// if yes, now pick one from the list of available neighbors
int chosen_node = temp_neighbor_candidate[ rand() % temp_neighbor_candidate.size() ];
// add the neighbor to the district
initial[chosen_node][i] = 1;
//add the neighbor to single vector that we are using for traverse
district_nodes[i].push_back(chosen_node);
// also do the same for ward's potential glued wards
if( splitWards[chosen_node].size() > 0 ){
for (int j =0; j< splitWards[chosen_node].size(); ++j) {
//myOutputFile1 << j <<"----" << chosen_node << " : " <<splitWards[chosen_node][j] << endl;
initial[splitWards[chosen_node][j]][i] = 1;
//add the neighbor to single vector that we are using for traverse
district_nodes[i].push_back(splitWards[chosen_node][j]);
//myOutputFile1 << "\nnode " << splitWards[chosen_node][j]<< " now assigned to dist. " << i;
//myOutputFile1 << " -- initial[" << splitWards[chosen_node][j]<< "][" << i << "] = 1";
}
}
// also push back any inside wards
if(accompanyingWards[chosen_node].size() > 0){
for (int j =0; j< accompanyingWards[chosen_node].size(); ++j) {
//myOutputFile1 << j <<"----" <<accompanyingWards[chosen_node][j] << endl;
initial[accompanyingWards[chosen_node][j]][i] = 1;
district_nodes[i].push_back(accompanyingWards[chosen_node][j]);
//myOutputFile1 << "\nnode " << accompanyingWards[chosen_node][j] << " now assigned to dist. " << i << " GLUED";
//myOutputFile1 << " -- initial[" << accompanyingWards[chosen_node][j] << "][" << i << "] = 1";
}
}
if (initial_output==true){
//myOutputFile1 << "\nnode " << chosen_node << " now assigned to dist. " << i;
//myOutputFile1 << " -- initial[" << chosen_node << "][" << i << "] = 1";
//cout << "\nnode " << chosen_node << " now assigned to dist. " << i;
//cout << " -- initial[" << chosen_node << "][" << i << "] = 1";
}
} else {
if (initial_output==true){
//myOutputFile1 << "\nnode " << district_nodes[i].back() << " is a dead end! removing from traverse list. " << i;
//cout << "\nnode " << district_nodes[i].back() << " is a dead end! removing from traverse list. " << i;
}
// if no free neighbor was found for the last element of the vector, delete it
district_nodes[i].pop_back();
}
}
//cout << " \n=================================================== ";
} while (microdists_left == true);
// memory cleanup?
for (int ii = 0; ii < noDistricts; ii++) {
district_nodes[ii].clear();
vector<int>().swap(district_nodes[ii]);
}
vector<vector<int>>().swap(district_nodes);
vector<int>().swap(district_seeds);
// -- END: dynamic initialization of districts based on traversing
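// Explanatory note on the initialization above (a sketch of the logic, not executed code):
// each district starts from a random, still-unclaimed seed ward and repeatedly grabs an
// unclaimed neighbor of the last ward it added, so every district grows as a contiguous chain;
// dead-end wards are popped from the traversal list, and the do-while ends once no district
// can grow any further. In outline, with hypothetical names:
//   while (some district can still grow)
//     for each district d:
//       candidates = unclaimed neighbors of the last ward in d's chain
//       if candidates is empty  -> pop the last ward (dead end)
//       else                    -> claim a random candidate (plus its split/inside wards)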
///////////Test/////////////////////////////////////
if (initial_output==true){
int sum;
for (int i=0; i< numSubs; i++) {
sum=0;
for (int j=0; j<noDistricts ; j++)
{
sum= sum + initial [i][j] ;
}
if (sum < 1) {
listOfInia.push_back(i);
}
}
//myOutputFile1 << "listOfInia.size "<<listOfInia.size() << ":" ;
//myOutputFile1 << "unassigned nodes" << endl;
//cout << "listOfInia.size "<<listOfInia.size() << ":" ;
//cout << "unassigned nodes" << endl;
for (auto ii=listOfInia.begin() ; ii< listOfInia.end() ; ii++){
//myOutputFile1 << *ii << "," ;
}
//myOutputFile1 << "neighbors=" << endl;
cout << "neighbors=" << endl;
for (auto ii=listOfInia.begin() ; ii< listOfInia.end() ; ii++){
for (int j=0; j< numSubs; j++) {
if (adjacency[*ii][j]==1){
listOfInib.push_back(j);
//myOutputFile1 << "neighbor["<<*ii << " ]:" << j << "," ;
cout << "neighbor["<<*ii << " ]:" << j << "," ;
}
}
listOfInib.clear();
}
////////////////////////////////////
// Print Current Districts
/*
for (i=0;i<numSubs;i++)
{
for (j=0;j<noDistricts;j++)
{
//myOutputFile << initial[i][j] << ",";
if (initial [i][j]==1){
myOutputFile << i << " " << j << endl;
}
}
//myOutputFile << endl;
}
//*/
/////////////Test/////////////////
int cn =0;
int Tcn =0;
for (i=0;i<noDistricts;i++)
{
cn=0;
for (j=0;j<numSubs;j++){
if ( initial [j][i] ==1) {
cn=cn+1;
}
}
//myOutputFile1 << " Count col ["<< i << "]=" << cn << endl ;
cout << " Count col ["<< i << "]=" << cn << endl ;
Tcn = Tcn+cn;
}
//myOutputFile1 << "Tcn "<< Tcn << endl;
cout << "Tcn "<< Tcn << endl;
}
////////////////////////////////////////////////////////////////////////////////////////////
// find the first Penalty for each generated district
distPop.resize(numSubs+1);
distVote.resize(numSubs+1);
distDem.resize(numSubs+1);
distPerimeter.resize(numSubs+1);
distArea.resize(numSubs+1);
//distDGap.resize(numSubs+1);
//distRGap.resize(numSubs+1);
Rep=0; // # of Rep seats
Dem=0; // # of Dem seats
double P; // district population
double D; // democrats population per district
double R; // Republicans population per district
double Temp; // for perimeter
double sum; // for area
double G; // majority threshold: 50% (rounded up) of the district's two-party vote
GapD=0; // Democrats gap
GapR=0; //Republicans gap
sumDGap=0;
sumRGap=0;
int Res=0;
int r;
for (int i = 0; i < noDistricts; i++)
{
// computing distPop[i]
P = 0; // Population of each district
D = 0; // Democrats of each district
R = 0; // Republicans of each district
r=0;
sum=0; // for area
Temp=0; // for perimeter
for (int j = 0; j < numSubs ; j++ )
{
int c=0;
// Penalty for population
P += (subPop[j]*initial[j][i]);
//Penalty for deviation of Democrat vote
D += (democrats[j]*initial[j][i]);
R += ((voter[j]-democrats[j])*initial[j][i]);
//// Penalty for compactness
//finding the perimeter of each district
if (initial[j][i]>0.99)
{
for (int k=0; k<numSubs; k++)
{
if ( initial [k][i]==1 && adjacency[j][k]> 0.99)
{
c +=bLength[j][k];
}
}
}
// finding perimeter of each district
Temp = Temp + perimeter[j]-c;
// finding area of each district
sum = sum + area[j] * initial[j][i];
if (initial[j][i]>0.99)
{
for (auto ii=Address.begin() ; ii < Address.end() ; ii++){
if (j==*ii){
r=r+1;
}
}
}
}
// Efficiency Gap (based on second definition of political fairness)
/*
G = ceil (0.50* (R+D));
if (D >= G) {
GapR = R ;
GapD = D - G;
}
else {
GapD = D ;
GapR = R - G;
}
//*/
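// Explanatory note (based on the commented-out block above): the efficiency-gap definition
// counts "wasted" votes per district. With G = ceil(0.5*(R+D)) as the majority threshold,
// the winning party wastes only its votes above G while the losing party wastes all of its
// votes; summing the wasted votes statewide and taking the normalized difference of the two
// parties' totals gives the gap used when this second definition is switched on.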
// Parties' Seats (based on third definition of political fairness)
//*
if (R > D){
Rep=Rep+1;
}
else{
Dem=Dem+1;
}
//*/
//cout << " GapD , GapR" << GapD << ","<< GapR<< endl;
//(1)Pop penalty for each district
distPop[i] = abs(P-Pbar);
//distPop[i] = abs(pow((P-Pbar),2));
//distPop[i] = abs(pow((P-Pbar),3));
//(2)Political fairness penalty for each district
// based on first definition of political fairness
//distDem[i] = abs(D-AvgNumDemocratsPerDistrict);
// based on second definition of political fairness(Efficiency gap)
// sumDGap += GapD; //Total Democrats' Gap (state wide)
// sumRGap += GapR; //Total Republicans' Gap (state wide)
//(3)Lack of compactness(Perimeter and Area)
distPerimeter[i]=Temp;
distArea[i]=sum;
if (r>0){
Res=Res+r-1;
}
}
if (initial_output==true){
for (i = 0; i < noDistricts; i++)
//myOutputFile1<< "distPop" << i << "=" << distPop[i]<<endl;
cout << "distPop" << i << "=" << distPop[i]<<endl;
//for (i = 0; i < noDistricts ; i++)
// myOutputFile1 << "distDem" << i << "=" << distDem[i]<<endl;
// cout << "distDem" << i << "=" << distDem[i]<<endl;
for (i = 0; i < noDistricts; i++)
//myOutputFile1<< "distPerimeter" << i << "=" << distPerimeter[i]<<endl;
cout << "distPerimeter" << i << "=" << distPerimeter[i]<<endl;
for (i = 0; i < noDistricts; i++)
//myOutputFile1<< "distArea" << i << "=" << distArea[i]<<endl;
cout << "distArea" << i << "=" << distArea[i]<<endl;
for (i = 0; i < noDistricts; i++)
//myOutputFile1<< "Compactness" << i << "=" << pow(distPerimeter[i],2)/distArea[i]<<endl;
cout << "Compactness" << i << "=" << pow(distPerimeter[i],2)/distArea[i]<<endl;
}
for (i= 0; i < noDistricts; i++) {
OriginalFirstPenaltyP += 1000*(distPop[i]- meanP)/SdP;
//OriginalFirstPenaltyV += 1000* ( distDem[i]-meanV)/SdV;
OriginalFirstPenaltyCom += 1000* (pow(distPerimeter[i],2)/distArea[i]-meanC)/SdC;
}
//*
//beforeNormalFirstVote = abs((sumDGap)/VD-(sumRGap)/VR)/2; // Political Fairness for efficiency gap (second definition)
beforeNormalFirstVote = abs(Rep - ceil ((1-Vote)*noDistricts)); // Political Fairness for seats (third definition)
//OriginalFirstPenaltyV= (beforeNormalFirstVote); // normalized
OriginalFirstPenaltyV= 10000*(beforeNormalFirstVote-meanV)/SdV; // normalized
beforeNormalFirstRes = Res;
OriginalFirstPenaltyR=1000*(beforeNormalFirstRes-meanR)/SdR;
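// Explanatory note (assuming Vote is the statewide Democratic vote share): the seats-based
// fairness term compares the number of Republican seats won against the share implied by
// the statewide vote,
//   beforeNormalFirstVote = |Rep - ceil((1 - Vote) * noDistricts)|,
// while the residency term Res counts, per district, any addresses from the Address list
// (assumed to be candidate/incumbent residences) beyond the first one, i.e. r - 1 for a
// district containing r such addresses.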
//cout << "OriginalFirstPenaltyP=" << OriginalFirstPenaltyP << endl;
//cout << "OriginalFirstPenaltyV=" << OriginalFirstPenaltyV << endl;
//cout << "OriginalFirstPenaltyCom=" << OriginalFirstPenaltyCom << endl;
//cout << "OriginalFirstPenaltyR=" << OriginalFirstPenaltyR << endl;
//*/
//weighted normalized penalty
firstPenalty = w1* OriginalFirstPenaltyP + w2* OriginalFirstPenaltyV + w3* OriginalFirstPenaltyCom+w4*OriginalFirstPenaltyR;
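// Explanatory sketch of the scoring used here (an assumption made explicit, not new logic):
// every raw term is normalized as a z-score against the mean/standard deviation estimated
// from the initial generation, scaled by 1000 (10000 for the seats term), and then combined
// as a weighted sum. A hypothetical helper expressing the same normalization:
//   double zScore(double raw, double mean, double sd, double scale = 1000.0) {
//       return scale * (raw - mean) / sd;
//   }
//   // firstPenalty = w1*popTerm + w2*voteTerm + w3*compactnessTerm + w4*residencyTerm;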
//cout << "firstPenalty=" << firstPenalty << endl;
if (firstPenalty < bestbefnormpen){
for (i=0; i < numSubs; i++){
for(j=0; j<noDistricts; j++)
{
binitial[i][j]=initial[i][j];
//cout << "done" << endl;
}
}
bestbefnormpen=firstPenalty;
}
//*
if (initial_output==true){
//myOutputFile << "iniGenNumber=" << iniGenNumber << endl;
cout << "iniGenNumber=" << iniGenNumber << endl;
}
} while (iniGenNumber < noInitialGeneration); // for generating noInitialGeneration initial solutions
///////////////////////////////////////////////////////
// solution of Best initial districts ***************//
///////////////////////////////////////////////////////
if (detailed_output==true){
cout << "informaton of the best initial districts" << endl;
}
// find the first Penalty for best initial solution
distFPop.resize(numSubs+1);
distFVote.resize(numSubs+1);
distFDem.resize(numSubs+1);
distFArea.resize(numSubs+1);
distFPerimeter.resize(numSubs+1);
firstPenalty=0; // Weighted Normalized total penalty
firstPenaltyP=0; // Weighted Normalized total Pop penalty
firstPenaltyV=0; // Weighted Normalized total Political fairness penalty
firstPenaltyCom=0; // Weighted Normalized total compactness penalty
firstPenaltyR=0;
OriginalFirstPenalty=0; // unWeighted Normalized total penalty
OriginalFirstPenaltyP=0; //unWeighted Normalized total Pop penalty
OriginalFirstPenaltyV=0; // unWeighted Normalized total Political fairness penalty
OriginalFirstPenaltyCom=0; // unWeighted Normalized total compactness penalty
OriginalFirstPenaltyR=0;
beforeNormalFirstPenalty=0;//Unweighted not Normalized total penalty
beforeNormalFirstPop=0; //unWeighted Normalized total Pop penalty
beforeNormalFirstVote=0; // unWeighted Normalized total Political fairness penalty
beforeNormalFirstCom=0;// unWeighted Normalized total compactness penalty
beforeNormalFirstRes=0;
double Pdist; // district population
//distDGap.resize(numSubs+1);
//distRGap.resize(numSubs+1);
double Ddist; // democrats population per district
double Rdist; // Republicans population per district
double Temp;
double sum;
double G; // majority threshold: 50% (rounded up) of the district's two-party vote
GapD=0; // Democrats gap
GapR=0; //Republicans gap
double sumFDGap=0;
double sumFRGap=0;
int FRep=0; // # of Rep seats
int FDem=0; // # of Dem seats
int FRes=0; // # of residency violations (initialized so it is not used uninitialized below)
int r;
for (int i = 0; i < noDistricts; i++)
{
//computing distPop[i]
Pdist = 0; // Population of each district
Ddist = 0; // Democrats of each district
Rdist = 0; // Republicans of each district
sum=0;
r=0;
Temp=0;
for (int j = 0; j < numSubs ; j++ )
{
int c=0;
//Penalty for population
Pdist += (subPop[j]*binitial[j][i]);
//Penalty for deviation of Democrat vote
Ddist += (democrats[j]*binitial[j][i]);
Rdist += ((voter[j]-democrats[j])*binitial[j][i]);
//Penalty for lack of compactness
//finding the perimeter of each district
//*
if (binitial[j][i]>0.99)
{
for (int k=0; k<numSubs; k++)
{
if ( binitial [k][i]==1 && adjacency[j][k]> 0.99)
{
c +=bLength[j][k];
}
}
}
// finding perimeter of each district
Temp = Temp + perimeter[j]-c;
// finding area of each district
sum = sum + area[j] * binitial[j][i];
if (binitial[j][i]>0.99)
{
for (auto ii=Address.begin() ; ii < Address.end() ; ii++){
if (j==*ii){
r=r+1;
}
}
}
}
// store the information of each district in the district-level vectors
Popvect.push_back(Pdist);
Demvect.push_back(Ddist);
Repvect.push_back(Rdist);
Areavect.push_back(sum);
Perivect.push_back(Temp);
TPopvect.push_back(Pdist);
TDemvect.push_back(Ddist);
TRepvect.push_back(Rdist);
TAreavect.push_back(sum);
TPerivect.push_back(Temp);
// Efficiency Gap (based on second definition of political fairness)
/*
G = ceil (0.50* (Rdist+Ddist));
if (Ddist >= G) {
GapR = Rdist ;
GapD = Ddist - G;
}
else {
GapD = Ddist ;
GapR = Rdist - G;
}
RGapvect.push_back(GapR);
TRGapvect.push_back(GapR);
DGapvect.push_back(GapD);
TDGapvect.push_back(GapD);
//*/
// Parties' Seats (based on third definition of political fairness)
//*
if (Rdist > Ddist){
FRep=FRep+1;
}
else{
FDem=FDem+1;
}
//*/
//(1)Pop penalty for each district
distFPop[i] = abs(Pdist-Pbar);
//distPop[i] = abs(pow((P-Pbar),2));
//distPop[i] = abs(pow((P-Pbar),3));
//(2)Political fairness penalty for each district
//1) based on first definition of political fairness
//distFDem[i] = abs(Ddist-AvgNumDemocratsPerDistrict);
// 2) based on second definition of political fairness (Efficiency gap)
// sumFDGap += GapD; //Total Democrats' Gap (state wide)
// sumFRGap += GapR; //Total Republicans' Gap (state wide)
//(3)Lack of compactness(Perimeter and Area)
distFPerimeter[i]=Temp;
distFArea[i]=sum;
if (r>0){
FRes=FRes+r-1;
}
if(detailed_output==true){
cout << "district["<< i <<"]: PopDeviation=" <<abs(Pdist-Pbar)/ Pbar << " and population= " << Pdist << " and ratio of democrats= " << Ddist/(Rdist+Ddist) << " and democrats's gap= " << GapD <<" and Republicans' gap=" << GapR << " and compactness=" << (pow((Temp),2)/sum) << endl;
}
}
///////////////////////////////////////////////////////////////////
/*
// For Efficiency Gap and # of seats:
double TempMeanVE=0;
for (auto ii=listOfmeanV.begin() ; ii< listOfmeanV.end() ; ii++){
TempMeanVE += *ii;
}
meanVE= TempMeanVE/ listOfmeanVE.size() ;
for (auto ii=listOfmeanV.begin() ; ii< listOfmeanV.end() ; ii++){
SdVE += pow((listOfmeanV[*ii] - meanVE), 2);
}
SdVE=sqrt(SdVE/listOfmeanVE.size());
//*/
// Find the penalty for each district
PenaltyP.resize(numSubs+1);
PenaltyV.resize(numSubs+1);
PenaltyCom.resize(numSubs+1);
PenaltyR.resize(numSubs+1);
OriginalFPenaltyP.resize(numSubs+1);
OriginalFPenaltyV.resize(numSubs+1);
OriginalFPenaltyCom.resize(numSubs+1);
OriginalFPenaltyR.resize(numSubs+1);
// each penalty is multiplied by 1000 to ease the SA calculations
for (i= 0; i < noDistricts; i++) {
OriginalFPenaltyP[i] = 1000 * (((distFPop[i]- meanP)/SdP));
//OriginalFPenaltyV[i] = 1000 * (((distFDem[i]-meanV)/SdV));
OriginalFPenaltyCom[i] = 1000 * (((pow(distFPerimeter[i],2)/distFArea[i])-meanC)/SdC);
}
for (i= 0; i < noDistricts; i++) {
//Original Not normalized penalty (Pure)
beforeNormalFirstPop += distFPop[i];
//beforeNormalFirstVote += distFDem[i];
beforeNormalFirstCom += pow((distFPerimeter[i]),2)/distFArea[i];
// Original normalized Penalty
//OriginalFirstPenalty += OriginalPenalty[i];
OriginalFirstPenaltyP += OriginalFPenaltyP[i];
//OriginalFirstPenaltyV += OriginalFPenaltyV[i];
OriginalFirstPenaltyCom += OriginalFPenaltyCom[i];
}
//*
// Political Fairness for efficiency gap and parties' seats
//beforeNormalFirstVote = abs((sumDGap)/VD-(sumRGap)/VR)/2; //Political Fairness for efficiency gap
beforeNormalFirstVote = abs(FRep - ceil ((1-Vote)*noDistricts)); //Political Fairness for parties' seats
OriginalFirstPenaltyV= 10000*(beforeNormalFirstVote-meanV)/SdV;
//OriginalFirstPenaltyV= (beforeNormalFirstVote);
firstPenaltyV = w2*OriginalFirstPenaltyV;
//*/
beforeNormalFirstRes = FRes; // Residency penalty (not yet normalized)
OriginalFirstPenaltyR= 1000*(beforeNormalFirstRes-meanR)/SdR;
firstPenaltyR = w2*OriginalFirstPenaltyR;
// Weighted Normalized penalty
firstPenaltyP = w1*OriginalFirstPenaltyP;
firstPenaltyV = w2*OriginalFirstPenaltyV;
firstPenaltyCom = w3* OriginalFirstPenaltyCom;
firstPenaltyR = w4* OriginalFirstPenaltyR;
// Total Penalty
OriginalFirstPenalty = OriginalFirstPenaltyP + OriginalFirstPenaltyV + OriginalFirstPenaltyCom+OriginalFirstPenaltyR; // Normalized not weighted Penalty
firstPenalty = firstPenaltyP + firstPenaltyV + firstPenaltyCom+firstPenaltyR; // Weighted Normalized Penalty
beforeNormalFirstPenalty= w1* beforeNormalFirstPop + w2* beforeNormalFirstVote + w3* beforeNormalFirstCom+w4*beforeNormalFirstRes; // Weighted not normalized Penalty
//myOutputFile << "firstPenalty~~~~~~~~~~~" << firstPenalty << endl;
double firstPenaltyStore=(beforeNormalFirstPop+beforeNormalFirstVote)/10; //Temporary
//cout << "OriginalFirstPenaltyP=" << OriginalFirstPenaltyP << endl;
//cout << "OriginalFirstPenaltyV=" << OriginalFirstPenaltyV << endl;
//cout << "OriginalFirstPenaltyCom=" << OriginalFirstPenaltyCom << endl;
//cout << "OriginalFirstPenaltyR=" << OriginalFirstPenaltyCom << endl;
//myOutputFile << "firstPenalty~~~~~~~~~~~" << firstPenalty << endl;
if (initial_output==true){
myOutputFile << "firstPenalty: " << firstPenalty << endl;
cout << " \n ** PENALTY: " << firstPenalty;
/*
for (i=0; i<numSubs; i++)
{
for (j=0; j<noDistricts; j++)
{
//myOutputFile2 << initial[i][j] << ",";
if (binitial[i][j]==1){
myOutputFile << i << " " << j ;
}
}
myOutputFile << endl;
}
//*/
/*
for (i=0; i<numSubs; i++)
{
for (j=0; j<noDistricts; j++)
{
if(binitial[i][j]==1){
//cout << "ward["<< i <<"]is assigned to district["<< j <<"];" <<endl;
}
}
}
//*/
//cout << "firstPenalty="<< firstPenalty << endl;
//cout << "normalizedWeightedPenaltyP="<< firstPenaltyP << endl;
//cout << "normalizedWeightedPenaltyV="<< firstPenaltyV<< endl;
//cout << "normalizedWeightedPenaltyCom="<< firstPenaltyCom << endl;
//cout << "unWeighted Normalized total penalty="<< OriginalFirstPenalty << endl;
//cout << "unWeighted Normalized Pop penalty="<< OriginalFirstPenaltyP << endl;
//cout << "unWeighted Normalized Political penalty="<< OriginalFirstPenaltyV << endl;
//cout << "unWeighted Normalized compactness penalty="<< OriginalFirstPenaltyCom << endl;
//cout << "unWeighted not Normalized total penalty="<< beforeNormalFirstPenalty << endl;
//cout << "unWeighted not Normalized Pop penalty="<< beforeNormalFirstPop << endl;
//cout << "unWeighted not Normalized Political penalty="<< beforeNormalFirstVote << endl;
//cout << "unWeighted not Normalized compactness penalty="<< beforeNormalFirstCom << endl;
}
myOutputFile << "firstPenalty="<< firstPenalty << endl;
myOutputFile << "normalizedWeightedPenaltyP="<< firstPenaltyP << endl;
myOutputFile << "normalizedWeightedPenaltyV="<< firstPenaltyV<< endl;
myOutputFile << "normalizedWeightedPenaltyCom="<< firstPenaltyCom << endl;
myOutputFile << "normalizedWeightedPenaltyR="<< firstPenaltyR << endl;
myOutputFile << "unWeighted Normalized total penalty="<< OriginalFirstPenalty << endl;
myOutputFile << "unWeighted Normalized Pop penalty="<< OriginalFirstPenaltyP << endl;
myOutputFile << "unWeighted Normalized Political penalty="<< OriginalFirstPenaltyV << endl;
myOutputFile << "unWeighted Normalized compactness penalty="<< OriginalFirstPenaltyCom << endl;
myOutputFile << "unWeighted Normalized Residency penalty="<< OriginalFirstPenaltyR << endl;
myOutputFile << "unWeighted not Normalized total penalty="<< beforeNormalFirstPenalty << endl;
myOutputFile << "unWeighted not Normalized Pop penalty="<< beforeNormalFirstPop << endl;
myOutputFile << "unWeighted not Normalized Political penalty="<< beforeNormalFirstVote << endl;
myOutputFile << "unWeighted not Normalized compactness penalty="<< beforeNormalFirstCom << endl;
myOutputFile << "unWeighted not Normalized Residency penalty="<< beforeNormalFirstRes << endl;
///////////////////////////////////////////////
//find the edges of initial ****************///
//////////////////////////////////////////////
vector<int> myCorners , myCoreNodes;
// cout << "Finding corners of dist: ";
for (int ii=0; ii < noDistricts; ii++ ){
// cout << ii << " ";
myCorners.clear();
myCoreNodes.clear();
for (int jj=0; jj < numSubs; jj++ ){
if (binitial[jj][ii] == 1) {
myCorners.push_back(jj);
//cout << "My jj is &&&&&&&&&&" << jj << endl
}
}
district_wards.push_back(myCorners);
Tempdistrict_wards.push_back(myCorners);
//for (auto i = myCorners.begin(); i != myCorners.end(); i++){
for(auto i = district_wards[ii].begin(); i!=district_wards[ii].end(); i++){
//cout << "My i is ============" << *i << endl;
for (int kk=0 ; kk< numSubs; kk++) {
if (adjacency [*i][kk]== 1){
// if ward *i has a neighbor kk outside district ii, then *i is a border ward; push it to myCoreNodes
//if (binitial[kk][ii]==0){
if(std::find(district_wards[ii].begin(), district_wards[ii].end(), kk) == district_wards[ii].end()){
myCoreNodes.push_back(*i);
break;
}
}
}
}
corners_nodes.push_back(myCoreNodes);
}
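// Explanatory note: a ward is treated as a border ("corner") ward of its district when at
// least one of its adjacency-matrix neighbors lies outside the district's ward list. Only
// these border wards become candidates for the give/take moves in the SA loop below, which
// keeps the move neighborhood small and preserves contiguity more often.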
if (edges_output==true){
//myOutputFile << "\n\n corners_nodes: ";
cout << "\n\n edges_nodes: ";
for (int i = 0; i < corners_nodes.size(); i++) {
//myOutputFile << "wards of district " << i << endl;
cout << "wards of district " << i << endl;
for (int j = 0; j < district_wards[i].size(); j++) {
//myOutputFile1 << district_wards[i][j] << ", ";
cout << district_wards[i][j] << ", ";
}
cout << endl;
//myOutputFile << "edges of district " << i << endl;
cout << "updated edges of district " << i << endl;
for (int j = 0; j < corners_nodes[i].size(); j++) {
//myOutputFile << corners_nodes[i][j] << ", ";
cout << corners_nodes[i][j] << ", ";
}
cout << endl;
}
}
/////////////////////////////////////////////
//find each ward's district***************///
//////////////////////////////////////////////
for (int ii=0; ii < numSubs; ii++ ){
myCorners.clear();
for (int jj=0; jj < noDistricts; jj++ ){
if (binitial[ii][jj] == 1) {
myCorners.push_back(jj);
break;
}
}
ward_districts.push_back(myCorners);
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////neighbors of each ward//////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////////////////
neighbors.resize(numSubs);
for(i=0; i<numSubs; i++){
for(j=0; j<numSubs; j++){
if(adjacency[i][j]==1){
neighbors[i].push_back(j);
}
}
}
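// Explanatory note: the dense adjacency matrix is converted to per-ward adjacency lists here
// so later neighbor scans cost O(degree) per ward instead of O(numSubs). In effect:
//   neighbors[i] = { j : adjacency[i][j] == 1 }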
//for (j=0; j<neighbors[4250].size(); j++) {
//cout << "neighbors[4250]" << neighbors[4250][j] << endl;
//}
//////////////////////////////////////////////////////////////////////////
//*
int el;
for(i=0; i<numSubs; i++){
//cout << "i----------------: " << i << endl;
for (auto j = MySplit[i].begin(); j != MySplit[i].end(); ++j) {
el=0;
//cout << "j----------------: " << *j << endl;
for (auto k = neighbors[*j].begin(); k != neighbors[*j].end(); ++k) {
//cout << "k----------------: " << *k << endl;
if(std::find( MySplit[i].begin(), MySplit[i].end(), *k) == MySplit[i].end() && *k != i){
continue;
}
else{
el=el+1;
}
}
if (el==0){
cout<< "Suspect Splt Ward: "<< *j << endl;
}
}
}
//*/
//////////////////////////////////////////////////////////////////////////////////////////////////////////////
//double currentDistPenalty = firstPenalty;
double bestDistPenalty = firstPenalty;
double bestDistPenaltyP = firstPenaltyP;
double bestDistPenaltyV = firstPenaltyV;
double bestDistPenaltyCom = firstPenaltyCom;
double bestDistPenaltyR = firstPenaltyR;
//double OriginalcurrentDistPenalty = OriginalFirstPenalty;
double OriginalBestDistPenalty = OriginalFirstPenalty;
double OriginalBestDistPenaltyP = OriginalFirstPenaltyP;
double OriginalBestDistPenaltyV = OriginalFirstPenaltyV;
double OriginalBestDistPenaltyCom = OriginalFirstPenaltyCom;
double OriginalBestDistPenaltyR = OriginalFirstPenaltyR;
double bestBeforeNormalPenalty=beforeNormalFirstPenalty ;
double bestBeforeNormalP= OriginalFirstPenaltyP;
double bestBeforeNormalV= OriginalFirstPenaltyV;
double bestBeforeNormalCom= OriginalFirstPenaltyCom ;
double bestBeforeNormalR= OriginalFirstPenaltyR;
double nextDistPenalty; //normalizedWeightedNextPenalty
double nextDistPenaltyP; //normalizedWeightedNextPopulationPenalty
double nextDistPenaltyV; //normalizedWeightedNextPliticalFairnessPenalty
double nextDistPenaltyCom; //normalizedWeightedNextCompactnessPenalty
double nextDistPenaltyR;
double OriginalNextDistPenalty; //normalizedUnWeightedNextPenalty
double OriginalNextDistPenaltyP; //normalizedUnWeightedNextPopulationPenalty
double OriginalNextDistPenaltyV; //normalizedUnWeightedNextPliticalFairnessPenalty
double OriginalNextDistPenaltyCom; //normalizedUnWeightedNextCompactnessPenalty
double OriginalNextDistPenaltyR;
double beforeNormalPenalty;//notNormalizedUnWeightedNextPenalty
double beforeNormalPop; //notNormalizedUnWeightedNextPopulationPenalty
double beforeNormalVote; //notNormalizedUnWeightedNextPliticalFairnessPenalty
double beforeNormalCom; //notNormalizedUnWeightedNextCompactnessPenalty
double beforeNormalRes;
//////////////////////////////////////////////
//The SA algorithm **********************/////
/////////////////////////////////////////////
int giving_node;
int giving_node_inside;
int giving_district;
int taking_node;
int taking_node_inside;
int taking_district;
int xx=0;
int inside;
int splt;
//Time management
double bestTime; // time when the best answer is found
double runTime=0; // Algorithm running time
unsigned long int neighbortime=0; // Total time for finding the neighbors
unsigned long int neighbortime00=0;
unsigned long int neighbortime01=0;
unsigned long int neighbortime02=0;
unsigned long int neighbortime03=0;
unsigned long int contiguityTime=0; // Total time for checking contiguity
unsigned long int contiguityTime00=0;
unsigned long int contiguityTime01=0;
unsigned long int contiguityTime02=0;
unsigned long int contiguityTime03=0;
unsigned long int feasibleTime=0; // Total time for finding neighbors and checking contiguity (checking feasibility)
unsigned long int feasibleTime00=0;
unsigned long int feasibleTime01=0;
unsigned long int feasibleTime02=0;
unsigned long int feasibleTime03=0;
unsigned long int Penaltycaltime=0; // Total time for penalty calculation
unsigned long int Penaltycaltime00=0;
unsigned long int Penaltycaltime01=0;
unsigned long int Penaltycaltime02=0;
unsigned long int Penaltycaltime03=0;
unsigned long int AcceptanceTime=0; // Total time for evaluation of answers
unsigned long int AcceptanceTime00=0;
unsigned long int AcceptanceTime01=0;
unsigned long int AcceptanceTime02=0;
unsigned long int AcceptanceTime03=0;
vector<double> HighPopNodes; // high population deviation
vector<double> HighPop; // high population
vector<double> LowPopNodes; // low population deviation
//vector<double> HighGapNodes; // high eff gap
//vector<double> LowGapNodes; // low eff gap
vector<int> MyKK , MyNN, Mygt, Mygg, Mytg, Mytt; // Vectors for the neighbors of giving and taking nodes
vector<int> giving_node_split , taking_node_split ;
vector<int> other_contiguity_check;
//vector<int> giveSpltDist , takeSpltDist ;
initialtime += (time(NULL)-myTime0);
//////////////////////////////////////////////////////////////
///Running Algorithm**************************////////////////
/////////////////////////////////////////////////////////////
//while (temperature > FinalTemp) // We are using time limit so we no longer need this loop
//{
//cout << "FDem=" << FDem << endl;
//cout <<"FRep=" <<FRep << endl;
while (runTime < algorithmRunTime) // this loop can be removed to run without a time limit
{
while (iterations < NUM_ITERATIONS) //Adding another inner loop over iterations
{
//If desired, display the current districts under investigation.//iterations == 0 || iterations == NUM_ITERATIONS - 1
if (1 == 0)
{
cout << "\t" << "Starting a new iteration...we've already made "
<< iterations << " iterations and taken " << steps << " steps. Current temperature = " << temperature << endl;
cout << "\t" << "The current districts is: ";
for (i=0;i<numSubs;i++)
for (j=0;j<noDistricts;j++)
{
cout << binitial[j][i] << ",";
}
cout << endl;
//cout << "\t" << "CurrentdistPenalty = " << currentDistPenalty << endl;
}
///////////////////////////////////////////////////////////////////////////
long int myTime1 = time(NULL); // for feasiblity
int Cont=0;
//Setting up the next solution
do {
long int myTime2 = time(NULL); //for finding neighbor
long int myTime2_00 = time(NULL);
reject = false;
districts_to_check_contiguity.clear();
other_contiguity_check.clear();
noOfGeneration=noOfGeneration+1;
//////////////////////////////////////////////////////////////////////////////////////
// find a random border ward (micro-district) from a random district and give it to a neighboring district that shares a boundary with the selected ward
if ((noOfGeneration%10)>8){
cout << "listsizeHP=" << listsizeHP << endl;
}
int q;
MyKK.clear();
MyNN.clear();
Mygg.clear();
Mygt.clear();
Mytg.clear();
Mytt.clear();
giving_node_split.clear();
taking_node_split.clear();
//giveSpltDist.clear();
//takeSpltDist.clear();
/*
//Sort districts with high population deviation based on their population
vector< pair <double, int >> vect; // Pair vector
//pair sort based on population
for (int i=0; i<HighPopNodes.size(); i++)
vect.push_back(make_pair(HighPop[i], HighPopNodes[i]));
//std::sort(LowGapNodes.begin(), LowGapNodes.end());
std::sort(vect.begin(), vect.end(), sortinrev);
//std::reverse(vect.begin(), vect.end());
/*
for (int i=0; i<HighPopNodes.size(); i++)
{
cout << vect[i].first << " "
<< vect[i].second << endl;
}
//*/
int r= rand()%10;
// find giving and taking districts and nodes
if (listsizeHP >= 1 ) {
/*
//int r= rand()%10;
if ( listsizeHP < 10){
q= rand() % listsizeHP;
giving_district = vect[q].second;
//giving_district = HighPopNodes[rand() % listsizeHP];
}
else{
//if (r<5){
//q= rand() % abs(listsizeHP/(rand()%8+1));
q= rand() % 10;
giving_district = vect[q].second;
// }
// else{
// q= rand() % abs(listsizeHP/10);
// giving_district = vect[q].second;
// }
}
//*/
//giving_district = vect[q].second;
if (r>3){
giving_district = HighPopNodes[rand() % listsizeHP];
}
else{
giving_district = rand() % noDistricts;
}
}
else {
giving_district = rand() % noDistricts;
}
//*
giving_node=corners_nodes[giving_district][rand() % corners_nodes[giving_district].size()];
//cout << "giving_node" << giving_node << endl;
//cout << "giving_district" << giving_district << endl;
inside=-1;
splt=-1;
if (Popvect[giving_district] > Pbar ){
if(accompanyingWards[giving_node].size()>0){
giving_node_inside=accompanyingWards[giving_node][0];
inside=1;
}
// cout << "giving_node" << giving_node << endl;
//*
if(splitWards[giving_node].size()>0){
for (auto i = splitWards[giving_node].begin(); i != splitWards[giving_node].end(); ++i) {
//cout << "splitWards[giving_node]" << *i << endl;
//cout <<"splitWards[giving_node]_dist=" << ward_districts[*i][0] << endl;
giving_node_split.push_back(*i);
}
splt=1;
}
}
if (splt==1){
for (auto i = giving_node_split.begin(); i != giving_node_split.end(); ++i) {
other_contiguity_check.push_back(ward_districts[*i][0]);
//giveSpltDist.push_back(ward_districts[*i][0]);
}
}
neighbortime00 += (time(NULL)-myTime2_00);
long int myTime2_01 = time(NULL);
////////////////////////////////////////////////////
//////////////////////////////////////////////////////
for (auto i = neighbors[giving_node].begin(); i!= neighbors[giving_node].end(); ++i) {
//cout << *i << endl;
if(std::find(district_wards[giving_district].begin(), district_wards[giving_district].end(), *i) == district_wards[giving_district].end() ){
//cout << *i << endl;
MyKK.push_back(*i);
}
else{
continue;
}
}
/////////////////////////////////////////
//*
if (splt==1){
for (auto k = giving_node_split.begin(); k!= giving_node_split.end(); ++k) {
//MyKK.erase(std::remove(begin(MyKK), end(MyKK), *k), end(MyKK));
for (auto i = neighbors[*k].begin(); i!= neighbors[*k].end(); ++i) {
//if(std::find(other_contiguity_check.begin(), other_contiguity_check.end(), ward_districts[*i][0]) == other_contiguity_check.end() && ward_districts[*i][0]!=giving_district ){
if(std::find(district_wards[ward_districts[*k][0]].begin(), district_wards[ward_districts[*k][0]].end(), *i) == district_wards[ward_districts[*k][0]].end() && ward_districts[*i][0]!=giving_district ){
//if(std::find(giving_node_split.begin(), giving_node_split.end(), *i) == giving_node_split.end() && *i!=giving_node ){
//cout << *i << endl;
MyKK.push_back(*i);
//}
}
else{
continue;
}
}
}
}
//*/
///////////////////////////////////////////
//*/
/*
cout << endl;
cout <<"MyKK _before:" << endl;
for (auto k = MyKK.begin(); k!= MyKK.end(); ++k)
{
cout << *k << ", " ;
}
//*/
//cout <<"MyKK.size()1=" << MyKK.size() << endl;
std::sort(MyKK.begin(), MyKK.end());
MyKK.erase(std::unique(MyKK.begin(), MyKK.end()), MyKK.end());
//cout << endl; // leftover debug output, disabled
//cout <<"MyKK.size()2=" << MyKK.size() << endl;
neighbortime01 += (time(NULL)-myTime2_01);
long int myTime2_02 = time(NULL);
////////////////////////////////////////////////////////
//*
taking_node = MyKK[rand() % (MyKK.size())];
taking_district=ward_districts[taking_node][0];
//cout << "taking_node=" << taking_node << endl;
//cout <<"taking_district="<< taking_district<< endl;
//*/
if (Popvect[giving_district] <= Pbar ){
if(accompanyingWards[taking_node].size()>0){
taking_node_inside=accompanyingWards[taking_node][0];
inside=2;
}
//*
if(splitWards[taking_node].size()>0){
for (auto i = splitWards[taking_node].begin(); i != splitWards[taking_node].end(); ++i) {
//if (*i != giving_node){
//cout << "splitWards[taking_node]=" << *i << endl;
//cout <<"splitWards[taking_node]_dist=" << ward_districts[*i][0] << endl;
taking_node_split.push_back(*i);
//}
}
splt=2;
}
}
std::sort(giving_node_split.begin(), giving_node_split.end());
giving_node_split.erase(std::unique(giving_node_split.begin(), giving_node_split.end()), giving_node_split.end());
std::sort(taking_node_split.begin(), taking_node_split.end());
taking_node_split.erase(std::unique(taking_node_split.begin(), taking_node_split.end()), taking_node_split.end());
if (splt==1){
giving_node_split.erase(std::remove(begin(giving_node_split), end(giving_node_split), taking_node), end(giving_node_split));
}
if (splt==2){
taking_node_split.erase(std::remove(begin(taking_node_split), end(taking_node_split), giving_node), end(taking_node_split));
}
if (splt==2){
for (auto i = taking_node_split.begin(); i != taking_node_split.end(); ++i) {
other_contiguity_check.push_back(ward_districts[*i][0]);
//takeSpltDist.push_back(ward_districts[*i][0]);
}
}
if (splt!=-1 && other_contiguity_check.size()>0){
for (auto i = other_contiguity_check.begin(); i!= other_contiguity_check.end(); ++i) {
districts_to_check_contiguity.push_back(*i);
}
}
districts_to_check_contiguity.push_back(taking_district);
districts_to_check_contiguity.push_back(giving_district);
std::sort(districts_to_check_contiguity.begin(), districts_to_check_contiguity.end());
districts_to_check_contiguity.erase(std::unique(districts_to_check_contiguity.begin(), districts_to_check_contiguity.end()), districts_to_check_contiguity.end());
neighbortime02 += (time(NULL)-myTime2_02);
////////////////////////////////Test//////////////////////////////
long int myTime2_03 = time(NULL);
if (Popvect[giving_district] > Pbar ){
given=true;
//cout << "Done0" << endl;
if (neighbor_output==true){
cout << "district [" << giving_district <<"] gives node ["<<giving_node <<"] to district ["<< taking_district <<"]" << endl;
}
// add the given node to taken district
Tempdistrict_wards[taking_district].push_back(giving_node);
// remove given node from given district
Tempdistrict_wards[giving_district].erase(std::remove(begin(Tempdistrict_wards[giving_district]), end(Tempdistrict_wards[giving_district]), giving_node), end(Tempdistrict_wards[giving_district]));
if (inside==1){
// add the given node to taken district
Tempdistrict_wards[taking_district].push_back(giving_node_inside);
// remove given node from given district
Tempdistrict_wards[giving_district].erase(std::remove(begin(Tempdistrict_wards[giving_district]), end(Tempdistrict_wards[giving_district]), giving_node_inside), end(Tempdistrict_wards[giving_district]));
}
if (splt==1){
for (auto i = giving_node_split.begin(); i != giving_node_split.end(); ++i) {
// remove given node from given district
Tempdistrict_wards[ward_districts[*i][0]].erase(std::remove(begin(Tempdistrict_wards[ward_districts[*i][0]]), end(Tempdistrict_wards[ward_districts[*i][0]]), *i), end(Tempdistrict_wards[ward_districts[*i][0]]));
// add the given node to taken district
Tempdistrict_wards[taking_district].push_back(*i);
}
}
///////////////////////////////////////////>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
// find the neighbors of the giving node in the giving district and in the taking district
for (auto i = district_wards[giving_district].begin(); i != district_wards[giving_district].end(); ++i) {
if ( adjacency [*i][giving_node] == 1) {
Mygg.push_back(*i);
}
}
for (auto i = district_wards[taking_district].begin(); i != district_wards[taking_district].end(); ++i) {
if ( adjacency [*i][giving_node] == 1) {
Mygt.push_back(*i);
}
}
if (splt==1){
//*
for (auto k = giving_node_split.begin(); k!= giving_node_split.end(); ++k) {
for (auto i = district_wards[ward_districts[*k][0]].begin(); i != district_wards[ward_districts[*k][0]].end(); ++i) {
if ( adjacency [*i][*k] == 1) {
Mygg.push_back(*i);
}
}
}
//*/
for (auto i = district_wards[taking_district].begin(); i!= district_wards[taking_district].end(); ++i)
for (auto k = giving_node_split.begin(); k!= giving_node_split.end(); ++k) {
if ( adjacency [*k][*i] == 1) {
Mygt.push_back(*i);
}
}
}
}
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
else
{
given=false;
if (neighbor_output==true){
cout << "district ["<<taking_district<<"] gives node ["<<taking_node<<"] to district ["<<giving_district <<"]"<< endl;
}
//add the taken node to the low pop district
Tempdistrict_wards[giving_district].push_back(taking_node);
//remove the taken node from high pop district
Tempdistrict_wards[taking_district].erase(std::remove(begin(Tempdistrict_wards[taking_district]), end(Tempdistrict_wards[taking_district]), taking_node), end(Tempdistrict_wards[taking_district]));
if (inside==2){
//add the taken node to the low pop district
Tempdistrict_wards[giving_district].push_back(taking_node_inside);
//remove the taken node from high pop district
Tempdistrict_wards[taking_district].erase(std::remove(begin(Tempdistrict_wards[taking_district]), end(Tempdistrict_wards[taking_district]), taking_node_inside), end(Tempdistrict_wards[taking_district]));
}
if (splt==2){
for (auto i = taking_node_split.begin(); i != taking_node_split.end(); ++i) {
//remove the taken node from high pop district
Tempdistrict_wards[ward_districts[*i][0]].erase(std::remove(begin(Tempdistrict_wards[ward_districts[*i][0]]), end(Tempdistrict_wards[ward_districts[*i][0]]), *i), end(Tempdistrict_wards[ward_districts[*i][0]]));
//add the taken node to the low pop district
Tempdistrict_wards[giving_district].push_back(*i);
}
}
/////////////////////////////////////////////////>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
// find the neighbors of the taking node in the giving district and in the taking district
for (auto i = district_wards[giving_district].begin(); i != district_wards[giving_district].end(); ++i) {
if ( adjacency [*i][taking_node] == 1) {
Mytg.push_back(*i);
}
}
for (auto i = district_wards[taking_district].begin(); i != district_wards[taking_district].end(); ++i) {
if ( adjacency [*i][taking_node] == 1) {
Mytt.push_back(*i);
}
}
if (splt==2){
for (auto k = taking_node_split.begin(); k!= taking_node_split.end(); ++k) {
for (auto i = district_wards[ward_districts[*k][0]].begin(); i != district_wards[ward_districts[*k][0]].end(); ++i) {
if ( adjacency [*i][*k] == 1) {
Mytt.push_back(*i);
}
}
}
for (auto i = district_wards[giving_district].begin(); i!= district_wards[giving_district].end(); ++i)
for (auto k = taking_node_split.begin(); k!= taking_node_split.end(); ++k) {
if ( adjacency [*k][*i] == 1) {
Mytg.push_back(*i);
}
}
}
}
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
neighbortime03 += (time(NULL)-myTime2_03);
neighbortime += (time(NULL)-myTime2);
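// Explanatory note on the move just generated: a border ward of a "giving" district
// (preferentially one with high population deviation) is selected; if the giving district is
// above the target population Pbar it donates that ward, together with any inside or split
// wards glued to it, to an adjacent "taking" district, otherwise it instead takes a border
// ward from the adjacent district. Every district touched by the move is recorded in
// districts_to_check_contiguity for the contiguity test below.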
//////////////////////////////////////////////////////////////
if (neighbor_output==true){
//*
cout << "giving_district=" << giving_district << endl;
cout << "giving_node=" << giving_node << endl;
cout << "taking_district=" << taking_district << endl;
cout << "taking_node=" << taking_node << endl;
//*/
}
////////////////////////////////////////
// Checking for contiguity*********//////
////////////////////////////////////////
long int myTime3 = time(NULL); //for checking contiguity
long int myTime3_00 = time(NULL);
if (contiguety_output==true){
cout << endl << "districts_to_check_contiguity: " << endl;
}
for (auto i = districts_to_check_contiguity.begin(); i != districts_to_check_contiguity.end(); ++i) {
subs_in_district.clear();
traversed_nodes.clear();
for (auto j = Tempdistrict_wards[*i].begin(); j!= Tempdistrict_wards[*i].end(); ++j) {
subs_in_district.push_back(*j);
//cout << *j << " ";
}
contiguityTime00 += (time(NULL)-myTime3_00);
long int myTime3_01 = time(NULL);
// start from the first sub and keep traversing nodes recursively
traverseSubs( subs_in_district[0], adjacency );
std::sort(traversed_nodes.begin(), traversed_nodes.end());
std::sort(subs_in_district.begin(), subs_in_district.end());
if (contiguety_output==true){
//myOutputFile2 << "traversed_nodes rearranged: ";
cout << "traversed_nodes rearranged: ";
for (auto ii = traversed_nodes.begin(); ii != traversed_nodes.end(); ++ii) {
//myOutputFile2 << *ii << " ";
cout << *ii << " ";
}
//myOutputFile2 << "subs_district rearranged: ";
cout << "subs_district rearranged: ";
for (auto ii = subs_in_district.begin(); ii != subs_in_district.end(); ++ii) {
//myOutputFile2 << *ii << " ";
cout << *ii << " ";
}
//myOutputFile2 << endl;
cout << endl;
}
contiguityTime01 += (time(NULL)-myTime3_01);
long int myTime3_02 = time(NULL);
if (traversed_nodes == subs_in_district)
{
if (contiguety_output==true){
//myOutputFile2 << " ** District " << *i << " Contiguous **" << endl;
cout << " ** District " << *i << " Contiguous **" << endl;
}
}
else {
//Temporary Part //--------------------------------------------------------------------------------
if(given==true){
if(std::find(myNotContiguousWards.begin(), myNotContiguousWards.end(), giving_node) == myNotContiguousWards.end()){
myNotContiguousWards.push_back(giving_node);
}
}
else{
if(std::find(myNotContiguousWards.begin(), myNotContiguousWards.end(), giving_node) == myNotContiguousWards.end()){
myNotContiguousWards.push_back(taking_node);
}
}
//--------------------------------------------------------------------------------------------------
if (contiguety_output==true){
//myOutputFile2 << " ** District " << *i << " ** NOT Contiguous! **" << endl;
cout << " ** District " << *i << " ** NOT Contiguous! **" << endl;
}
//cout << "No" << endl;
reject = true;
/////////////////////////////////////////
// if it is not contiguous, don't exchange the nodes
//Tempdistrict_wards[taking_district]=district_wards[taking_district];
//Tempdistrict_wards[giving_district]=district_wards[giving_district];
for (auto i = districts_to_check_contiguity.begin(); i != districts_to_check_contiguity.end(); ++i) {
Tempdistrict_wards[*i]=district_wards[*i];
}
////////////////////////////////////////
break;
}
contiguityTime02 += (time(NULL)-myTime3_02);
}
cout << endl;
contiguityTime += (time(NULL)-myTime3);
} while (reject == true);
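// Explanatory note (assuming traverseSubs, defined elsewhere in this file, performs a graph
// traversal over adjacency[][] and appends visited wards to traversed_nodes): a tentative
// district is contiguous exactly when the sorted set of traversed wards equals the sorted
// set of wards assigned to it. A non-contiguous result restores Tempdistrict_wards from
// district_wards, sets reject = true, and forces a new candidate move.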
// Cont=0;
feasibleTime += (time(NULL) - myTime1) ;
//////////////////////////////////////////////////////////////////////////////////////
//Evaluating objective value for the neighboring feasible solution just created ****//
/////////////////////////////////////////////////////////////////////////////////////
long int myTime4=time(NULL); //Penalty calculation time
// Penalty of new set of districts
distPop.resize(numSubs+1);
distDem.resize(numSubs+1);
distPerimeter.resize(numSubs+1);
distArea.resize(numSubs+1);
if ( given==true){
TPopvect[giving_district] = TPopvect[giving_district] - subPop[giving_node];
TPopvect[taking_district] = TPopvect[taking_district] + subPop[giving_node];
TDemvect[giving_district] = TDemvect[giving_district] - democrats[giving_node];
TDemvect[taking_district] = TDemvect[taking_district] + democrats[giving_node];
TRepvect[giving_district] = TRepvect[giving_district] - (voter[giving_node]-democrats[giving_node]);
TRepvect[taking_district] = TRepvect[taking_district] + (voter[giving_node]-democrats[giving_node]);
TAreavect[giving_district] = TAreavect[giving_district] - area[giving_node];
TAreavect[taking_district] = TAreavect[taking_district] + area[giving_node];
/*
for (auto i = Tempdistrict_wards[giving_district].begin(); i != Tempdistrict_wards[giving_district].end(); ++i) {
if (adjacency[*i][giving_node] == 1) {
if(std::find(accompanyingWards[giving_node].begin(), accompanyingWards[giving_node].end(), *i) == accompanyingWards[giving_node].end()){
Mygg.push_back(*i);
}
}
}
//*/
if (inside==1){
TPopvect[giving_district] = TPopvect[giving_district] - subPop[giving_node_inside];
TPopvect[taking_district] = TPopvect[taking_district] + subPop[giving_node_inside];
TDemvect[giving_district] = TDemvect[giving_district] - democrats[giving_node_inside];
TDemvect[taking_district] = TDemvect[taking_district] + democrats[giving_node_inside];
TRepvect[giving_district] = TRepvect[giving_district] - (voter[giving_node_inside]-democrats[giving_node_inside]);
TRepvect[taking_district] = TRepvect[taking_district] + (voter[giving_node_inside]-democrats[giving_node_inside]);
TAreavect[giving_district] = TAreavect[giving_district] - area[giving_node_inside];
TAreavect[taking_district] = TAreavect[taking_district] + area[giving_node_inside];
}
if (splt==1){
/*
for (auto i = giveSpltDist.begin(); i != giveSpltDist.end(); ++i) {
cout << "giveSpltDist.begin()=" << *i << endl;
}
*/
for (auto k = giving_node_split.begin(); k!= giving_node_split.end(); ++k) {
//cout << "gg ward_districts[*k][0]=" << ward_districts[*k][0] << endl;
TPopvect[ward_districts[*k][0]] = TPopvect[ward_districts[*k][0]] - subPop[*k];
TPopvect[taking_district] = TPopvect[taking_district] + subPop[*k];
TDemvect[ward_districts[*k][0]] = TDemvect[ward_districts[*k][0]] - democrats[*k];
TDemvect[taking_district] = TDemvect[taking_district] + democrats[*k];
TRepvect[ward_districts[*k][0]] = TRepvect[ward_districts[*k][0]] - (voter[*k]-democrats[*k]);
TRepvect[taking_district] = TRepvect[taking_district] + (voter[*k]-democrats[*k]);
TAreavect[ward_districts[*k][0]] = TAreavect[ward_districts[*k][0]] - area[*k];
TAreavect[taking_district] = TAreavect[taking_district] + area[*k];
}
}
}
else{
TDemvect[giving_district]= TDemvect[giving_district] + democrats[taking_node];
TDemvect[taking_district]= TDemvect[taking_district] - democrats[taking_node];
TPopvect[giving_district]= TPopvect[giving_district] + subPop[taking_node];
TPopvect[taking_district]= TPopvect[taking_district] - subPop[taking_node];
TRepvect[giving_district] = TRepvect[giving_district] + (voter[taking_node]-democrats[taking_node]);
TRepvect[taking_district] = TRepvect[taking_district] - (voter[taking_node]-democrats[taking_node]);
TAreavect[giving_district] = TAreavect[giving_district] + area[taking_node];
TAreavect[taking_district] = TAreavect[taking_district] - area[taking_node];
/*
for (auto i = Tempdistrict_wards[taking_district].begin(); i != Tempdistrict_wards[taking_district].end(); ++i) {
if ( adjacency [ *i][taking_node] == 1) {
if(std::find(accompanyingWards[taking_node].begin(), accompanyingWards[taking_node].end(), *i) == accompanyingWards[taking_node].end()){
Mytt.push_back( *i);
}
}
}
//*/
if (inside==2){
TDemvect[giving_district]= TDemvect[giving_district] + democrats[taking_node_inside];
TDemvect[taking_district]= TDemvect[taking_district] - democrats[taking_node_inside];
TPopvect[giving_district]= TPopvect[giving_district] + subPop[taking_node_inside];
TPopvect[taking_district]= TPopvect[taking_district] - subPop[taking_node_inside];
TRepvect[giving_district] = TRepvect[giving_district] + (voter[taking_node_inside]-democrats[taking_node_inside]);
TRepvect[taking_district] = TRepvect[taking_district] - (voter[taking_node_inside]-democrats[taking_node_inside]);
TAreavect[giving_district] = TAreavect[giving_district] + area[taking_node_inside];
TAreavect[taking_district] = TAreavect[taking_district] - area[taking_node_inside];
}
if (splt==2){
for (auto k =taking_node_split.begin(); k!= taking_node_split.end(); ++k) {
TDemvect[giving_district]= TDemvect[giving_district] + democrats[*k];
TDemvect[ward_districts[*k][0]]= TDemvect[ward_districts[*k][0]] - democrats[*k];
TPopvect[giving_district]= TPopvect[giving_district] + subPop[*k];
TPopvect[ward_districts[*k][0]]= TPopvect[ward_districts[*k][0]] - subPop[*k];
TRepvect[giving_district] = TRepvect[giving_district] + (voter[*k]-democrats[*k]);
TRepvect[ward_districts[*k][0]] = TRepvect[ward_districts[*k][0]] - (voter[*k]-democrats[*k]);
TAreavect[giving_district] = TAreavect[giving_district] + area[*k];
TAreavect[ward_districts[*k][0]] = TAreavect[ward_districts[*k][0]] - area[*k];
}
}
}
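// Explanatory note: rather than recomputing district totals from scratch, the move is applied
// incrementally to the T*vect aggregates; population, Democratic and Republican votes, and
// area are subtracted from each district that loses a ward and added to the district that
// gains it, so the update costs O(number of moved wards) instead of O(numSubs * noDistricts).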
///////////////////////////
//Population penalty
for (auto k = districts_to_check_contiguity.begin(); k != districts_to_check_contiguity.end(); ++k) {
distPop[*k] = abs(TPopvect[*k]-Pbar);
}
// political fairness penalty based on first definition
//distDem[giving_district] = abs(TDemvect[giving_district]-AvgNumDemocratsPerDistrict);
//distDem[taking_district] = abs(TDemvect[taking_district]-AvgNumDemocratsPerDistrict);
for (auto k = districts_to_check_contiguity.begin(); k != districts_to_check_contiguity.end(); ++k) {
distArea[*k]= TAreavect[*k];
}
std::sort(Mygg.begin(), Mygg.end());
Mygg.erase(std::unique(Mygg.begin(), Mygg.end()), Mygg.end());
std::sort(Mytt.begin(), Mytt.end());
Mytt.erase(std::unique(Mytt.begin(), Mytt.end()), Mytt.end());
std::sort(Mytg.begin(), Mytg.end());
Mytg.erase(std::unique(Mytg.begin(), Mytg.end()), Mytg.end());
std::sort(Mygt.begin(), Mygt.end());
Mygt.erase(std::unique(Mygt.begin(), Mygt.end()), Mygt.end());
//////////////////////////////////////////////////////////////////////////////////////
///////////////////////Perimeter///////////////////////////////
///////////////////////////////////
if (given==true){
//(1) Update perimeter of given district
for (auto i = Mygg.begin(); i != Mygg.end(); ++i){
//*
if ( std::find(district_wards[giving_district].begin(), district_wards[giving_district].end(), *i) == district_wards[giving_district].end()) {
continue;
}
else{
if(inside==1 && std::find(insideWards.begin(), insideWards.end(), giving_node_inside) == insideWards.end()){
TPerivect[giving_district] += 2*(bLength[*i][giving_node]) - bLength[giving_node][giving_node_inside];
}
else{
TPerivect[giving_district] += 2 * (bLength[*i][giving_node]);
}
}
if (splt!=-1){
for (auto k =giving_node_split.begin(); k!= giving_node_split.end(); ++k) {
if ( std::find(district_wards[ward_districts[*k][0]].begin(), district_wards[ward_districts[*k][0]].end(), *i) == district_wards[ward_districts[*k][0]].end()) {
continue;
}
else{
TPerivect[ward_districts[*k][0]] += 2* (bLength[*i][*k]);
}
}
for (auto i =giving_node_split.begin(); i!= giving_node_split.end(); ++i)
for (auto j =giving_node_split.begin(); j!= giving_node_split.end(); ++j){
if (ward_districts[*i][0]==ward_districts[*j][0]){
TPerivect[ward_districts[*i][0]] += (bLength[*i][*j]);
}
}
}
//*/
}
TPerivect[giving_district] -= perimeter[giving_node];
if (inside!=-1){
TPerivect[giving_district] += -perimeter[giving_node]- perimeter[giving_node_inside];
}
if (splt!=-1){
for (auto k =giving_node_split.begin(); k!= giving_node_split.end(); ++k) {
TPerivect[ward_districts[*k][0]] -= perimeter[*k];
}
}
// >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>.
//(2) Update perimeter of taking district
for (auto i = Mygt.begin(); i != Mygt.end(); ++i){
//*
if ( std::find(district_wards[taking_district].begin(), district_wards[taking_district].end(), *i) == district_wards[taking_district].end()) {
continue;
}
else{
// if(inside==1 && std::find(insideWards.begin(), insideWards.end(), giving_node_inside) == insideWards.end()){
// TPerivect[taking_district] += -2*(bLength[*i][giving_node]) - bLength[giving_node][giving_node_inside];;
// }
// else{
TPerivect[taking_district] += -2*(bLength[*i][giving_node]);
// }
}
if (splt!=-1){
for (auto k =giving_node_split.begin(); k!= giving_node_split.end(); ++k) {
TPerivect[taking_district] += -2*(bLength[*i][*k]);
}
for (auto i =giving_node_split.begin(); i!= giving_node_split.end(); ++i)
for (auto j =giving_node_split.begin(); j!= giving_node_split.end(); ++j)
TPerivect[taking_district] -= (bLength[*i][*j]);
}
//*/
}
TPerivect[taking_district] += perimeter[giving_node];
if (splt!=-1){
for (auto k =giving_node_split.begin(); k!= giving_node_split.end(); ++k) {
TPerivect[taking_district] += perimeter[*k];
}
}
if (inside!=-1){
TPerivect[taking_district] += perimeter[giving_node]+ perimeter[giving_node_inside];
}
}
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
else{
//(1) Update perimeter of taken district
for (auto i = Mytt.begin(); i != Mytt.end(); ++i)
{
//*/
//*
if ( std::find(district_wards[taking_district].begin(), district_wards[taking_district].end(), *i) == district_wards[taking_district].end()) {
continue;
}
else{
if(inside==2 && std::find(insideWards.begin(), insideWards.end(), taking_node_inside) == insideWards.end()){
TPerivect[taking_district] += 2*(bLength[*i][taking_node]) - bLength[taking_node][taking_node_inside];
}
else{
TPerivect[taking_district] +=2* (bLength[*i][taking_node]);
}
}
if (splt!=-1){
for (auto k =taking_node_split.begin(); k!= taking_node_split.end(); ++k) {
if ( std::find(district_wards[ward_districts[*k][0]].begin(), district_wards[ward_districts[*k][0]].end(), *i) == district_wards[ward_districts[*k][0]].end()) {
continue;
}
else{
TPerivect[ward_districts[*k][0]] += 2* (bLength[*i][*k]);
}
}
for (auto i =taking_node_split.begin(); i!= taking_node_split.end(); ++i)
for (auto j =taking_node_split.begin(); j!= taking_node_split.end(); ++j){
if (ward_districts[*i][0]==ward_districts[*j][0]){
TPerivect[ward_districts[*i][0]] += (bLength[*i][*j]);
}
}
}
//*/
}
TPerivect[taking_district] -= perimeter[taking_node];
if (splt!=-1){
for (auto k =taking_node_split.begin(); k!= taking_node_split.end(); ++k) {
TPerivect[ward_districts[*k][0]] -= perimeter[*k];
}
}
if (inside!=-1){
TPerivect[taking_district] += -perimeter[taking_node]- perimeter[taking_node_inside];
}
//>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
//(2) Update perimeter of given district
for (auto i = Mytg.begin(); i != Mytg.end(); ++i){
//*
if ( std::find(district_wards[giving_district].begin(), district_wards[giving_district].end(), *i) == district_wards[giving_district].end()) {
continue;
}
else{
// if(inside==2 && std::find(insideWards.begin(), insideWards.end(), taking_node_inside) == insideWards.end()){
// TPerivect[giving_district] += -2*(bLength[*i][taking_node]) - bLength[giving_node][taking_node_inside];;
// }
// else{
TPerivect[giving_district] += -2*(bLength[*i][taking_node]);
// }
}
if (splt!=-1){
for (auto k =taking_node_split.begin(); k!= taking_node_split.end(); ++k) {
TPerivect[giving_district] += -2*(bLength[*i][*k]);
}
for (auto i =taking_node_split.begin(); i!= taking_node_split.end(); ++i)
for (auto j =taking_node_split.begin(); j!= taking_node_split.end(); ++j)
TPerivect[giving_district] -= (bLength[*i][*j]);
}
//*/
}
TPerivect[giving_district] += perimeter[taking_node];
if (splt!=-1){
for (auto k =taking_node_split.begin(); k!= taking_node_split.end(); ++k) {
TPerivect[giving_district] += perimeter[*k];
}
}
if (inside!=-1){
TPerivect[giving_district] += perimeter[taking_node]+ perimeter[taking_node_inside];
}
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//Perimeter
//distPerimeter[giving_district]= TPerivect[giving_district];
//distPerimeter[taking_district]=TPerivect[taking_district];
for (auto k = districts_to_check_contiguity.begin(); k != districts_to_check_contiguity.end(); ++k) {
distPerimeter[*k]=TPerivect[*k];
}
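// Explanatory sketch of the perimeter update above: when a ward w leaves district A, A's
// perimeter loses w's own outer boundary (perimeter[w]) but gains 2*bLength[w][i] for every
// neighbor i that stays in A, because those shared edges, previously cancelled once from each
// side, are now exposed. Symmetrically, the district B that receives w gains perimeter[w] and
// loses 2*bLength[w][i] for every neighbor i already in B. Split and inside wards attached to
// w receive the same treatment for their own districts.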
//*/
Dem=0;
Rep=0;
for ( int i=0; i< noDistricts ; i++){
if (TRepvect[i] > TDemvect[i]){
Rep=Rep+1;
}
else{
Dem=Dem+1;
}
}
if ((noOfGeneration%10)>8){
cout << Dem << " " << Rep << endl;
}
int Res=0;
//Update penalty of residency
for ( int i=0; i<noDistricts ;i++){
int r=0;
for (auto ii=Address.begin() ; ii < Address.end() ; ii++){
for (auto jj=district_wards[i].begin() ; jj < district_wards[i].end() ; jj++){
if (*jj==*ii){
r=r+1;
}
}
}
if (r>0){
Res=Res+r-1;
}
}
//cout << "Res=" << Res<< endl;
// Update efficiency gap (based on second definition of political fairness)
/*
for (auto i = districts_to_check_contiguity.begin(); i != districts_to_check_contiguity.end(); ++i) {
G = ceil (0.50* (TDemvect[*i]+TRepvect[*i]));
if (TDemvect[*i] >= G) {
TRGapvect[*i] = TRepvect[*i] ;
TDGapvect[*i] = TDemvect[*i] - G;
}
else {
TDGapvect[*i] = TDemvect[*i] ;
TRGapvect[*i] = TRepvect[*i] - G;
}
sumDGap = sumFDGap + TDGapvect[*i] - DGapvect[*i];
sumRGap = sumFRGap + TRGapvect[*i] - RGapvect[*i];
}
//*/
/////////////////////////////////////////////////////////////////////////////////////////////////
PenaltyP.resize(numSubs+1);
PenaltyV.resize(numSubs+1);
PenaltyCom.resize(numSubs+1);
PenaltyR.resize(numSubs+1);
OriginalPenaltyP.resize(numSubs+1);
OriginalPenaltyV.resize(numSubs+1);
OriginalPenaltyCom.resize(numSubs+1);
OriginalPenaltyR.resize(numSubs+1);
// Normalize the values using the first mean and standard deviation
// multiply by 1000 to ease SA performance (just to simplify calculation)
// Original Normalized penalty
//OriginalPenaltyP[giving_district] = 1000 * (((distPop[giving_district]-meanP)/SdP));
//OriginalPenaltyP[taking_district] = 1000 * (((distPop[taking_district]-meanP)/SdP));
for (auto k = districts_to_check_contiguity.begin(); k != districts_to_check_contiguity.end(); ++k) {
OriginalPenaltyP[*k] = 1000 * (((distPop[*k]-meanP)/SdP));
}
//Competitiveness
//OriginalPenaltyV[giving_district] = 1000 * (((distDem[giving_district]-meanV)/SdV));
//OriginalPenaltyV[taking_district] = 1000 * (((distDem[taking_district]-meanV)/SdV));
//OriginalPenaltyCom[giving_district] = 1000 *((((pow((distPerimeter[giving_district]),2)/distArea[giving_district])-meanC)/SdC));
//OriginalPenaltyCom[taking_district] = 1000 * ((((pow((distPerimeter[taking_district]),2)/distArea[taking_district])-meanC)/SdC));
for (auto k = districts_to_check_contiguity.begin(); k != districts_to_check_contiguity.end(); ++k) {
OriginalPenaltyCom[*k] = 1000 * ((((pow((distPerimeter[*k]),2)/distArea[*k])-meanC)/SdC));
}
//Political fairness penalty based on efficiency gap (second definition)
//beforeNormalVote = abs(sumDGap/VD-sumRGap/VR)/2;
//Political fairness penalty based on # of seats (third definition)
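// Vote appears to be the statewide Democratic vote share, so ceil((1-Vote)*noDistricts)
// is the Republican seat count a proportional outcome would give; the raw penalty is how
// far the plan's Republican seat total (Rep) deviates from that target.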
beforeNormalVote=abs(Rep - ceil ((1-Vote)*noDistricts));
OriginalNextDistPenaltyV=10000*(beforeNormalVote-meanV)/SdV;
//OriginalNextDistPenaltyV=(beforeNormalVote);
//*/
beforeNormalRes=Res;
OriginalNextDistPenaltyR=1000*(beforeNormalRes-meanR)/SdR;
//OriginalNextDistPenalty = OriginalFirstPenalty;
OriginalNextDistPenaltyP = OriginalFirstPenaltyP;
//OriginalNextDistPenaltyV = OriginalFirstPenaltyV;
OriginalNextDistPenaltyCom = OriginalFirstPenaltyCom;
//nextDistPenalty = firstPenalty;
//nextDistPenaltyP = firstPenaltyP;
//nextDistPenaltyV = firstPenaltyV;
//nextDistPenaltyCom = firstPenaltyCom;
beforeNormalPop =beforeNormalFirstPop;
//beforeNormalVote=beforeNormalFirstVote;
beforeNormalCom =beforeNormalFirstCom;
for (auto i = districts_to_check_contiguity.begin(); i != districts_to_check_contiguity.end(); ++i) {
// Original unweighted Normalized total penalty
OriginalNextDistPenaltyP += OriginalPenaltyP[*i] - OriginalFPenaltyP[*i];
//OriginalNextDistPenaltyV += OriginalPenaltyV[*i]- OriginalFPenaltyV[*i];
OriginalNextDistPenaltyCom += OriginalPenaltyCom[*i] - OriginalFPenaltyCom[*i];
// Original unweighted not-Normalized total penalty
beforeNormalPop += distPop[*i] -distFPop[*i];
//beforeNormalVote += distDem[*i]- distFDem[*i];
beforeNormalCom += pow(distPerimeter[*i],2)/distArea[*i] -pow(distFPerimeter[*i],2)/distFArea[*i];
}
// Original Normalized weighted Total penalty
nextDistPenaltyP = w1*OriginalNextDistPenaltyP;
nextDistPenaltyV = w2*OriginalNextDistPenaltyV;
nextDistPenaltyCom = w3*OriginalNextDistPenaltyCom;
nextDistPenaltyR = w4*OriginalNextDistPenaltyR;
OriginalNextDistPenalty= OriginalNextDistPenaltyP + OriginalNextDistPenaltyV +OriginalNextDistPenaltyCom+OriginalNextDistPenaltyR;
nextDistPenalty = nextDistPenaltyP + nextDistPenaltyV + nextDistPenaltyCom+nextDistPenaltyR;
Penaltycaltime += time(NULL) - myTime4; //Penalty calculation time
//cout << nextDistPenaltyP << "," << nextDistPenaltyV << "," << nextDistPenaltyCom << endl;
//cout << nextDistPenalty << endl;
//cout << "nextDistPenaltyP="<< nextDistPenaltyP << endl;
//cout << "nextDistPenaltyV="<< nextDistPenaltyV << endl;
//cout << "nextDistPenaltyCom="<< nextDistPenaltyCom << endl;
//cout << "nextDistPenaltyR="<< nextDistPenaltyR << endl;ou
if (evaluation_output==true){
cout << "nextDistPenalty="<< nextDistPenalty << endl;
cout << "nextDistPenaltyP="<< nextDistPenaltyP << endl;
cout << "nextDistPenaltyV="<< nextDistPenaltyV << endl;
cout << "nextDistPenaltyCom="<< nextDistPenaltyCom << endl;
cout << "nextDistPenaltyR="<< nextDistPenaltyR << endl;
}
noOfFeasibleSolution=noOfFeasibleSolution+1;
long int myTime5=time(NULL); // checking Possiblity (Acceptance)
if (evaluation_output==true){
cout << "unweighted not normalized NextDistPenaltyV=" << beforeNormalVote << endl;
cout << "unweighted normalized NextDistPenalty=" << OriginalNextDistPenalty << endl;
cout << "Weighted normalized nextDistPenalty=" << nextDistPenalty << endl;
cout << "firstPenalty=" << firstPenalty << endl;
}
//cout << "firstPenalty=" << firstPenalty << endl;
double objValDifference = (nextDistPenalty-firstPenalty);
//double objValDifference = beforeNormalVote-beforeNormalFirstVote;
if (evaluation_output==true){
cout << "difference = " <<objValDifference << endl;
}
///////////////////////////////////////////////////////////////////////////
long int myTime5_00=time(NULL);
if (objValDifference >= 0)
{
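// Metropolis acceptance rule: the candidate plan is no better (objValDifference >= 0),
// so it is accepted with probability exp(-objValDifference / temperature); larger
// deteriorations and lower temperatures both make acceptance less likely, which is what
// steers the simulated annealing search toward better plans as the temperature cools.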
acceptanceProbability = exp((-1.00*objValDifference)/temperature);
if(evaluation_output==true){
/**/ cout << "\t" << "Candidate state's objective value is WORSE by "
/**/ << objValDifference << endl;
/**/ cout << "\t" << "The move to candidate state will be accepted with probability "
/**/ << acceptanceProbability << endl;
}
double randNum=(static_cast<double>(rand()))/(RAND_MAX+1.0); // +1.0 avoids signed overflow when RAND_MAX==INT_MAX (was rand()/32672.0)
if(evaluation_output==true){
/**/ cout << "\t" << "Random number generated is " << randNum;
}
if (randNum < acceptanceProbability)
{
if(detailed_output==true){
/**/ cout << "\t\t\t" << "Candidate is accepted." << endl;
}
accept = true;
steps++;
}
else
{
if(evaluation_output==true){
/**/ cout << "\t\t\t" << "Candidate is rejected." << endl;
}
accept = false;
//cout << "false" << endl;
for (auto i = districts_to_check_contiguity.begin(); i != districts_to_check_contiguity.end(); ++i) {
TDemvect[*i]= Demvect[*i];
TRepvect[*i]= Repvect[*i];
TPopvect[*i]= Popvect[*i];
TAreavect[*i] = Areavect[*i];
TPerivect[*i] = Perivect[*i];
Tempdistrict_wards[*i]=district_wards[*i];
}
}
}
else
{
if(evaluation_output==true){
/**/ cout << "\t" << "Candidate's objective value is BETTER by " << (-1.00)*(objValDifference)
/**/ << ", so candidate is automatically accepted." << endl;
}
accept = true;
steps++;
}
AcceptanceTime00 += time(NULL)-myTime5_00;
if (accept == true)
{
long int myTime5_01=time(NULL);
//cout << "true" << endl;
firstPenalty = nextDistPenalty;
firstPenaltyP = nextDistPenaltyP;
firstPenaltyV = nextDistPenaltyV;
firstPenaltyCom = nextDistPenaltyCom;
firstPenaltyR = nextDistPenaltyR;
OriginalFirstPenalty = OriginalNextDistPenalty ;
OriginalFirstPenaltyP = OriginalNextDistPenaltyP ;
OriginalFirstPenaltyV = OriginalNextDistPenaltyV;
OriginalFirstPenaltyCom = OriginalNextDistPenaltyCom;
OriginalFirstPenaltyR = OriginalNextDistPenaltyR;
beforeNormalFirstPop=beforeNormalPop ;
beforeNormalFirstVote=beforeNormalVote;
beforeNormalFirstRes= beforeNormalRes;
for (auto i = districts_to_check_contiguity.begin(); i != districts_to_check_contiguity.end(); ++i) {
Demvect[*i]= TDemvect[*i];
Repvect[*i]= TRepvect[*i];
Popvect[*i]= TPopvect[*i];
Areavect[*i] = TAreavect[*i];
Perivect[*i] = TPerivect[*i];
// Update the contained wards inside the given and taken districts
district_wards[*i]=Tempdistrict_wards[*i];
}
for (auto i = districts_to_check_contiguity.begin(); i != districts_to_check_contiguity.end(); ++i) {
distFPop[*i] = distPop[*i];
distFDem[*i] = distDem[*i];
distFPerimeter[*i]=distPerimeter[*i];
distFArea[*i]= distArea[*i];
OriginalFPenaltyP[*i]= OriginalPenaltyP[*i];
OriginalFPenaltyV[*i]=OriginalPenaltyV[*i];
OriginalFPenaltyCom[*i]=OriginalPenaltyCom[*i];
OriginalFPenaltyR[*i]=OriginalPenaltyR[*i];
}
AcceptanceTime01 += time(NULL)-myTime5_01;
long int myTime5_02=time(NULL);
vector < double > myTempCorner;
//update the edges of district
//*
if ( given==true ) {
//(1) add the neighbors of the given node to the corner of given district if they aren't already edges
for (auto i=district_wards[giving_district].begin() ; i != district_wards[giving_district].end() ; i++) {
if (adjacency [*i][giving_node]==1) {
if(std::find(corners_nodes[giving_district].begin(), corners_nodes[giving_district].end(), *i) == corners_nodes[giving_district].end()){
corners_nodes[giving_district].push_back(*i);
}
}
}
if (splt==1){
for (auto k = giving_node_split.begin(); k!= giving_node_split.end(); ++k) {
for (auto i = neighbors[*k].begin(); i!= neighbors[*k].end(); ++i) {
if(std::find(giving_node_split.begin(), giving_node_split.end(), *i) == giving_node_split.end()){
if (ward_districts[*k][0]==ward_districts[*i][0] && ward_districts[*k][0]!=taking_district ){
if(std::find(corners_nodes[ward_districts[*k][0]].begin(), corners_nodes[ward_districts[*k][0]].end(), *i) == corners_nodes[ward_districts[*k][0]].end()){
corners_nodes[ward_districts[*k][0]].push_back(*i);
}
}
}
}
}
}
///////////////////////////////////////////////////////////////////////////////////////////////////////////
//(2) erase given node from given district edge list
corners_nodes[giving_district].erase(std::remove(begin(corners_nodes[giving_district]), end(corners_nodes[giving_district]), giving_node), end(corners_nodes[giving_district]));
if (splt==1){
for (auto k = giving_node_split.begin(); k!= giving_node_split.end(); ++k) {
//cout << "ward_districts[*k][0]" << ward_districts[*k][0]<< endl;
if(ward_districts[*k][0]!=taking_district){
corners_nodes[ward_districts[*k][0]].erase(std::remove(begin(corners_nodes[ ward_districts[*k][0]]), end(corners_nodes[ ward_districts[*k][0]]), *k), end(corners_nodes[ ward_districts[*k][0]]));
}
}
}
// update the assigned district of each ward
ward_districts[giving_node][0]=taking_district;
if(inside==1){
ward_districts[giving_node_inside][0]=taking_district;
}
if (splt==1){
for (auto k = giving_node_split.begin(); k!= giving_node_split.end(); ++k) {
//cout << "ward_districts[*k][0] before " << ward_districts[*k][0]<< endl;
ward_districts[*k][0]=taking_district;
// cout << "ward_districts[*k][0] after " << ward_districts[*k][0]<< endl;
}
}
///////////////////////////////////////////////////////////////////////////////////////////////////////////
//(4) add given node to taken district corner
// if(std::find(corners_nodes[taking_district].begin(), corners_nodes[taking_district].end(), giving_node) == corners_nodes[taking_district].end()){
corners_nodes[taking_district].push_back(giving_node);
// }
if (splt==1){
for (auto k = giving_node_split.begin(); k!= giving_node_split.end(); ++k) {
//if(std::find(corners_nodes[taking_district].begin(), corners_nodes[taking_district].end(), *k) == corners_nodes[taking_district].end()){
if(ward_districts[*k][0]!=taking_district){
corners_nodes[taking_district].push_back(*k);
}
}
}
///////////////////////////////////////////////////////////////////////////////////////////////////////////
//(3) update the edges of taken district
myTempCorner.clear();
for (auto i=corners_nodes[taking_district].begin() ; i != corners_nodes[taking_district].end() ; i++) {
if (adjacency [*i][giving_node]==1) {
//cout << "neigbor of given node" << *i << endl;
myTempCorner.push_back(*i);
}
if (splt==1){
for (auto k = giving_node_split.begin(); k!= giving_node_split.end(); ++k) {
if (adjacency [*i][*k]==1) {
//cout << "neigbor of given node" << *i << endl;
myTempCorner.push_back(*i);
}
}
}
}
sort( myTempCorner.begin(), myTempCorner.end() );
myTempCorner.erase(std::unique(myTempCorner.begin(), myTempCorner.end()),myTempCorner.end());
int u;
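// u acts as a flag: for each candidate ward *j adjacent to the moved node, u stays -1 only
// if *j touches no corner node of any other district, i.e. *j has become interior to the
// taking district and can be dropped from its corner (boundary) list.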
for (auto j=myTempCorner.begin(); j!=myTempCorner.end(); j++ ){
u=-1;
for (int i=0 ; i < corners_nodes.size() ; i++) {
for (int k=0 ; k < corners_nodes[i].size() ; k++) {
if(i!= taking_district ){
//if (corners_nodes[i][k]!=giving_node && adjacency [*j][corners_nodes[i][k]]==1){
if (adjacency [*j][corners_nodes[i][k]]==1){
u=0;
break;
// corners_nodes[taking_district].erase(corners_nodes[taking_district].begin()+ *j);
}
}
else{
break;
}
}
if (u==0) {
break;
}
}
if (u==-1) {
// cout << "Removed_node" <<*j << endl;
corners_nodes[taking_district].erase(std::remove(begin(corners_nodes[taking_district]), end(corners_nodes[taking_district]), *j), end(corners_nodes[taking_district]));
}
}
///////////////////////////////////////////////////////////////////////////////////////////////////////////
}
else {
//(1) add the neighbors of the taken node to the corner of taken district
for (auto i=district_wards[taking_district].begin() ; i != district_wards[taking_district].end() ; i++) {
if (adjacency [*i][taking_node]==1){
if ( std::find(corners_nodes[taking_district].begin(), corners_nodes[taking_district].end(), *i) == corners_nodes[taking_district].end()) {
corners_nodes[taking_district].push_back(*i);
}
}
}
if (splt==2){
for (auto k = taking_node_split.begin(); k!= taking_node_split.end(); ++k) {
for (auto i = neighbors[*k].begin(); i!= neighbors[*k].end(); ++i) {
if(std::find(taking_node_split.begin(), taking_node_split.end(), *i) == taking_node_split.end()){
if (ward_districts[*k][0]==ward_districts[*i][0] && ward_districts[*k][0]!=giving_district){
if(std::find(corners_nodes[ward_districts[*k][0]].begin(), corners_nodes[ward_districts[*k][0]].end(), *i) == corners_nodes[ward_districts[*k][0]].end()){
corners_nodes[ward_districts[*k][0]].push_back(*i);
}
}
}
}
}
}
///////////////////////////////////////////////////////////////////////////////////////////////////////////
//(2) erase taken node from taken district
corners_nodes[taking_district].erase(std::remove(begin(corners_nodes[taking_district]), end(corners_nodes[taking_district]), taking_node), end(corners_nodes[taking_district]));
if (splt==2){
for (auto k = taking_node_split.begin(); k!= taking_node_split.end(); ++k) {
//cout << "ward_districts[*k][0] " << ward_districts[*k][0]<< endl;
if(ward_districts[*k][0]!=giving_district){
corners_nodes[ ward_districts[*k][0]].erase(std::remove(begin(corners_nodes[ ward_districts[*k][0]]), end(corners_nodes[ ward_districts[*k][0]]), *k), end(corners_nodes[ ward_districts[*k][0]]));
}
}
}
// first update the assigned district of each ward
ward_districts[taking_node][0]=giving_district;
if(inside==2){
ward_districts[taking_node_inside][0]=giving_district;
}
if (splt==2){
for (auto k = taking_node_split.begin(); k!= taking_node_split.end(); ++k) {
// cout << "ward_districts[*k][0] before " << ward_districts[*k][0]<< endl;
ward_districts[*k][0]=giving_district;
// cout << "ward_districts[*k][0] after " << ward_districts[*k][0]<< endl;
}
}
//cout << "first " << endl;
///////////////////////////////////////////////////////////////////////////////////////////////////////////
// (4)add taken node to given district corner
//if(std::find(corners_nodes[giving_district].begin(), corners_nodes[giving_district].end(), taking_node) == corners_nodes[giving_district].end()){
corners_nodes[giving_district].push_back(taking_node);
//}
if (splt==2){
for (auto k = taking_node_split.begin(); k!= taking_node_split.end(); ++k) {
//if(std::find(corners_nodes[giving_district].begin(), corners_nodes[giving_district].end(), *k) == corners_nodes[giving_district].end()){
if(ward_districts[*k][0]!=giving_district){
corners_nodes[giving_district].push_back(*k);
}
}
}
///////////////////////////////////////////////////////////////////////////////////////////////////////////
//(3) update the edges of given district
myTempCorner.clear();
for (auto i=corners_nodes[giving_district].begin() ; i != corners_nodes[giving_district].end() ; i++) {
if ( adjacency [*i][taking_node]==1 ) {
// cout << "neigbor of given node" << *i << endl;
myTempCorner.push_back(*i);
}
if (splt==2){
for (auto k = taking_node_split.begin(); k!= taking_node_split.end(); ++k) {
if (adjacency [*i][*k]==1) {
// cout << "neigbor of given node" << *i << endl;
myTempCorner.push_back(*i);
}
}
}
}
sort( myTempCorner.begin(), myTempCorner.end() );
myTempCorner.erase(std::unique(myTempCorner.begin(), myTempCorner.end()),myTempCorner.end());
int u;
for (auto j=myTempCorner.begin(); j!=myTempCorner.end(); j++ ){
u=-1;
for (int i=0 ; i < corners_nodes.size() ; i++) {
for (int k=0 ; k < corners_nodes[i].size() ; k++) {
if(i!= giving_district ){
//if (corners_nodes[i][k]!= taking_node && adjacency [*j][corners_nodes[i][k]]==1){
if (adjacency [*j][corners_nodes[i][k]]==1){
u=0;
break;
// corners_nodes[taking_district].erase(corners_nodes[taking_district].begin()+ *j);
}
}
else{
break;
}
}
if (u==0) {
break;
}
}
if (u==-1) {
// cout << "Removed_node" <<*j << endl;
corners_nodes[giving_district].erase(std::remove(begin(corners_nodes[giving_district]), end(corners_nodes[giving_district]), *j), end(corners_nodes[giving_district]));
}
}
}
//*/
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
if(edges_output==true){
for (auto ii = districts_to_check_contiguity.begin(); ii != districts_to_check_contiguity.end(); ++ii) {
cout << "edges of district" << *ii << endl;
for (int j = 0; j < corners_nodes[*ii].size(); j++) {
cout << corners_nodes[*ii][j] << ",";
}
cout << endl;
}
}
AcceptanceTime02 += time(NULL)-myTime5_02;
long int myTime5_03=time(NULL);
HighPopNodes.clear();
HighPop.clear();
LowPopNodes.clear();
for (int i = 0; i <noDistricts ; i++){
if (abs(Popvect[i]-Pbar)/Pbar >= populationDeviation){
HighPopNodes.push_back(i);
HighPop.push_back(abs(Popvect[i]-Pbar)/Pbar);
}
}
//cout << "listsizeHP---------" << listsizeHP << endl;
listsizeHP = HighPopNodes.size();
if(detailed_output==true){
cout << "listsizeHP---------" << listsizeHP << endl;
}
AcceptanceTime03 += time(NULL)-myTime5_03;
}
iterations++;
//Compare the current set of districts (after "iterations" iterations and "steps" steps are taken)
//to the best set: the best set is replaced whenever every district meets the population-deviation limit (listsizeHP==0) or, failing that, whenever the current penalty beats the best one.
//if (firstPenalty < bestDistPenalty)
//if (firstPenaltyV < bestDistPenaltyV)
if ((listsizeHP==0) || (listsizeHP!=0 && firstPenalty < bestDistPenalty ) )
{
bestIteration = iterations;
bestStep = steps;
best_district_wards.clear();
best_district_wards.resize(noDistricts);
for (int i=0 ; i < district_wards.size(); i++){
for (int j=0 ; j < district_wards[i].size(); j++){
best_district_wards[i].push_back(district_wards[i][j]);
}
}
bestDistPenalty = firstPenalty;
bestDistPenaltyP = firstPenaltyP;
bestDistPenaltyV = firstPenaltyV;
bestDistPenaltyCom = firstPenaltyCom;
bestDistPenaltyR = firstPenaltyR;
OriginalBestDistPenalty = OriginalFirstPenalty;
OriginalBestDistPenaltyP = OriginalFirstPenaltyP;
OriginalBestDistPenaltyV = OriginalFirstPenaltyV;
OriginalBestDistPenaltyCom = OriginalFirstPenaltyCom;
OriginalBestDistPenaltyR = OriginalFirstPenaltyR;
bestBeforeNormalPenalty = beforeNormalFirstPop + beforeNormalFirstVote+ beforeNormalFirstCom+beforeNormalFirstRes;
bestBeforeNormalP = beforeNormalFirstPop;
bestBeforeNormalV = beforeNormalFirstVote;
bestBeforeNormalCom = beforeNormalFirstCom;
bestBeforeNormalR = beforeNormalFirstRes;
bestTime = time(NULL) - startTime;
}
AcceptanceTime += time(NULL)-myTime5;
runTime = time(NULL) - startTime;
if(evaluation_output==true){
cout << "temperature=" << temperature << endl;
cout << "steps=" << steps << endl;
}
if (accept==true && listsizeHP==0){
break;
//w1 = 0;
//w2 = 1;
//w3 = 1;
//firstPenalty= firstPenaltyStore/100;
}
} //while (iterations < NUM_ITERATIONS)
iterations=0;
/*
////////Reheat//////////
if (xx<=10)
{
if (steps<= 70000)
{
temperature*=TEMP_FACTOR;
}
else
{
//temperature= 0.3*START_TEMPERATURE;
temperature=10;
steps=30000;
int xx=xx+1;
}
}
else
{
//*/
temperature*=TEMP_FACTOR;
//}
if (accept == true&& listsizeHP==0){
break;
}
} //while (runTime < .... )
//.................
//} //while (temperature > FinalTemp)
///////////////////////////////////////
// Print result***************////////
/////////////////////////////////////
cout << endl;
cout << "\t" << "The best score was achieved after iteration " << bestIteration
<< " after " << bestStep << " steps." << endl;
cout << "\t" << "Objective value of best state = " << bestDistPenalty << endl;
myOutputFile << "-------------------------------------------------------------" << endl;
myOutputFile << "Population:" << endl;
double VoteP=0;
double VoteP1=0;
double VoteP2=0;
Rep=0;
Dem=0;
for (int i=0 ; i < best_district_wards.size(); i++){
double P=0;
double S=0;
double GapR=0;
double GapD=0;
double Temp=0;
double G=0;
double R=0;
double D=0;
double B=0;
double H=0;
for (int j=0 ; j < best_district_wards[i].size(); j++){
P +=subPop[best_district_wards[i][j]];
D += democrats[best_district_wards[i][j]];
B +=black [best_district_wards[i][j]];
H +=hispanic[best_district_wards[i][j]];
R += voter[best_district_wards[i][j]]-democrats[best_district_wards[i][j]];
S += area[best_district_wards[i][j]];
for (int k=0; k<numSubs; k++)
{
if(std::find(best_district_wards[i].begin(), best_district_wards[i].end(), k) == best_district_wards[i].end())
{
if(adjacency[best_district_wards[i][j]][k]==1){
Temp = Temp+bLength[best_district_wards[i][j]][k];
}
}
}
}
// Efficiency Gap
//*
G = ceil (0.50* (R+D));
if (D >= G) {
GapR = R ;
GapD = D - G;
}
else {
GapD = D ;
GapR = R - G;
}
//*/
if(detailed_output==true){
cout << "district[" << i << "]: PopDeviation=" << abs(P-Pbar)/ Pbar << " and populstion= " << P << " and ratio of democrats= " << D/(R+D) << " and democrats's gap= " << GapD <<" and Republicans' gap=" << GapR << " and compactness=" << (pow((Temp),2)/S) << endl;
}
//myOutputFile << "Population:" << endl;
//myOutputFile << "district[" << i << "]: PopDeviation=" << abs(P-Pbar)/ Pbar << " and populstion= " << P << " and ratio of democrats= " << D/(R+D) << " and democrats's gap= " << GapD <<" and Republicans' gap=" << GapR << " and compactness=" << (pow((Temp),2)/S) << endl;
//myOutputFile << "balck and Hispanic pop:" << endl;
//myOutputFile << i << " = " << P << " and " << B << " and " << H << " and " << B/P << " and " << H/P << endl;
//myOutputFile << "Democrats and Republicans and compactness:" << endl;
//myOutputFile << P << " and " << D << " and " << D+R << " and " << S << " and " << Temp << " and " << D-R << endl;
myOutputFile << i << " = " << P << " , " << abs(P-Pbar)/ Pbar << " , " << D << " , " << R << " , " << D+R << " , " << D/(R+D) << " , " << B << " , " << H << " , " << (pow((Temp),2)/S) << endl;
if (R > D){
Rep=Rep+1;
}
else{
Dem=Dem+1;
}
VoteP1 +=(GapD);
VoteP2 += (GapR);
};
VoteP = ((VoteP1)-(VoteP2));
myOutputFile << "-------------------------------------------------------------" << endl;
if ( VoteP1/VD >= VoteP2/VR ) {
myOutputFile << " Republicans have done better job" << endl;
}
else {
myOutputFile << " Democrats have done better job" << endl;
}
myOutputFile << "newGap=" << abs(VoteP1/VD-VoteP2/VR)/2 << endl;
myOutputFile << "OldGap=" << abs(VoteP/TVoter) << " -- " << abs(VoteP/TVoter + 0.5 * (VR-VD)/TVoter) << endl;
//myOutputFile << "MaxBoundOfVoteRatio" << 2*(VD -VoteP1)/TVoter << " and " << 2*(VR -VoteP2)/TVoter << endl;
myOutputFile << "#OfRepSeats=" << Rep << endl;
myOutputFile << "#OfDemSeats=" << Dem << endl;
//*/
cout << "\nTotal seconds elapsed = " << time(NULL) - startTime << endl;
// output file
//*
myOutputFile << endl;
myOutputFile << "-------------------------------------------------------------" << endl;
myOutputFile << "\t" << "The best score was achieved after iteration= " << bestIteration
<< " after " << bestStep << " steps." << endl;
myOutputFile << "\t" << "Objective value of best state = " << bestDistPenalty << endl;
myOutputFile << "\t" << "Objective value of weighted normalized pop, vot , com and Res = " << bestDistPenaltyP << "," <<bestDistPenaltyV << "," << bestDistPenaltyCom << bestDistPenaltyR <<endl;
myOutputFile << "\t" << "Objective value of best Original state = " << OriginalBestDistPenalty << endl;
myOutputFile << "\t" << "Objective value of not weighted but normalized pop, vote ,com, and Res= " << OriginalBestDistPenaltyP << "," << OriginalBestDistPenaltyV << "," << OriginalBestDistPenaltyCom << "," << OriginalBestDistPenaltyR <<endl;
myOutputFile << "\t" << "Objective value of not weighted not normalized pop, vote, com, and Res= " << bestBeforeNormalP << "," << bestBeforeNormalV << "," << bestBeforeNormalCom << "," << bestBeforeNormalR <<endl;
myOutputFile << "-------------------------------------------------------------" << endl;
myOutputFile << "noOfNeighbors=" << noOfGeneration << endl;
myOutputFile << "noOfFeasibleSolution=" << noOfFeasibleSolution << endl;
myOutputFile << "\nBestTime = " << bestTime << endl;
myOutputFile <<"\nneighbortime = "<< neighbortime<< endl;
//myOutputFile <<"\nneighbortime00 = "<< neighbortime00 << endl;
//myOutputFile <<"\nneighbortime01 = "<< neighbortime01 << endl;
//myOutputFile <<"\nneighbortime02 = "<< neighbortime02 << endl;
//myOutputFile <<"\nneighbortime03 = "<< neighbortime03 << endl;
myOutputFile <<"\ncontiguityTime = "<< contiguityTime << endl;
//myOutputFile <<"\ncontiguityTime00 = "<< contiguityTime00 << endl;
//myOutputFile <<"\ncontiguityTime01 = "<< contiguityTime01 << endl;
//myOutputFile <<"\ncontiguityTime02 = "<< contiguityTime02 << endl;
myOutputFile <<"\nfeasibleTime = "<< feasibleTime << endl;
myOutputFile <<"\nPenaltycaltime "<< Penaltycaltime << endl;
myOutputFile << "\nAcceptanceTime = "<< AcceptanceTime << endl;
//myOutputFile << "\nAcceptanceTime00 = "<< AcceptanceTime00 << endl;
//myOutputFile << "\nAcceptanceTime01 = "<< AcceptanceTime01 << endl;
//myOutputFile << "\nAcceptanceTime02 = "<< AcceptanceTime02 << endl;
//myOutputFile << "\nAcceptanceTime03 = "<< AcceptanceTime03 << endl;
myOutputFile << "\ninitialtime = "<< initialtime << endl;
myOutputFile << "\nTotal seconds elapsed = " << time(NULL) - startTime << endl;
myOutputFile << "\t" << "The best districts found was: ";
/*
for (int i=0 ; i < best_district_wards.size(); i++){
for (int j=0 ; j < best_district_wards[i].size(); j++){
myOutputFile <<best_district_wards[i][j] << ",";
}
}
//*/
////////////////////////////////////////////////////
//*
/*
myOutputFile3 <<" size()=" << myNotContiguousWards.size() << endl;
for (int i=0 ; i < myNotContiguousWards.size(); i++){
myOutputFile3 << myNotContiguousWards[i] << "," << endl ;
}
//*/
myOutputFile << "-------------------------------------------------------------" << endl;
cout << "Details of assignment: " << endl;
myOutputFile << "Details of assignment: " << endl;
for (int i=0 ; i < best_district_wards.size(); i++){
for (int j=0 ; j < best_district_wards[i].size(); j++){
//cout << "ward["<< best_district_wards[i][j] << "]is assigned to district[" << i <<"];" <<endl;
//myOutputFile << best_district_wards[i][j] << " " << i <<endl;
myOutputFile << i << " " << best_district_wards[i][j] << endl;
}
}
//*/
//*/
cout << "Details of address assignment: " << endl;
myOutputFile << "-------------------------------------------------------------" << endl;
myOutputFile << "Details of address assignment: " << endl;
for (int i=0 ; i < best_district_wards.size(); i++){
int sa=0;
int ra=0;
for (int j=0 ; j < best_district_wards[i].size(); j++){
//for (auto k = SenAddress.begin(); k!= SenAddress.end(); ++k) {
if(std::find(SenAddress.begin(), SenAddress.end(), best_district_wards[i][j]) == SenAddress.end()) {
continue;
}
else{
sa=sa+1;
}
}
for (int j=0 ; j < best_district_wards[i].size(); j++){
if(std::find(Address.begin(), Address.end(), best_district_wards[i][j]) == Address.end()) {
continue;
}
else{
ra=ra+1;
}
}
myOutputFile << i << " " << ra << " " << sa << endl;
}
//*/
// list of the neighbor districts for the best solution
myOutputFile << "-------------------------------------------------------------" << endl;
myOutputFile << "list of the neighbor districts: " << endl;
neighbor_districts .resize(noDistricts);
for (int i = 0; i < noDistricts ; i++) {
int y=-1;
for (int k=0 ; k < noDistricts ; k++) {
if (k==i) {
continue;
}
for (int j = 0; j < corners_nodes[i].size(); j++) {
for (int l = 0; l < corners_nodes[k].size(); l++) {
//cout << corners_nodes[k].size() << endl;
//cout << i <<" " << k << " "<< j << " " << l << endl;
if (adjacency [corners_nodes[i][j]][corners_nodes[k][l]] == 1) {
if(std::find(neighbor_districts[i].begin(), neighbor_districts[i].end(), k) == neighbor_districts[i].end()) {
neighbor_districts[i].push_back(k);
}
}
} }
}
}
for (int i=0 ; i < neighbor_districts.size(); i++){
for (int j=0 ; j < neighbor_districts[i].size(); j++){
//cout << "ward["<< best_district_wards[i][j] << "]is assigned to district[" << i <<"];" <<endl;
myOutputFile << i << " " << neighbor_districts[i][j] << endl;
}
}
myOutputFile.close();
//myOutputFile1.close();
//myOutputFile2.close();
//myOutputFile3.close();
//*/
/////////////////////////////////////////////////////////////////////////////////////////////////////////////
}
catch (IloException& ex)
{
cerr << "Error: " << ex << endl;
}
catch (...)
{
cerr << "Error" << endl;
}
env.end();
return 0;
}
//*
/////////////////////////////////////////////////////////////////////////////////////
static void readData1 (const char* filename, IloInt& noDistricts, IloNumArray& subPop)
{
ifstream in(filename);
if (in)
{
in >> noDistricts;
in >> subPop;
if(detailed_output==true){
cout << noDistricts <<endl;
cout << subPop <<endl;
}
}
else
{
cerr << "No such file: " << filename << endl;
throw(1);
}
}
////////////////////////////////////////////////////////////////////////////////////////////////
static void readData2 (const char* filename, IloNumArray& democrats)
{
ifstream in(filename);
if (in)
{
in >> democrats;
if(detailed_output==true){
cout << democrats <<endl;
}
}
else
{
cerr << "No such file: " << filename << endl;
throw(1);
}
}
//////////////////////////////////////////////////////////////////////////////////////////////////
static void readData3 (const char* filename, IloNumArray& republican)
{
ifstream in(filename);
if (in)
{
in >> republican;
if(detailed_output==true){
cout << republican <<endl;
}
}
else
{
cerr << "No such file: " << filename << endl;
throw(1);
}
}
/////////////////////////////////////////////////////////////////////////////////////////////
static void readData4 (const char* filename, IloNumArray& area)
{
ifstream in(filename);
if (in)
{
in >> area;
if(detailed_output==true){
cout << area <<endl;
}
}
else
{
cerr << "No such file: " << filename << endl;
throw(1);
}
}
/////////////////////////////////////////////////////////////////////////////
static void readData5 (const char* filename, IloNumArray& perimeter)
{
ifstream in(filename);
if (in)
{
in >> perimeter;
if(detailed_output==true){
cout << perimeter <<endl;
}
}
else
{
cerr << "No such file: " << filename << endl;
throw(1);
}
}
//////////////////////////////////////////////////////////////////////////////////
static void readData6 (const char* filename, IloNumArray& voter)
{
ifstream in(filename);
if (in)
{
in >> voter;
if(detailed_output==true){
cout << voter <<endl;
}
}
else
{
cerr << "No such file: " << filename << endl;
throw(1);
}
}
//////////////////////////////////////////////////////////////////////////////////////
static void readData7 (const char* filename, IloNumArray& amount)
{
ifstream in(filename);
if (in)
{
in >> amount;
if(detailed_output==true){
cout << amount <<endl;
}
}
else
{
cerr << "No such file: " << filename << endl;
throw(1);
}
}
//////////////////////////////////////////////////////////////////////////////////////////////////
static void readData8 (const char* filename, IloNumArray2& bLength)
{
ifstream in(filename);
if (in)
{
in >> bLength;
if(detailed_output==true){
//cout << bLength <<endl;
}
}
else
{
cerr << "No such file: " << filename << endl;
throw(1);
}
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
static void readData9 (const char* filename, IloNumArray2& adjacency)
{
ifstream in(filename);
if (in)
{
in >> adjacency;
if(detailed_output==true){
//cout << adjacency <<endl;
}
}
else
{
cerr << "No such file: " << filename << endl;
throw(1);
}
}
////////////////////////////////////////////////////////////////
static void readData10 (const char* filename, IloNumArray& black)
{
ifstream in(filename);
if (in)
{
in >> black;
if(detailed_output==true){
//cout << adjacency <<endl;
}
}
else
{
cerr << "No such file: " << filename << endl;
throw(1);
}
}
////////////////////////////////////////////////////////////////
static void readData11 (const char* filename, IloNumArray& hispanic)
{
ifstream in(filename);
if (in)
{
in >> hispanic;
if(detailed_output==true){
cout << hispanic <<endl;
}
}
else
{
cerr << "No such file: " << filename << endl;
throw(1);
}
}
////////////////////////////////////////////////////////////////
static void readData12 (const char* filename, IloNumArray& address)
{
ifstream in(filename);
if (in)
{
in >> address;
if(detailed_output==true){
cout << address <<endl;
}
}
else
{
cerr << "No such file: " << filename << endl;
throw(1);
}
}
////////////////////////////////////////////////////////////////
static void readData13 (const char* filename, IloNumArray& senAddress)
{
ifstream in(filename);
if (in)
{
in >> senAddress;
if(detailed_output==true){
cout << senAddress <<endl;
}
}
else
{
cerr << "No such file: " << filename << endl;
throw(1);
}
}
////////////////////////////////////////////////////////////////////
static void readData14 (const char* filename, IloNumArray2& split)
{
ifstream in(filename);
if (in)
{
in >> split;
if(detailed_output==true){
//cout << split <<endl;
}
}
else
{
cerr << "No such file: " << filename << endl;
throw(1);
}
}
////////////////////////////////////////////////////////////////
// Contiguity output
int traverseSubs(int node, IloNumArray2 boundry)
{
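// Depth-first traversal: starting from "node", follow the ward adjacency recorded in
// "boundry" but only through wards listed in subs_in_district, appending every ward reached
// to traversed_nodes. A district is contiguous exactly when this traversal reaches all of
// its wards.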
if(contiguety_output==true){
cout << "traverseSubs with node " << node << endl;
}
// cout << "traverseSubs with node " << node << endl;
traversed_nodes.push_back(node);
if(contiguety_output==true){
cout << "Current traversed_nodes: " ;
for (auto i = traversed_nodes.begin(); i != traversed_nodes.end(); ++i)
{
std::cout << *i << ' ';
}
cout << endl;
}
for (auto i = subs_in_district.begin(); i != subs_in_district.end(); ++i) {
// check if initial "node" and "sub i" are neighbors
if (boundry[node][*i] == 1) {
// check if sub i is not already traversed
if(std::find(traversed_nodes.begin(), traversed_nodes.end(), *i) == traversed_nodes.end())
{
// not traversed, so add it to the list of traversed nodes and
// call traverseSubs recursively with sub i
traverseSubs( *i, boundry );
if(contiguety_output==true){
cout << *i << " added to traversed_nodes" << endl;
}
}
else
{
if(contiguety_output==true){
cout << *i << " already exists in traversed_nodes" << endl;
}
}
}
}
return 0;
}<file_sep>/Komatsu/es_Backward.py
import numpy as np
import pandas as pd
import seaborn as sns
import scipy
from scipy import stats
from scipy.stats import circmean, circstd
from data_helper import FetchData
from es_cycle_parser import ESCycleParsing as cycle
from es_kpi import PropelKPI
from datetime import datetime
class BackwardDetection:
"""Backward Detection Class"""
def __init__(self):
pass
def back_angle (df , CL):
"""add 0,1 column to find shovel's back position
find the mean and standard deviation of upper_body_position for shovel's front side
find confidence interval for shovel's front side
find the indexes at which showel swings back and returns forward
:param: ES_Swg_Res_Cnt , ES_Sys_RHM_SAO_State, confidence level
:return: dataframe, time of swinging back and, time of returning forward
"""
df['upper_body_position']=df.ES_Swg_Res_Cnt *360/8192
c0=df.ES_Sys_RHM_SAO_State ==9.0
c1=df.ES_Sys_RHM_SAO_State ==5.0
c2=df.ES_Sys_RHM_SAO_State ==6.0
#c3=df.ES_Sys_RHM_SAO_State ==30.0
c4=df.ES_Sys_RHM_SAO_State ==10.0
#c5=df.ES_Sys_RHM_SAO_State ==8.0
c6=df.ES_Sys_RHM_SAO_State ==7.0
c7=df.ES_Sys_RHM_SAO_State ==11.0
#dd=df[c0|c1|c2|c3|c4|c5|c6|c7]
#dd=df[c0|c1|c2|c4|c6|c7]
dd=df[c0|c1|c2]
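# Keep only rows in RHM SAO states 9, 5 and 6; based on checkForState below, 5 and 6 are the
# pre-dig/dig states, so these rows are assumed to capture the shovel facing its normal
# digging direction and are used to estimate the "front" angle distribution.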
mu = circmean (dd['upper_body_position'], 360, 0)
std=circstd(dd['upper_body_position'], 360, 0)
conf_int = stats.norm.interval(CL, loc=mu, scale=std)
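# upper_body_position is a circular quantity in [0, 360), so the normal-interval bounds can
# spill outside that range; the lines below wrap them back onto the circle before they are
# compared against the measured angle.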
f=conf_int[0]
l=conf_int[1]
if l>360:
l=l-360
if f<0:
f=360+f
print (f)
m1=min(f,l)
m2=max(f,l)
print (m1)
print (m2)
if (mu > m1) and (mu < m2):
df['back_angle']=np.where(np.logical_or(df['upper_body_position']<m1,df['upper_body_position']>m2),1,0)
else:
df['back_angle']=np.where(np.logical_and(df['upper_body_position']<m2,df['upper_body_position']>m1),1,0)
df['intoback']=pd.DataFrame(df.back_angle.transpose().diff())
df1=df.copy()
df1.reset_index(inplace=True, drop=False)
goInside=df1.index[df1.intoback ==1]
goOutside=df1.index[df1.intoback ==-1]
return df, goInside, goOutside
def checkForState(T):
""" check for dig cycles, predig cycles and propel cycle in data"""
start30, end30 = cycle.get_start_end_times(T.ES_Sys_RHM_SAO_State, 30)
propcount= len(start30)
start5, end5 = cycle.get_start_end_times(T.ES_Sys_RHM_SAO_State, 5)
start6, end6 = cycle.get_start_end_times(T.ES_Sys_RHM_SAO_State, 6)
digcount= len(start5)+ len(start6)
dig = len(start6)
predig= len(start5)
return digcount , propcount , predig , dig
def propeltime(f):
"""calculate actual propel duration when propel mode is TRUE and tracks RPM is greater than
15% of max RPM for 4CAC salesforce model
"""
parser = cycle()
time_start, time_end = parser.get_start_end_times(f['ES_Op_Indn_Ppl_Status'], 1)
propel_cycles = pd.DataFrame(list(zip(time_start, time_end)),
columns=['PropelCycle_Start_Time', 'PropelCycle_End_Time'])
propel_cycles.index.name = 'ID'
total_propel_cycle_both = []
for z in range(propel_cycles.shape[0]):
result = f.loc[np.logical_and(f.index >= propel_cycles.iloc[z].PropelCycle_Start_Time,
f.index <= propel_cycles.iloc[z].PropelCycle_End_Time)]
single_propel_cycle_both = PropelKPI.actual_propel_time_bothtrack(result)
total_propel_cycle_both.append(single_propel_cycle_both)
return round(sum(total_propel_cycle_both),2)
def Backward (df, CL):
"""
when the shovel swings back, pickes up the cable and returns forward (start time, end time, and duration, \
and prople time for every Backward process for thr shovels)
:param: ES_Swg_Res_Cnt , ES_Sys_RHM_SAO_State, ES_Sys_RHM_SAO_State, ES_Op_Indn_Ppl_Status,ES_Ppl_Mtr_Rt_Spd_RPM,ES_Ppl_Mtr_Lt_Spd_RPM and, confidence level
:return: ['starttime' , 'endtime' , 'duration' , 'propelTime'] of each Backward
"""
TW=[0,0]
Backward=[]
propcount=0
digcount=0
Possiblity=BackwardDetection.back_angle(df,CL)
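# Possiblity = (annotated df, goInside, goOutside): the row positions where the upper body
# enters and leaves the "back" angular region. Each candidate window TW below spans one
# swing-back/return-forward stretch; the two outer branches only differ in how the first and
# last (possibly incomplete) windows are paired when the record starts inside the back region.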
if len(Possiblity[1])==len(Possiblity[2]):
for i in range(len(Possiblity[1])-1):
if (Possiblity[0]['back_angle'].iloc[0]==1) and (i==0):
TW=Possiblity[0].iloc[:Possiblity[2][0]]
elif (Possiblity[0]['back_angle'].iloc[0]==1) and (i!=0) and (i!=len(Possiblity[1])-1):
TW=Possiblity[0].iloc[ Possiblity[1][i] : Possiblity[2][i+1]]
elif (Possiblity[0]['back_angle'].iloc[0]==0):
TW=Possiblity[0].iloc[ Possiblity[1][i] : Possiblity[2][i]]
else:
TW=Possiblity[0].iloc[ Possiblity[1][i] : ]
status=BackwardDetection.checkForState(TW)
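# status = (digcount, propcount, predig, dig); the window is treated as a cable pick-up
# ("Backward") event only if it contains enough digging activity (>= 3 dig+predig cycles,
# or >= 2 predig, or >= 2 dig) together with at least 2 propel cycles.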
if (((status[0]>=3)|(status[2]>=2)|(status[3]>=2)) & (status[1]>=2)):
propel=BackwardDetection.propeltime(TW)
Backward.append([TW.index[0], TW.index[len(TW)-1], \
(TW.index[len(TW)-1]-TW.index[0])/1000,propel])
TW=TW[0:0]
propcount=0
digcount=0
continue
else:
for i in range(len(Possiblity[1])-1):
if (Possiblity[0]['back_angle'].iloc[0]==1) and (i==0):
TW=Possiblity[0].iloc[:Possiblity[2][0]]
elif (Possiblity[0]['back_angle'].iloc[0]==1) and (i!=0):
TW=Possiblity[0].iloc[ Possiblity[1][i] : Possiblity[2][i+1]]
elif (Possiblity[0]['back_angle'].iloc[0]==0) and (i!=len(Possiblity[1])-1):
TW=Possiblity[0].iloc[ Possiblity[1][i] : Possiblity[2][i]]
else:
TW=Possiblity[0].iloc[ Possiblity[1][i] : ]
status=BackwardDetection.checkForState(TW)
if (((status[0]>=3)|(status[2]>=2)|(status[3]>=2)) & (status[1]>=2)):
propel=BackwardDetection.propeltime(TW)
Backward.append([TW.index[0], TW.index[len(TW)-1], \
(TW.index[len(TW)-1]-TW.index[0])/1000,propel])
TW=TW[0:0]
propcount=0
digcount=0
continue
Backward = pd.DataFrame (Backward , columns=['starttime' , 'endtime' , 'duration' , 'propelTime'])
for i in range(len(Backward)):
Backward.starttime.iloc[i]=datetime.utcfromtimestamp(Backward.starttime.iloc[i]/1000).strftime('%Y/%m/%d %H:%M:%S')
Backward.endtime.iloc[i]=datetime.utcfromtimestamp(Backward.endtime.iloc[i]/1000).strftime('%Y/%m/%d %H:%M:%S')
return Backward
<file_sep>/Machine learning/ML-R.Rmd
---
title: "EE711 Project"
author: "<NAME> and <NAME>"
date: "December 26, 2017"
output: html_document
---
We used "R Markdown" which is a report writing platform of R to prepare this report. R markdown enables us to illustrate explanations, codes, comments and outputs at the same time.
We picked the "Porto Seguro's Safe Driver Prediction" competition from Kaggle website for this project. Inaccuracies in car insurance company's claim predictions raise the cost of insurance for good drivers and reduce the price for bad ones.
In this competition, we were challenged to build a model that predicts the probability that a driver will initiate an auto insurance claim in the next year. While Porto Seguro has used machine learning for the past 20 years, they're looking to Kaggle's machine learning community to explore new, more powerful methods. A more accurate prediction will allow them to further tailor their prices, and hopefully make auto insurance coverage more accessible to more drivers.
We used three methods to predict the target drivers including a simple Logistic Regeression, Random Foreset, and Gradient Boosting. From our resluts, we found that the Logistic regression surprisingly outperforms Gradient Boosting and Random Forest failed to give any results even after a long run time. The better performance of Logistic regression than GB was not expected and at this state, we are not sure about reason and we will investigate it in later steps. In the following, we present all the data manipulation and training procedures indetail.
```{r ,echo=FALSE,warning=FALSE}
#Load required packages:
library(caret)
library(dplyr)
library(ggplot2)
library(tibble)
library(corrplot)
library(xgboost)
library(pROC)
```
##Data preparation
Read train and test data:
```{r}
training <- read.csv(file="train.csv",header = TRUE)
testing <- read.csv(file="test.csv",header = TRUE)
```
Create a target column for the testing data set so that the train and test files can be combined for data imputation:
```{r}
target <- rep(0,times=892816)
testing <- data.frame(testing$id,target,testing[,2:58])
names(testing) <- names(training)
```
Combine training and testing data sets:
```{r}
combi <- rbind(training,testing)
```
We'll find and eliminate highly correlated variables:
```{r}
combi[,1:38] %>% cor(use="complete.obs",method="spearman") %>% corrplot(type="lower",tl.col="black",diag=FALSE)
```
It is observed that variables "ps_ind_14" and "ps_ind_12_bin" are highly correlated (r=0.92), so we can drop one of them. We selected "ps_ind_12_bin" to drop:
```{r}
combi$ps_ind_12_bin <- NULL
```
The next step is dropping variables with near-zero variance. This procedure is
done with the nearZeroVar function from the caret package:
```{r}
nzv <- nearZeroVar(combi,freqCut = 95/5,saveMetrics = TRUE,names = TRUE, foreach = FALSE, allowParallel = TRUE)
#print(nzv)
dropList <- list(
"ps_ind_05_cat",
"ps_ind_10_bin",
"ps_ind_11_bin",
"ps_ind_13_bin",
"ps_ind_14",
"ps_reg_03",
"ps_car_10_cat"
)
for (d in dropList){
combi[, eval(d)] <- NULL
}
```
The next preprocessing step is dealing with missing values. We observed that 11 variables have missing values, of which 8 are categorical. For the categorical variables we treated the missing values as a new category and replaced them all with
the value of that new category:
```{r,echo=FALSE}
combi[combi==-1] <- NA
nalist <- lapply(combi,function(m) sum(is.na(m)))
nalist>0
unique(combi[,4])
combi[is.na(combi[,4]),4] <- 5
unique(combi[,6])
combi[is.na(combi[,6]),6] <- 2
unique(combi[,17])
combi[is.na(combi[,17]),17] <- 12
unique(combi[,18])
combi[is.na(combi[,18]),18] <- 2
unique(combi[,19])
combi[is.na(combi[,19]),19] <- 2
unique(combi[,21])
combi[is.na(combi[,21]),21] <- 2
unique(combi[,23])
combi[is.na(combi[,23]),23] <- 2
unique(combi[,25])
combi[is.na(combi[,25]),25] <- 5
```
For numeric variables with NA values, if the variable had less than 5% NA values we filled the NAs with the median; otherwise we dropped the variable:
```{r}
sum(is.na(combi$ps_car_11))
combi$ps_car_11[is.na(combi$ps_car_11)] <- median(combi$ps_car_11,na.rm = TRUE)
sum(is.na(combi$ps_car_12))
combi$ps_car_12[is.na(combi$ps_car_12)] <- median(combi$ps_car_12,na.rm = TRUE)
sum(is.na(combi$ps_car_14))
combi$ps_car_14 <- NULL
```
The final step is to convert the categorical variables to factors, since Random Forest and GB expect categorical inputs to be factors:
```{r}
catlist <- names(combi %>% select(ends_with("_cat")))
combi[, catlist] <- lapply(combi[, catlist], factor)
train <- filter(combi,combi$id %in% training$id)
test <- filter(combi,combi$id %in% testing$id)
```
##Logistic Regression
The first approach we utilize is Logistic Regression via the glm function, as shown below. We validated it on a hold-out split using a multiclass ROC curve to measure the area under the curve; the ROC analysis below gives an area of 0.6141:
```{r,warning=FALSE}
inTrain <- createDataPartition(y=train$target, p=0.75, list=FALSE);
train1 <- train[inTrain,]
test1 <- train[-inTrain,]
logfit1 <- glm(target~.-id,data = train1,family = binomial(link = "logit"))
logfitpreds1 <- predict(logfit1, newdata = test1, type = "response")
roc.multi <- multiclass.roc(test1$target,logfitpreds1)
auc(roc.multi)
```
The Logistic Regression was then trained on the full training data:
```{r,warning=FALSE}
logfit <- glm(target~.-id,data = train,family = binomial(link = "logit"))
logfitpreds <- predict(logfit, newdata = test, type = "response")
logistic <- data.frame(test$id,logfitpreds)
write.csv(logistic,"logistic.csv")
data.frame(preds = logfitpreds) %>%
ggplot(aes(x = logfitpreds)) +
geom_histogram(bins = 50, fill = 'grey50') +
labs(title = 'Histogram of Predictions') +
theme_bw()
range(logfitpreds)
```
Our score from this solution on the Kaggle website was 0.23855 and our rank was 4296/5169 (min=-0.25, max=0.29698).
##Random Forest
The second approach we utilize is Random Forest, using the train function from the caret package, as shown below:
```{r}
#rffit <- train(target~.,data = train,method="rf",prox=TRUE)
```
This procedure took too long: we got no solution even after a full day of run time. A possible lighter configuration is sketched below.
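One likely reason is the default setup: target is numeric here (so the forest runs in regression mode), prox=TRUE requests a very large proximity matrix, and caret's default resampling refits the forest many times. The chunk below is only an untested sketch of a lighter configuration; the fold count, tuneLength and ntree values are illustrative assumptions, not something we ran, so evaluation is disabled:
```{r,eval=FALSE}
# Untested sketch: classification response, small 3-fold CV, a single mtry value,
# a capped number of trees, and no proximity matrix.
train_rf <- train
train_rf$target <- factor(train_rf$target)
ctrl <- trainControl(method = "cv", number = 3)
rffit <- train(target ~ . - id, data = train_rf, method = "rf",
               trControl = ctrl, tuneLength = 1, ntree = 100)
```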
##Gradient Boosting
Next we used the Gradient Boosting method from the XGBoost package, as shown below:
Validation for the XGBoost method (ROC curve) is done by the code block below, giving a multiclass area under the curve of 0.6054.
```{r}
datamatrix2 <- data.matrix(train1[,3:50])
lable2 <- as.matrix(train1$target)
testmatrix2 <-data.matrix(test1[,3:50])
#xgbfitpreds2 <- predict(xgbfit2,testmatrix2)
xgbfit2 <- xgboost(data=datamatrix2,label = lable2 ,max.depth=700,eta=0.1,n_estimators=200, nthread=3, nround =10, objective = "binary:logistic")
xgbfitpreds2 <- predict(xgbfit2,testmatrix2)
roc.multi <- multiclass.roc(test1$target,xgbfitpreds2)
auc(roc.multi)
```
XGBoost method:
```{r}
datamatrix <- data.matrix(train[,3:50])
lable <- as.matrix(train$target)
xgbfit <- xgboost(data=datamatrix,label = lable ,max.depth=700,eta=0.1,n_estimators=200, nthread=3, nround =10, objective = "binary:logistic")
testmatrix <-data.matrix(test[,3:50])
xgbfitpreds <- predict(xgbfit,testmatrix)
gradiantboosting <- data.frame(test$id,xgbfitpreds)
write.csv(gradiantboosting,"gradiantboosting.csv")
data.frame(preds = xgbfitpreds) %>%
ggplot(aes(x = xgbfitpreds)) +
geom_histogram(bins = 50, fill = 'grey50') +
labs(title = 'Histogram of Predictions') +
theme_bw()
range(xgbfitpreds)
```
Our score from this solution on the Kaggle website was 0.09310 and our rank was 4835/5169 (min=-0.25, max=0.29698).
Next we applied parameter tuning to the XGBoost package (without any preprocessing). First we show the validation results, with an area under the curve of 0.5954:
```{r,warning=FALSE}
datamatrix23 <- data.matrix(train1[,3:50])
lable2_1 <- as.matrix(train1$target)
dtrain23 <- xgb.DMatrix(data = datamatrix23,label = lable2_1)
testmatrix23 <-data.matrix(test1[,3:50])
lable2_2 <- as.matrix(test1$target)
dtest23 <- xgb.DMatrix(data = testmatrix23,label = lable2_2)
params <- list(booster = "gbtree", objective = "binary:logistic", eta=0.3, gamma=0, max_depth=6, min_child_weight=1, subsample=1, colsample_bytree=1)
xgbcv3 <- xgb.cv( params = params, data = dtrain23, nrounds = 100, nfold = 5, showsd = T, stratified = T, print.every.n = 10, early.stop.round = 20, maximize = F)
xgbfit23 <- xgb.train(params = params, data = dtrain23, nrounds =4, watchlist = list(val=dtest23,train=dtrain23), print.every.n = 10, early.stop.round = 10, maximize = F , eval_metric = "error")
xgbfitpreds23 <- predict(xgbfit23,testmatrix23)
roc.multi <- multiclass.roc(test1$target,xgbfitpreds23)
auc(roc.multi)
```
Using the XGBoost package (without any preprocessing):
```{r,warning=FALSE}
datamatrix2 <- data.matrix(training[,3:50])
lable2_1 <- as.matrix(training$target)
dtrain2 <- xgb.DMatrix(data = datamatrix2,label = lable2_1)
testmatrix2 <-data.matrix(testing[,3:50])
lable2_2 <- as.matrix(testing$target)
dtest2 <- xgb.DMatrix(data = testmatrix2,label = lable2_2)
params <- list(booster = "gbtree", objective = "binary:logistic", eta=0.3, gamma=0, max_depth=6, min_child_weight=1, subsample=1, colsample_bytree=1)
xgbcv <- xgb.cv( params = params, data = dtrain2, nrounds = 100, nfold = 5, showsd = T, stratified = T, print.every.n = 10, early.stop.round = 20, maximize = F)
xgbfit2 <- xgb.train(params = params, data = dtrain2, nrounds =4, watchlist = list(val=dtest2,train=dtrain2), print.every.n = 10, early.stop.round = 10, maximize = F , eval_metric = "error")
xgbfitpreds2 <- predict(xgbfit2,testmatrix2)
gradiantboosting2_1 <- data.frame(test$id,xgbfitpreds2)
write.csv(gradiantboosting2_1,"gradiantboosting2_1.csv")
data.frame(preds = xgbfitpreds2) %>%
ggplot(aes(x = xgbfitpreds2)) +
geom_histogram(bins = 50, fill = 'grey50') +
labs(title = 'Histogram of Predictions') +
theme_bw()
range(xgbfitpreds2)
```
Our score from this solution on the Kaggle website was 0.23443 and our rank was 4333/5169 (min=-0.25, max=0.29698).
Finally we applied parameter tuning to the XGBoost package (with our preprocessing). First we show the validation results, with an area under the curve of 0.5954; this suggests that the XGBoost package can take care of data preparation fairly well on its own.
```{r,warning=FALSE}
datamatrix34 <- data.matrix(train1[,3:50])
lable3_1 <- as.matrix(train1$target)
dtrain34 <- xgb.DMatrix(data = datamatrix34,label = lable3_1)
testmatrix34 <-data.matrix(test1[,3:50])
lable3_2 <- as.matrix(test1$target)
dtest34 <- xgb.DMatrix(data = testmatrix34,label = lable3_2)
params <- list(booster = "gbtree", objective = "binary:logistic", eta=0.3, gamma=0, max_depth=6, min_child_weight=1, subsample=1, colsample_bytree=1)
xgbcv <- xgb.cv( params = params, data = dtrain34, nrounds = 100, nfold = 5, showsd = T, stratified = T, print.every.n = 10, early.stop.round = 20, maximize = F)
xgbfit34 <- xgb.train(params = params, data = dtrain34, nrounds =6, watchlist = list(val=dtest34,train=dtrain34), print.every.n = 10, early.stop.round = 10, maximize = F , eval_metric = "error")
xgbfitpreds34 <- predict(xgbfit34,testmatrix34)
roc.multi <- multiclass.roc(test1$target,xgbfitpreds34)
auc(roc.multi)
```
Applying the XGBoost package (with preprocessing):
```{r,warning=FALSE}
datamatrix3 <- data.matrix(train[,3:50])
lable3_1 <- as.matrix(train$target)
dtrain3 <- xgb.DMatrix(data = datamatrix3,label = lable3_1)
testmatrix3 <-data.matrix(test[,3:50])
lable3_2 <- as.matrix(testing$target)
dtest3 <- xgb.DMatrix(data = testmatrix3,label = lable3_2)
params <- list(booster = "gbtree", objective = "binary:logistic", eta=0.3, gamma=0, max_depth=6, min_child_weight=1, subsample=1, colsample_bytree=1)
xgbcv <- xgb.cv( params = params, data = dtrain3, nrounds = 100, nfold = 5, showsd = T, stratified = T, print.every.n = 10, early.stop.round = 20, maximize = F)
xgbfit3 <- xgb.train(params = params, data = dtrain3, nrounds =6, watchlist = list(val=dtest3,train=dtrain3), print.every.n = 10, early.stop.round = 10, maximize = F , eval_metric = "error")
xgbfitpreds3 <- predict(xgbfit3,testmatrix3)
gradiantboosting3_1 <- data.frame(test$id,xgbfitpreds3)
write.csv(gradiantboosting3_1,"gradiantboosting3_1.csv")
data.frame(preds = xgbfitpreds3) %>%
ggplot(aes(x = xgbfitpreds3)) +
geom_histogram(bins = 50, fill = 'grey50') +
labs(title = 'Histogram of Predictions') +
theme_bw()
range(xgbfitpreds3)
```
Our score from this solution on the Kaggle website was 0.23456 and our rank was 4323/5169 (min=-0.25, max=0.29698).
From our results, the best rank came from the simple Logistic Regression, followed by Gradient Boosting with tuned parameters and our own preprocessing, while XGBoost left to handle the raw data itself also performed well.
<file_sep>/Machine learning/ML-Py-Kaggle-Porto Seguro's Safe Driver.py
#!/usr/bin/env python
# coding: utf-8
# In[2]:
# Run once if needed (notebook magic): %pip install lightgbm
# In[3]:
# Run once if needed (notebook magic): %pip install xgboost
# # Introduction
#
# # Objective
#
# ###### Predict if a driver will file an insurance claim next year
# # Data
#
# - Source of the data is kaggle, <NAME> Driver's Prediction Challenge.
# - Features that belong to similar groupings are tagged as such in the feature names (e.g., ind, reg, car, calc).
# - Feature names include the postfix bin to indicate binary features and cat to indicate categorical features.
# - Features without these designations are either continuous or ordinal.
# - Values of -1 indicate that the feature was missing from the observation.
# - The target columns signifies whether or not a claim was filed for that policy holder.
#
#
#
# # Import packages
# In[4]:
# libraries
import pandas as pd
import numpy as np
import seaborn as sns
import scipy as sp
import matplotlib.pyplot as plt
from sklearn.preprocessing import Imputer
from sklearn.pipeline import Pipeline
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler
from sklearn.model_selection import GridSearchCV
from sklearn.model_selection import RandomizedSearchCV
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LogisticRegression
from sklearn.ensemble import RandomForestClassifier
from xgboost import XGBClassifier
from sklearn.model_selection import cross_val_score
from lightgbm import LGBMClassifier
import lightgbm as lgb
from sklearn.pipeline import Pipeline
#import xgboost as xgb
#from sklearn.model_selection import KFold
#from sklearn.model_selection import train_test_split
# # Load Data
# In[5]:
train = pd.read_csv('C:/Users/royag/Desktop/Resume and Cover Letter/Interview projects/train.csv')
test = pd.read_csv('C:/Users/royag/Desktop/Resume and Cover Letter/Interview projects/test.csv')
# In[6]:
#print (train.shape)
#print (test.shape)
print ('train set: # of rows = {} and # of columns = {}' .format (train.shape[0], train.shape[1]))
print ('test set: # of rows = {} and # of columns = {}' .format (test.shape[0], test.shape[1]))
# In[7]:
#train.head()
train.columns
# In[5]:
#train.tail()
# ## Duplicate rows
#
# Look at the number of rows and columns in the train data and the test data.
# Check if we have the same number of variables in the test data.
# Check if there are duplicate rows in the training data.
# In[8]:
print (train.shape)
print (test.shape)
train = train.drop_duplicates()  # drop_duplicates returns a copy, so reassign to actually apply it
print (train.shape)
# - The target variable is missing from the test data set
# - There are no duplicate rows
# ## Data type
#
# Investigate how many variables of each type we have.
# In[9]:
train.info()
# In[10]:
#make a copy of data sets
train_set=train.copy()
test_set=test.copy()
# ### Variable information in a dataframe
#
# - **role**: input, ID, target
# - **level**: Categorical, Real, Integer, Binary
# - **drop**: True or False
# - **category**: calculated, individual, registration, car
# - **dtype**: int, float, str
# In[11]:
data=[]
for var in train_set.columns:
# define the role of the features
if var=='target':
role='target'
elif var=='id':
role='id'
else:
role='input'
#define the level of the features
if 'bin' in var or var=='target':
level='binary'
#print (level)
elif 'cat' in var or var=='id':
level='categorical'
elif train_set[var].dtype==float:
level = 'real'
else:
level='integer'
# Defining the category of the features
category='00'
if 'ind' in var:
category = 'individual'
elif 'reg' in var:
category = 'registration'
elif 'car' in var:
category = 'car'
else:
category = 'calculated'
# define new boolean var to decide about keeping or dropping the features
drop=False
if var=='id':
drop=True
# define type of the features
dtype = train_set[var].dtype
my_dic= {'varname':var, 'role': role, 'level':level, 'drop':drop, 'category':category, 'dtype': dtype}
data.append (my_dic)
table_1= pd.DataFrame(data , columns=['varname', 'role' , 'level', 'drop', 'category', 'dtype'])
table_1.set_index('varname', inplace=True)
# In[10]:
table_1
# In[12]:
# extract categorical data
table_1[(table_1.level == 'categorical')].index
# In[13]:
# number of variables per level and dtype
pd.DataFrame({'count': table_1.groupby(['level', 'dtype'])['level'].size()}).reset_index()
# # Exploring the data
#
#
# ## - Target value
# In[14]:
plt.figure()
fig, ax = plt.subplots(figsize=(6,6))
x = train_set['target'].value_counts().index.values
y = train_set['target'].value_counts().values
# Bar plot
# Order the bars descending on target mean
sns.barplot(ax=ax, x=x, y=y)
plt.ylabel('Frequency', fontsize=12)
plt.xlabel('Target value', fontsize=12)
plt.tick_params(axis='both', which='major', labelsize=12)
for p in ax.patches:
percentage = '{:.2f}%'.format(100 * p.get_height()/len(train_set))
ax.annotate(percentage,(p.get_x() + p.get_width()/2 , p.get_y()+ p.get_height()), ha = 'center', va = 'center', xytext = (0, 9),textcoords = 'offset points')
# for p in ax.patches:
# ax.text(p.get_x(), p.get_height(), str(round((p.get_height()/len(train_set))*100, 2))+'%', fontsize=15, color='black',\
# ha='center' , va='center')
plt.show();
# In[15]:
train_set['target'].value_counts()
# ## - Categorical Variables
# In[16]:
cat_vars = [e for e in train_set.columns if e.endswith('cat')]
for e in cat_vars:
dist_values = train_set[e].value_counts().shape[0]
print('Variable {} has {} distinct values'.format(e, dist_values))
# In[17]:
for f in cat_vars:
print (f)
fig, ax = plt.subplots(figsize=(6,6))
# Calculate the percentage of target=1 per category value
cat_percentage = train_set[[f, 'target']].groupby([f],as_index=False).mean()
cat_percentage.sort_values(by='target', ascending=False, inplace=True)
# Bar plot
# Order the bars descending on target mean
sns.barplot(ax=ax,x=f, y='target', data=cat_percentage, order=cat_percentage[f])
plt.ylabel('Percent of target with value 1 [%]', fontsize=12)
plt.xlabel(f , fontsize=12)
plt.tick_params(axis='both', which='major', labelsize=12)
plt.show();
# # Data Preprocessing
#
# In[17]:
#train_set.iloc[:, 2:].columns
# for i in range(train_set.shape[1]-2):
# plt.boxplot(train_set.iloc[:, i])
# plt.show()
# ## - Check missing values
# In[18]:
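# In this data set missing values are encoded as -1, so we count the -1 entries per column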
missingValues1 = []
missingPer1=[]
for i in train_set.columns:
missings1 = train_set[train_set[i] == -1][i].count()
if missings1 > 0:
missingValues1.append(i)
missingPer1.append(missings1/train_set.shape[0])
missingPercent1 = missings1/train_set.shape[0]
print('Variable {} in train data has {} records ({:.2%}) with missing values'.format(i, missings1, missingPercent1))
print('In total, there are {} variables with missing values in train data'.format(len(missingValues1)))
missingValues2 = []
missingPer2=[]
for i in test_set.columns:
missings2 = test_set[test_set[i] == -1][i].count()
if missings2 > 0:
missingValues2.append(i)
missingPer2.append(missings2/test_set.shape[0])
missingPercent2 = missings2/test_set.shape[0]
print('Variable {} in test data has {} records ({:.2%}) with missing values'.format(i, missings2, missingPercent2))
print('In total, there are {} variables with missing values in test data'.format(len(missingValues2)))
# ### - Visualize the missing values of the features
# In[19]:
missing_vars1 = pd.DataFrame(sorted(zip(missingPer1, missingValues1)), columns=["Value", "Feature"])
plt.figure(figsize=(16, 5))
sns.barplot(x="Value", y="Feature", data=missing_vars1.sort_values(by="Value", ascending=False))
missing_vars2 = pd.DataFrame(sorted(zip(missingPer2, missingValues2)), columns=["Value", "Feature"])
plt.figure(figsize=(16, 5))
sns.barplot(x="Value", y="Feature", data=missing_vars2.sort_values(by="Value", ascending=False))
# ### - Drop variables with too many missing values
# In[20]:
vars_drop = ['ps_car_03_cat', 'ps_car_05_cat']
#vars_drop = ['ps_reg_03', 'ps_car_14']
train_set.drop(vars_drop, inplace=True, axis=1)
test_set.drop(vars_drop, inplace=True, axis=1)
#table_1.loc[(vars_drop),'drop'] = True # Updating table_1
# ### - Replace -1 values with NaN
# In[21]:
train_set = train_set.replace(-1, np.nan)
test_set = test_set.replace(-1, np.nan)
# ### - Imputing the missing values
# In[22]:
cat_cols = [c for c in train_set.columns if 'cat' in c]
bin_cols = [c for c in train_set.columns if 'bin' in c]
con_cols = [c for c in train_set.columns if c not in (bin_cols + cat_cols) ] #real and integer
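# the first two entries of con_cols are 'id' and 'target' in this data set; drop them so only feature columns remain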
del con_cols[:2]
print (con_cols)
# Imputing with the mean or mode
mean_imp = Imputer(missing_values=np.nan , strategy='mean', axis=0)
mode_imp = Imputer(missing_values=np.nan , strategy='most_frequent', axis=0)
for c in cat_cols:
train_set[c] = mode_imp.fit_transform(train_set[[c]]).ravel()
test_set[c] = mode_imp.transform(test_set[[c]]).ravel()
for c in bin_cols:
train_set[c] = mean_imp.fit_transform(train_set[[c]]).ravel()
test_set[c] = mean_imp.transform(test_set[[c]]).ravel()
for c in con_cols:
train_set[c] = mean_imp.fit_transform(train_set[[c]]).ravel()
test_set[c] = mean_imp.transform(test_set[[c]]).ravel()
"""
for c in cat_cols:
train_set[c].fillna(value=train_set[c].mode()[0], inplace=True)
test_set[c].fillna(value=test_set[c].mode()[0], inplace=True)
for c in bin_cols:
train_set[c].fillna(value=train_set[c].mode()[0], inplace=True)
test_set[c].fillna(value=test_set[c].mode()[0], inplace=True)
for c in con_cols:
train_set[c].fillna(value=train_df[c].mean(), inplace=True)
test_set[c].fillna(value=test_df[c].mean(), inplace=True)
"""
# ## - Correlation between the variables
# In[23]:
correlations = train_set.corr()
# Create color map ranging between two colors
cmap = sns.diverging_palette(200, 10, as_cmap=True)
fig, ax = plt.subplots(figsize=(10,10))
sns.heatmap(correlations, cmap=cmap, vmax=1, center=0, fmt='.2f',
square=True, linewidths=.25, annot=False, cbar_kws={"shrink": .5})
plt.show();
# ### - Find highly correlated variables
# In[24]:
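# print each strongly correlated pair once: the string comparison j > i skips the diagonal and mirrored pairs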
for i in correlations.columns:
for j in correlations.columns:
if (correlations[i][j])>0.5 and j>i:
print ('{} and {} ({:.2%})'.format (i , j , correlations[i][j]))
# In[27]:
#vars_drop = ['ps_ind_14', 'ps_car_13', 'ps_car_04_cat']
# train_set.drop(vars_drop, inplace=True, axis=1)
# test_set.drop(vars_drop, inplace=True, axis=1)
# ## - Dummify categorical variables
# In[25]:
cat_vars = [e for e in train_set.columns if e.endswith('cat')]
def encode_cat_vars(df, cat_vars):
for c in cat_vars:
temp = pd.get_dummies(pd.Series(df[c]), prefix=c)
df = pd.concat([df, temp],axis=1)
df = df.drop([c],axis=1)
return df
train_set = encode_cat_vars(train_set, cat_vars)
test_set = encode_cat_vars(test_set, cat_vars)
# In[26]:
print(train_set.shape)
print(test_set.shape)
# In[27]:
train_target = train_set['target']
train_target_value = train_set['target'].values
train_y= np.array(train['target'])
train_id= train['id']
train_id_value= train['id'].values
test_id=test['id']
test_id_value=test['id'].values
# ### - Drop "id" and "target" columns from both train and test set
# In[28]:
train_set = train_set.drop(['target','id'], axis = 1)
test_set = test_set.drop(['id'], axis = 1)
# ## - Feature Selection
# In[29]:
model = LGBMClassifier(n_estimators=2000, learning_rate=0.1, max_depth=-1, min_data_in_leaf = 1, min_sum_hessian_in_leaf = 1.0)
#model= RandomForestClassifier(n_estimators=1000, random_state=0, n_jobs=-1)
X = train_set
y = train_target_value
#y = train_target
model.fit(X, y)
features_imp = pd.DataFrame(sorted(zip(model.feature_importances_, X.columns)), columns=["Value", "Feature"])
print (features_imp)
plt.figure(figsize=(16, 50))
sns.barplot(x="Value", y="Feature", data=features_imp.sort_values(by="Value", ascending=False))
# ### - Drop the features that are not significant
# In[30]:
sorted_columns = sorted(zip(model.feature_importances_, X.columns))
# print (sorted_columns_by_fi)
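# LGBMClassifier.feature_importances_ defaults to split counts (importance_type='split'),
# so the threshold below keeps only features used in at least 100 tree splits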
col_to_drop = [x[1] for x in sorted_columns if x[0] < 100]
#col_to_drop = [x[1] for x in sorted_columns if x[0] < 0.0025]
#print (col_to_drop)
train_set_imp = train_set.drop(col_to_drop, axis=1)
test_set_imp = test_set.drop(col_to_drop, axis=1)
# ### - Drop calculated features
# In[31]:
col_drop = train_set_imp.columns[train_set_imp.columns.str.startswith('ps_calc_')]
train_WOCalc = train_set_imp.drop(col_drop, axis=1)
test_WOCalc = test_set_imp.drop(col_drop, axis=1, errors='ignore')
# In[32]:
print(train_set_imp.shape)
print(train_WOCalc.shape)
print(test_set_imp.shape)
print(test_WOCalc.shape)
# # Scaling and preparing models
# ### XGBoost
# In[33]:
pipe_steps = [('scaler', StandardScaler()), ('xgboost',XGBClassifier())]
params = {
'xgboost__min_child_weight': [5, 10],
'xgboost__gamma': [2, 5],
'xgboost__subsample': [ 0.8, 1.0],
'xgboost__colsample_bytree': [0.8, 1.0],
'xgboost__max_depth': [4, 5]
}
pipeline = Pipeline(pipe_steps)
#X= train_set_imp
X = train_WOCalc
y = train_target_value
# In[34]:
cv_xgb = GridSearchCV(pipeline, params, cv=10, scoring='roc_auc')
#cv_xgb = RandomizedSearchCV(pipeline, params, cv=10, scoring='roc_auc')
# In[35]:
print (cv_xgb)
# In[ ]:
cv_xgb.fit(X, y)
print(cv_xgb.best_params_)
print(cv_xgb.best_score_)
# In[40]:
y_test = cv_xgb.predict_proba(test_WOCalc)
results_df = pd.DataFrame(data={'id':test['id'], 'target':y_test[:,1]})
results_df.to_csv('submission-grid-search-xgb-cv10-porto-01.csv', index=False)
# ### LGBM
# In[42]:
pipe_steps = [('scaler', StandardScaler()), ('LGBM',LGBMClassifier())]
params = {
'LGBM__n_estimators':[50, 100, 500, 1000]
}
pipeline = Pipeline(pipe_steps)
#X= train_set_imp
X = train_WOCalc
y = train_target_value
# In[43]:
cv_lgbm = GridSearchCV(pipeline, params, cv=10, scoring='roc_auc')
#cv = RandomizedSearchCV(pipeline, params, cv=5)
# In[44]:
cv_lgbm.fit(X, y)
print(cv_lgbm.best_params_)
print(cv_lgbm.best_score_)
# In[45]:
y_test = cv_lgbm.predict_proba(test_WOCalc)
results_df = pd.DataFrame(data={'id':test['id'], 'target':y_test[:,1]})
results_df.to_csv('submission-grid-search-LGBM-cv10-porto-01.csv', index=False)
# ### Random forest
# In[46]:
pipe_steps = [('scaler', StandardScaler()), ('RF', RandomForestClassifier())]
params = { #'features__text__tfidf__max_df': [0.9, 0.95],
#'features__text__tfidf__ngram_range': [(1,1), (1,2)],
#'RF__learning_rate': [0.1, 0.2],
'RF__n_estimators': [30, 50],
'RF__max_depth': [2, 4],
'RF__min_samples_leaf': [2, 4]}
pipeline = Pipeline(pipe_steps)
#X= train_set_imp
X = train_WOCalc
y = train_target
# In[47]:
cv_rf = GridSearchCV(pipeline, params, cv=10, scoring='roc_auc')
#cv = RandomizedSearchCV(pipeline, params, cv=5)
# In[48]:
cv_rf.fit(X, y)
print(cv_rf.best_params_)
print(cv_rf.best_score_)
# In[49]:
y_test = cv_rf.predict_proba(test_WOCalc)
results_df = pd.DataFrame(data={'id':test['id'], 'target':y_test[:,1]})
results_df.to_csv('submission-grid-search-RF-CV10-porto-01.csv', index=False)
# ### Logistic regression
# In[50]:
pipe_steps = [('scaler', StandardScaler()), ('lg', LogisticRegression())]
params = {'lg__solver': ['newton-cg', 'sag', 'lbfgs']
# ,'lg__multi_class':['ovr', 'multinomial']
}
pipeline = Pipeline(pipe_steps)
#X= train_set_imp
X = train_WOCalc
y = train_target
# In[51]:
cv_lg = GridSearchCV(pipeline, params, cv=10, scoring='roc_auc')
#cv = RandomizedSearchCV(pipeline, params, cv=5)
# In[52]:
cv_lg.fit(X, y)
print(cv_lg.best_params_)
print(cv_lg.best_score_)
# In[53]:
y_test = cv_lg.predict_proba(test_WOCalc)
results_df = pd.DataFrame(data={'id':test['id'], 'target':y_test[:,1]})
results_df.to_csv('submission-grid-search-lg-cv10-porto-01.csv', index=False)
# In[ ]:
# In[101]:
results_df.head()
# In[102]:
results_df.to_csv('submission-random-grid-search-LGBM-porto-01.csv', index=False)
# In[41]:
import os
os.getcwd()
# In[1]:
cd "C:\\Users\\royag\\Desktop\\Resume and Cover Letter\\Interview projects"
# In[100]:
# Function including all classifiers
model=[('xgboost', XGBClassifier())]
#,
# ('Randomforest', RandomForestClassifier()),
# ('LogisticRegression', LogisticRegression()),
# ('lightgbm',LGBMClassifier())]
p_xgboost= {
'xgboost__min_child_weight': [5, 10],
'xgboost__gamma': [2, 5],
'xgboost__subsample': [ 0.8, 1.0],
'xgboost__colsample_bytree': [0.8, 1.0],
'xgboost__max_depth': [4, 5]
}
p_rf= {#'RF__learning_rate': [0.1, 0.2],
'RF__n_estimators': [30, 50],
'RF__max_depth': [2, 4],
'RF__min_samples_leaf': [2, 4]}
p_lg = {'lg__solver': ['newton-cg', 'sag', 'lbfgs']
# ,'lg__multi_class':['ovr', 'multinomial']
}
p_lgbm= {
'LGBM__n_estimators':[50, 100, 500, 1000]
}
params = [p_xgboost]
# , p_rf, p_lg, p_lgbm]
cv_range=[3,5,10]
# scoring = ['accuracy', 'precision_weighted', 'recall_weighted', 'f1_weighted', 'roc_auc']
def Best_predict (X,y):
b_score=0
for i in range(len (model)):
for c in cv_range:
            pipe_steps = [('scaler', StandardScaler()), model[i]]
            pipeline = Pipeline(pipe_steps)
            cv = GridSearchCV(pipeline, params[i], cv=c, scoring='roc_auc')
#cv = RandomizedSearchCV(pipeline, params, cv=c, scoring='roc_auc')
try:
cv.fit(X, y)
if cv.best_score_ > b_score:
b_score = cv.best_score_
b_params=cv.best_params_
b_model=i
y_test = cv.predict_proba(test_WOCalc)
#print ('model {} best parameters are {} and best score iis {}' .format(i, cv.best_params_, cv.best_score_)
except:
print(str(i) + " pass")
pass
    print ('most accurate model is {}, best parameters are {}, and best score is {}'.format(model[b_model][0], b_params, b_score))
    results_df = pd.DataFrame(data={'id':test['id'], 'target':y_test[:,1]})
    results_df.to_csv('submission-random-grid-search-' + model[b_model][0] + '-porto-01.csv', index=False)
# In[101]:
Best_predict(train_WOCalc, train_target)
# ## GridsearchCV
# In[66]:
# cv = GridSearchCV(pipeline, params, cv=10, scoring='roc_auc')
# #cv = RandomizedSearchCV(pipeline, params, cv=5)
# ### Fitting on train set
# In[ ]:
# cv.fit(X, y)
# print(cv.best_params_)
# print(cv.best_score_)
# ### Prediction and preparing the output
# In[85]:
# y_test = cv.predict_proba(test_WOCalc)
# results_df = pd.DataFrame(data={'id':test['id'], 'target':y_test[:,1]})
# results_df.to_csv('submission-random-grid-search-LGBM-porto-01.csv', index=False)
|
a4f490d08a9672b917ebfe2601b27d3914c28152
|
[
"Python",
"C++",
"RMarkdown"
] | 4
|
C++
|
royaghorashi62/work-sample
|
012ac4b0d183ec1483e77f2cb2bec3fb3b5fad33
|
2a8976b203d19638c1d124fc3e3405eee96dd78d
|
refs/heads/master
|
<repo_name>cmscardoso/djangohunter<file_sep>/djangohunter.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Title: Django Hunter
# Date: 2018-11-08
# Author: <NAME>
# Shodan Dork: ('DisallowedHost at /', 'DisallowedHost', 'KeyError', 'OperationalError', 'Page not found at /')
try:
from pyfiglet import Figlet
except ImportError as e:
print("Error: %s \n" % (e))
print("Try this ... pip install -r /path/to/requirements.txt")
import argparse
import sys
from utils.color import Color
from utils.search import Search
from concurrent.futures import ThreadPoolExecutor
if __name__ == '__main__':
graph = Figlet(font='slant').renderText('djangoHunter')
print(graph)
print("""
Tool designed to help identify incorrectly configured
Django applications that are exposing sensitive information.\n
""")
parser = argparse.ArgumentParser(description='Django Hunter', usage=None)
parser.add_argument('--dork', '-s', required=False, metavar='dork', default='title:"DisallowedHost"', help='Search for dork shodan i.e DisallowedHost at /')
parser.add_argument('--key', '-k', required=True, metavar='API key', help='Shodan API key')
parser.add_argument( '--limit','-l', type=int, default=999, required=False, metavar='limit', help='Limit results returned by shodan')
parser.add_argument( '--timeout','-t', type=float, required=False, default=5, metavar='timeout', help='Timeout default: 5')
parser.add_argument( '--threads', type=float, required=False, default=5, metavar='threads', help='Threads default: 5')
args = parser.parse_args()
    if len(sys.argv) <= 2:
        parser.print_help()
        sys.exit(1)
django = Search(api=args.key, dork=args.dork, limit=args.limit, timeout=args.timeout)
with ThreadPoolExecutor(max_workers=args.threads) as executor:
concurrents = executor.map(django.load, django.urls)
<file_sep>/README.md
# Djangohunter
##### Tool designed to help identify incorrectly configured Django applications that are exposing sensitive information.
https://www.reddit.com/r/django/comments/87qcf4/28165_thousand_django_running_servers_are_exposed/
https://twitter.com/6ix7ine/status/978598496658960384?lang=en
### Usage
```
Usage: python3 djangohunter.py --key {shodan}
Dorks: 'DisallowedHost', 'KeyError', 'OperationalError', 'Page not found at /'
```
### Requirements
- Shodan
- Pyfiglet
- Requests
- BeautifulSoup
```pip install -r requirements.txt```
### Demo
[](https://asciinema.org/a/210648)
### Disclaimer
Code samples are provided for educational purposes. Adequate defenses can only be built by researching attack techniques available to malicious actors. Using this code against target systems without prior permission is illegal in most jurisdictions. The authors are not liable for any damages from misuse of this information or code.
## Donations
* XMR: `49m12JEEC6HPCHkLMX5QL4SrDQdKwh6eb4Muu8Z9CwA9MwemhzFQ3VcgHwyuR73rC22WCymTUyep7DVrfN3GPt5JBCekPrR `
<file_sep>/requirements.txt
shodan
beautifulsoup4
requests
pyfiglet
<file_sep>/utils/search.py
try:
import shodan
import requests
from bs4 import BeautifulSoup
except ImportError as e:
print("Error: %s \n" % (e))
print("Try this ... pip install -r /path/to/requirements.txt")
from utils.color import Color
class Search(object):
"""docstring for ClassName"""
def __init__(self, api, dork='"DisallowedHost"', limit=None, offset=None, timeout=None) :
self.shodan = shodan.Shodan(api)
self.limit = limit
self.offset = offset
self.timeout = timeout
self._urls = []
self.color = Color()
try:
results = self.shodan.search(dork, limit=self.limit, offset=offset)
matches = results['matches']
total = results['total']
print('{} Shodan found {} hosts with debug mode enabled'.format(self.color.status("[+]"), total))
print("{} Looking for secret keys wait a moment ..\n".format(self.color.yellows("[!]")))
for match in matches:
self.ipadress = match['ip_str']
self.port = match['port']
self.hostnames = match['hostnames']
self.org = match['org']
self.domains = match['domains']
self.city = match['location']['city']
self.country = match['location']['country_name']
# Skip hosts with SSL
if self.port == 443 :
continue
self._urls.append(['http://{}:{}'.format(self.ipadress, self.port)])
except shodan.APIError as error:
print("error: {}".format(error))
pass
@property
def urls(self):
return self._urls
def load(self, urls):
for url in urls:
counter = 0
mapping = (
'DB_HOST',
'AWS',
'MYSQL',
'RDS_HOSTNAME',
'ADMIN_USER',
'RABBITMQ_HOST',
'WALLET_RW_HOST',
'POSTGRES_PASSWORD',
'KYC_API_KEY',
'DATABASE_URL',
'AUTO_RECRAW_HOST',
'BONANZA_API_KEY',
'CELERY',
'MWS_ACCESS_KEY',
'PROXY_SECRET',
'KEEPA_API',
'MONGODB_PASSWORD',
'SCRAPYMONGO_PASSWORD',
'FACE_ID_DB_PASSWORD',
'AWS_SECRET_ACCESS_KEY',
'GOOGLE_OAUTH2_CLIENT_SECRET',
'POSTGRES_PASSWORD',
'DJANGO_SECRET_KEY',
'FIREBASE_SERVER_KEY',
'GOOGLE_API_KEY',
'SSH_PASSWORD',
'SSH_AUTH',
'RABBITMQ_DEFAULT_PASS',
'AWS_SECRET_KEY',
'AWS_S3_BUCKET',
'SENDGRID_PASSWORD',
'PAYU_KEY',
'DHL_API_CLIENT_SECRET',
'LIGHT_PASSWORD',
'DB_PASSWORD',
'ATEL_AUTH_SECRET',
'GPG_KEY',
'Facebook',
'Google',
'Yahoo',
'Github',
'Stack',
'GEOSERVER',
'RDS_PASSWORD',
'SMTP_PASSWORD'
) # Interesting keywords ('DisallowedHost at /', 'DisallowedHost', 'KeyError', 'OperationalError', 'Page not found at /', '')
self.hostname = ', '.join(str(hostname) for hostname in self.hostnames)
self.domain = ', '.join(str(domain) for domain in self.domains)
try:
request = requests.get('{}'.format(url), timeout=self.timeout)
html = BeautifulSoup(request.text, 'html.parser')
keys = []
for key in mapping :
if key in html.prettify():
keys.append(key)
keys = ', '.join(str(key) for key in keys) # Keywords found
if len(keys) != 0:
print("[+] Possible exposed credentials on {}".format(request.url))
print('[+] Secret keys found {}\n'.format(self.color.error(keys)))
# some information about the host
print("\tOrganization: {}\n\tHostnames: {}\n\tDomains: {}\n\tCity: {}\n\tCountry: {}\n".format(self.org, self.hostname, self.domain, self.city, self.country))
except requests.exceptions.RequestException as error:
continue
# Keep track of how many results have been downloaded so we don't use up all our query credits
counter += 1
if counter >= self.limit:
break
|
89ef2f902b86e4257185cd129a56c6c798f119ee
|
[
"Markdown",
"Python",
"Text"
] | 4
|
Python
|
cmscardoso/djangohunter
|
cb05dad131655bc5f923d9d94341551b32d175de
|
1de1399f6f2c4c9ce146ccd15abfd52cd144cfb7
|
refs/heads/master
|
<repo_name>victorvoid/layden<file_sep>/src/main.rs
extern crate gtk;
mod gui {
pub mod gtk3;
}
fn main(){
gui::gtk3::launch();
}
<file_sep>/src/gui/gtk3/mod.rs
use gtk::{self};
use gtk::prelude::*;
pub fn launch(){
gtk::init().unwrap_or_else(|_| panic!("panic!"));
let builder = gtk::Builder::new_from_string(include_str!("window_app.ui"));
let window: gtk::Window = builder.get_object("window_app").unwrap();
window.show_all();
window.connect_delete_event(|_, _| {
gtk::main_quit();
Inhibit(false)
});
gtk::main();
}
<file_sep>/Cargo.toml
[package]
name = "layden"
version = "0.1.0"
authors = ["<NAME> <<EMAIL>>"]
[dependencies]
gtk = { git = "https://github.com/gtk-rs/gtk.git", features = ["v3_16"] }
gdk = { git = "https://github.com/gtk-rs/gdk.git", features = ["v3_16"] }<file_sep>/README.md
# layden
A vector design tool entirely focused on user interface design.
## How to run development environment
You should do this before editing any code to see how it works.
1. You need install the [GTK here](http://gtk-rs.org/docs/requirements.html)
2. run `git clone <EMAIL>:victorvoid/layden.git`
3. run `cargo build`
4. run `./target/debug/layden`
License
-------
The code is available under the [MIT License](LICENSE.md).
|
5b83f019de0acb75a6d4dc42263240691d3dfc10
|
[
"TOML",
"Rust",
"Markdown"
] | 4
|
Rust
|
victorvoid/layden
|
1b9e6848ea95986668520a31e8f3fc75e9289b03
|
e05089b183287f1e8278979c2dee8562beb4229d
|
refs/heads/master
|
<file_sep>import Vue from 'vue'
import './plugins/axios'
import App from './App.vue'
import router from './router'
import store from './store'
import './assets/animate.min.css'
// Register axios globally
// import axios from 'axios';
// import vueAxios from 'vue-axios';
import './plugins/vant.js'
import 'lib-flexible/flexible'
import VueCookies from 'vue-cookies'
import { NavBar,Icon,Form,Button,Field,Popup,Toast,Search,Swipe, SwipeItem,Lazyload,Tabbar, TabbarItem,Tag,GoodsAction, GoodsActionIcon, GoodsActionButton,Checkbox,SwipeCell,Stepper,SubmitBar,List,AddressList,Empty,AddressEdit,Cell,Dialog,TreeSelect,Tab,Tabs,Uploader } from 'vant';
Vue.use(NavBar).use(Icon).use(Form).use(Button).use(Field).use(Popup).use(Toast).use(Search).use(Swipe).use(SwipeItem).use(Lazyload).use(Tabbar).use(TabbarItem).use(Tag).use(GoodsAction).use(GoodsActionButton).use(GoodsActionIcon).use(Checkbox).use(SwipeCell).use(Stepper).use(SubmitBar).use(List).use(AddressList).use(Empty).use(AddressEdit).use(Cell).use(Dialog).use(TreeSelect).use(Tab).use(Tabs).use(Uploader);
// Vue.use(vueAxios).use(axios)
Vue.use(VueCookies);
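// For every POST request, flatten the data object into a key=value&key=value string before it is sent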
axios.interceptors.request.use(config => {
if (config.method === 'post') {
let paramsStr = '';
for (let key in config.data) {
paramsStr += `${key}=${config.data[key]}&`;
}
config.data = paramsStr.slice(0, -1);
}
return config;
})
Vue.config.productionTip = false
Vue.prototype.baseURL = "http://www.kangliuyong.com:10002"
Vue.prototype.appkey = '<KEY>0xB1iV06BeNA='
new Vue({
router,
store,
render: h => h(App)
}).$mount('#app')
<file_sep>import { Toast } from 'vant';
class RegInput {
RegIpt(o){
for(var key in o){
if(!o[key].reg.test(o[key].value)){
Toast({
message:o[key].errorMsg,
forbidClick:true
});
return false;
}
}
return true;
}
}
export const utils = new RegInput()
|
b88a17cb9694f27bcf24873fd1431693d5be78d6
|
[
"JavaScript"
] | 2
|
JavaScript
|
Thh123-gif/vue_coffee_store
|
a572ea29b989e5ae18214807d99743d846c9f6ee
|
447ef7900e3a871f38906d9c380cdf5bb559eb4e
|
refs/heads/master
|
<file_sep>'use strict';
var React = require('react-native');
var {
AppRegistry,
StyleSheet,
Text,
View,
Navigator,
Component,
TouchableHighlight
} = React;
var App = React.createClass({
getInitialState() {
return { name: 'Jerome' }
},
renderScene(route, navigator) {
if(route.name == 'Main') {
return <Main navigator={navigator} name={this.state.name} {...route.passProps} />
}
if(route.name == 'Home') {
return <Home navigator={navigator} name={this.state.name} {...route.passProps} />
}
}
,
render() {
console.log('this. state', this.state)
return (
<Navigator
style={{ flex:1 }}
initialRoute={{ name: 'Main' }}
renderScene={ this.renderScene } />
)
}
})
var Main = React.createClass({
_navigate(name) {
this.props.navigator.push({
name: 'Home',
passProps: {
name: name
}
})
},
render() {
return (
<View style={ styles.container }>
<Text style={ styles.heading }>This is Page 1</Text>
<TouchableHighlight style={ styles.button } onPress={ () => this._navigate('Jerome ~') }>
<Text style={ styles.buttonText }>Go Page2 </Text>
</TouchableHighlight>
</View>
)
}
})
var Home = React.createClass({
render() {
return (
<View style={ styles.container }>
<Text style={ styles.heading }>This is Page 2, Hello from { this.props.name }</Text>
<TouchableHighlight style={ styles.button } onPress={ () => this.props.navigator.pop() }>
<Text style={ styles.buttonText }>Go back Page 1</Text>
</TouchableHighlight>
</View>
)
}
})
var styles = StyleSheet.create({
container: {
flex: 1,
marginTop: 80
},
heading: {
fontSize:22,
marginBottom:10
},
button: {
height:60,
backgroundColor: '#efefef',
alignItems: 'center',
justifyContent: 'center'
},
buttonText: {
fontSize:20
}
});
module.exports = App;
|
59e6a38fd8a98e6663dd8e6e123df383f9b1dcb4
|
[
"JavaScript"
] | 1
|
JavaScript
|
mrjerometw/reactnativeJS_SimpleNavigator
|
193c8c536228ee19d52656954ea6585853832b31
|
d035a7cfa8555d93d4b764652ac1d630d600c49f
|
refs/heads/master
|
<file_sep>package com.baojie.cache.dao;
import com.baojie.common.entity.User;
import com.baojie.common.mapper.MyMapper;
/**
* @author jbj
* @create 2019-07-04 11:21
*/
/**
* readWrite=false means read-only: the same cached object instance is returned
* readWrite=true means read/write: a copy is returned via serialization, so the entity must be serializable
* flushInterval
*/
//@CacheNamespace(flushInterval = 2, readWrite = false, size = 10)
//@CacheNamespaceRef(UserDao.class)
public interface UserDao extends MyMapper<User> {
User getUser2(Integer id);
}
<file_sep><?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>2.1.6.RELEASE</version>
<relativePath/> <!-- lookup parent from repository -->
</parent>
<groupId>com.baojie</groupId>
<artifactId>mybatis</artifactId>
<version>0.0.1-SNAPSHOT</version>
<name>mybatis</name>
<description>Demo project for Spring Boot</description>
<modules>
<module>common</module>
<!-- mybatis + mapper + pagehelper + druid, configured via configuration files -->
<module>mybatis_yml</module>
<!-- mybatis + mapper + pagehelper + druid, configured via Java code -->
<module>mybatis_conf</module>
<!-- mybatis + mapper + pagehelper + druid, multiple data sources configured via configuration files -->
<module>mybatis_conf2</module>
<!-- mybatis + mapper + pagehelper + druid, dynamically adding and switching data sources via configuration files -->
<module>mybatis_conf3</module>
<!-- mybatis first- and second-level caches -->
<module>mybatis_cache</module>
<!-- mybatis interceptor -->
<module>mybatis_interceptor</module>
</modules>
<properties>
<java.version>1.8</java.version>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<spring-cloud-dependencies.version>Dalston.RELEASE</spring-cloud-dependencies.version>
<spring-cloud-starter-eureka.version>1.3.5.RELEASE</spring-cloud-starter-eureka.version>
<spring-cloud-starter-feign.version>1.3.2.RELEASE</spring-cloud-starter-feign.version>
<fastjson.version>1.2.48-SNAPSHOT</fastjson.version>
<mysql.version>8.0.11</mysql.version>
<pagehelper.version>1.2.10</pagehelper.version>
<mapper-spring-boot-starter.version>2.1.5</mapper-spring-boot-starter.version>
<mybatis-spring-boot-starter.version>1.3.1</mybatis-spring-boot-starter.version>
<springfox-swagger2.version>2.7.0</springfox-swagger2.version>
</properties>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>fastjson</artifactId>
<version>${fastjson.version}</version>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>${mysql.version}</version>
</dependency>
<dependency>
<groupId>org.mybatis.spring.boot</groupId>
<artifactId>mybatis-spring-boot-starter</artifactId>
<version>${mybatis-spring-boot-starter.version}</version>
</dependency>
<dependency>
<groupId>tk.mybatis</groupId>
<artifactId>mapper-spring-boot-starter</artifactId>
<version>${mapper-spring-boot-starter.version}</version>
</dependency>
<dependency>
<groupId>com.github.pagehelper</groupId>
<artifactId>pagehelper-spring-boot-starter</artifactId>
<version>${pagehelper.version}</version>
<exclusions>
<exclusion>
<groupId>org.mybatis.spring.boot</groupId>
<artifactId>mybatis-spring-boot-starter</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>com.alibaba</groupId>
<artifactId>druid-spring-boot-starter</artifactId>
<version>1.1.10</version>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-data-redis</artifactId>
</dependency>
<!-- mybatis second-level cache -->
<dependency>
<groupId>org.mybatis.caches</groupId>
<artifactId>mybatis-redis</artifactId>
<version>1.0.0-beta2</version>
</dependency>
</dependencies>
</dependencyManagement>
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<optional>true</optional>
</dependency>
<!-- Watch the versions: newer swagger releases pull in a newer guava that conflicts with spring -->
<dependency>
<groupId>io.springfox</groupId>
<artifactId>springfox-swagger-ui</artifactId>
<version>${springfox-swagger2.version}</version>
</dependency>
<dependency>
<groupId>io.springfox</groupId>
<artifactId>springfox-swagger2</artifactId>
<version>${springfox-swagger2.version}</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
</plugin>
</plugins>
</build>
</project>
<file_sep>package com.baojie.interceptor.conf;
import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.spring.boot.autoconfigure.DruidDataSourceBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/**
* @author jbj
* @create 2019-07-04 10:29
*/
@Configuration
public class DataSourceConfig {
@Bean
public DruidDataSource druidDataSource() {
return DruidDataSourceBuilder.create().build();
}
}
<file_sep>package com.baojie.mybatis_conf.controller;
import com.baojie.common.entity.User3;
import com.baojie.mybatis_conf.dao.User3Dao;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
/**
* @author jbj
* @create 2019-07-01 14:55
*/
@RestController
public class User3Controller {
@Autowired
private User3Dao user3Dao;
@GetMapping("/getUser3")
public List<User3> selectUser() {
return user3Dao.selectAll();
}
}
<file_sep>package com.baojie.mybatis_conf2.enm;
/**
* @author jbj
* @create 2019-07-04 10:33
*/
public enum DBTypeEnum {
/**
* Master data source
*/
MASTER,
/**
* Slave data source 1
*/
SLAVE,
/**
* Slave data source 2
*/
SLAVE2;
}
<file_sep>package com.baojie.mybatis_conf2.aop;
import com.baojie.mybatis_conf2.conf.DBContextHolder;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Before;
import org.aspectj.lang.annotation.Pointcut;
import org.springframework.stereotype.Component;
/**
* @author jbj
* @create 2019-07-04 10:58
* Defines two pointcuts: query methods use the slave data source, insert/update/delete methods use the master data source
*/
@Aspect
@Component
public class DataSourceAop {
@Pointcut("(execution(* com.baojie.mybatis_conf2.service.*Service.select*(..)) " +
"|| execution(* com.baojie.mybatis_conf2.service.*Service.get*(..)))")
public void readPointcut() {}
@Pointcut("@annotation(com.baojie.mybatis_conf2.anno.Master) " +
"|| execution(* com.baojie.mybatis_conf2.service.*Service.insert*(..)) " +
"|| execution(* com.baojie.mybatis_conf2.service.*Service.add*(..)) " +
"|| execution(* com.baojie.mybatis_conf2.service.*Service.update*(..)) " +
"|| execution(* com.baojie.mybatis_conf2.service.*Service.edit*(..)) " +
"|| execution(* com.baojie.mybatis_conf2.service.*Service.delete*(..)) " +
"|| execution(* com.baojie.mybatis_conf2.service.*Service.remove*(..))")
public void writePointcut() {}
@Before("readPointcut()")
public void read() {
DBContextHolder.slave();
}
@Before("writePointcut()")
public void write() {
DBContextHolder.master();
}
}
<file_sep>package com.baojie.cache;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import tk.mybatis.spring.annotation.MapperScan;
@SpringBootApplication
@MapperScan("com.baojie.cache.dao")
public class CacheApplication {
public static void main(String[] args) {
SpringApplication.run(CacheApplication.class, args);
}
}
<file_sep>package com.baojie.common.base;
import com.baojie.common.mapper.MyMapper;
import org.springframework.beans.factory.annotation.Autowired;
import tk.mybatis.mapper.entity.Example;
import java.lang.reflect.ParameterizedType;
import java.util.List;
import java.util.Map;
/**
* @author jbj
* @create 2019-07-06 16:03
*/
public class BaseServiceImpl<M extends MyMapper, T> implements BaseServie<T> {
@Autowired
protected M mapper;
@Override
public int insert(T t) {
return mapper.insert(t);
}
@Override
public int update(T t) {
return mapper.updateByPrimaryKeySelective(t);
}
@Override
public int delete(T t) {
return mapper.delete(t);
}
/**
* TODO
* @param paramMap
* @return
*/
@Override
public List<T> selectList(Map<String, Object> paramMap) {
return mapper.selectByExample(null);
}
@Override
public List<T> selectAll() {
return mapper.selectAll();
}
/**
* TODO
* @param id
* @return
*/
@Override
public T getById(String id) {
Class<T> clazz = (Class<T>) ((ParameterizedType) getClass().getGenericSuperclass()).getActualTypeArguments()[1];
Example example = new Example(clazz);
Example.Criteria criteria = example.createCriteria();
criteria.andEqualTo("id", id);
// criteria.andEqualTo("isDel", 0);
List<T> list = mapper.selectByExample(example);
if (list.isEmpty()) {
return null;
}
return list.get(0);
}
@Override
public T getById(Integer id) {
return getById(id.toString());
}
}
<file_sep>package com.baojie.interceptor.service;
import com.baojie.common.base.BaseServie;
import com.baojie.common.entity.User;
import org.apache.ibatis.session.RowBounds;
import java.util.List;
/**
* @author jbj
* @create 2019-07-04 11:18
*/
public interface UserService extends BaseServie<User> {
/**
* Paged query using RowBounds
* @param rowBounds
* @return
*/
List<User> selectAll2(RowBounds rowBounds);
}
<file_sep>package com.baojie.common.entity;
import lombok.Data;
import lombok.EqualsAndHashCode;
/**
* @author jbj
* @create 2019-05-13 12:02
*/
@Data
@EqualsAndHashCode
public class User3 {
private Integer id;
private String name;
}
<file_sep>package com.baojie.mybatis_conf2.service;
import com.baojie.common.entity.User;
import java.util.List;
/**
* @author jbj
* @create 2019-07-04 11:18
*/
public interface UserService {
User getUser(int id);
List<User> selectUser();
int insert(User user);
int update(User user);
}
<file_sep>package com.baojie.common.entity;
import lombok.Data;
import tk.mybatis.mapper.annotation.NameStyle;
import javax.persistence.Table;
/**
* @author jbj
* @create 2019-07-06 8:27
*/
@Data
@Table(name = "data_source")
@NameStyle
public class DataSourceEntity extends BaseEntity {
public DataSourceEntity(String ip, int port, String instanceName, String username, String password) {
this.ip = ip;
this.port = port;
this.instanceName = instanceName;
this.username = username;
this.password = <PASSWORD>;
}
public DataSourceEntity(String dbKey, String ip, int port, String instanceName, String username, String password) {
this.keyName = dbKey;
this.ip = ip;
this.port = port;
this.instanceName = instanceName;
this.username = username;
this.password = <PASSWORD>;
}
/**
* Key identifying the current data source
*/
private String keyName;
/**
* IP address
*/
private String ip;
/**
* Port
*/
private int port;
/**
* Default database instance
*/
private String instanceName;
/**
* Username
*/
private String username;
/**
* Password
*/
private String password;
}
<file_sep>package com.baojie.mybatis_conf.dao;
import com.baojie.common.entity.User;
import com.baojie.common.mapper.MyMapper;
/**
* @author jbj
* @create 2019-07-01 14:55
*/
public interface UserDao extends MyMapper<User> {
}
<file_sep>package com.baojie.dymic.controller;
import com.baojie.common.entity.DataSourceEntity;
import com.baojie.dymic.conf.DynamicDatasource;
import com.baojie.dymic.service.DataSourceEntityService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import javax.sql.DataSource;
import java.util.List;
/**
* @author jbj
* @create 2019-07-06 17:19
*/
@RestController
@RequestMapping("/datasource")
public class DataSourceEntityController {
@Autowired
private DataSourceEntityService dataSourceEntityService;
/**
* Creates a data source, keeps it in memory, and persists it
* @param dataSourceEntity
* @return
*/
@PostMapping("insert")
public int insert(@RequestBody DataSourceEntity dataSourceEntity) {
DataSource dataSource = DynamicDatasource.createDataSource(dataSourceEntity);
//Store it in the in-memory data source map
DynamicDatasource.addDataSourceMap(dataSourceEntity.getKeyName(), dataSource);
return dataSourceEntityService.insert(dataSourceEntity);
}
@GetMapping("selectList")
public List<DataSourceEntity> selectList() {
return dataSourceEntityService.selectAll();
}
@GetMapping("getById")
public DataSourceEntity getById(int id) {
return dataSourceEntityService.getById(id);
}
}
<file_sep># mybatis
learn mybatis
mybatis_yml: Spring Boot + mybatis + druid + pageHelper + mapper configured through configuration files
mybatis_conf: Spring Boot + mybatis + druid + pageHelper + mapper configured in Java code
mybatis_conf2: Spring Boot + mybatis + druid + pageHelper + mapper configured in Java code, with read/write splitting
mybatis_conf3: Spring Boot + mybatis + druid + pageHelper + mapper configured in Java code, with data sources added and removed dynamically through an API
mybatis_cache: mybatis caching
mybatis_interceptor: mybatis plugin study, pagination plugin<file_sep>package com.baojie.dymic.aop;
import com.baojie.dymic.conf.DBContextHolder;
import com.baojie.dymic.constant.DataSourceConstant;
import com.baojie.common.entity.BaseEntity;
import org.aspectj.lang.JoinPoint;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Before;
import org.aspectj.lang.annotation.Pointcut;
import org.springframework.stereotype.Component;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;
import javax.servlet.http.HttpServletRequest;
/**
* @author jbj
* @create 2019-07-04 10:58
* Pointcut over all controller methods: resolve the dbKey and switch the data source accordingly
*/
@Aspect
@Component
public class DataSourceAop {
@Pointcut("execution(* com.baojie.dymic.controller.*Controller.*(..))")
public void pointcut() {}
@Before("pointcut()")
public void read(JoinPoint joinPoint) {
Object[] args = joinPoint.getArgs();
if (args.length > 0) {
// Request received: inspect the request content
ServletRequestAttributes attributes = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes();
HttpServletRequest request = attributes.getRequest();
String httpMethod = request.getMethod();
if ("get".equalsIgnoreCase(httpMethod)) {
//GET endpoints are expected to pass the dbKey as a request parameter
String dbKey = request.getParameter(DataSourceConstant.DB_KEY);
DBContextHolder.setDbKey(dbKey);
} else {
//For other methods the parameters are carried in the request body
if (args[0] instanceof BaseEntity) {
//POST requests are expected to pass an entity as the first argument, carrying the dbKey
BaseEntity entity = (BaseEntity) args[0];
DBContextHolder.setDbKey(entity.getDbKey());
}
}
} else {
//No dbKey supplied: fall back to the default data source
DBContextHolder.setDbKey(DataSourceConstant.DEFAULT_DS);
}
}
}
<file_sep>package com.baojie.dymic.dao;
import com.baojie.common.entity.User;
import com.baojie.common.mapper.MyMapper;
/**
* @author jbj
* @create 2019-07-04 11:21
*/
public interface UserDao extends MyMapper<User> {
}
<file_sep>package com.baojie.mybatis_conf2.controller;
import com.baojie.common.entity.User;
import com.baojie.mybatis_conf2.service.UserService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
/**
* @author jbj
* @create 2019-07-04 11:16
*/
@RestController
public class UserController {
@Autowired
private UserService userService;
@GetMapping("/getUser")
public User getUser(int id) {
return userService.getUser(id);
}
@GetMapping("/selectUser")
public List<User> selectUser() {
return userService.selectUser();
}
@PostMapping("/insert")
public int insert(@RequestBody User user) {
return userService.insert(user);
}
}
<file_sep>package com.baojie.dymic.service.impl;
import com.baojie.common.entity.User;
import com.baojie.dymic.anno.Master;
import com.baojie.dymic.dao.UserDao;
import com.baojie.dymic.service.UserService;
import com.github.pagehelper.PageHelper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.List;
/**
* @author jbj
* @create 2019-07-04 11:20
*/
@Service("userService")
public class UserServiceImpl implements UserService {
@Autowired
private UserDao userDao;
@Override
public User getUser(int id) {
User user = new User();
user.setId(id);
return userDao.selectOne(user);
}
@Override
@Master
public List<User> selectUser() {
PageHelper.startPage(2, 1);
return userDao.selectAll();
}
@Override
public int insert(User user) {
return userDao.insert(user);
}
@Override
public int update(User user) {
return userDao.updateByPrimaryKeySelective(user);
}
}
<file_sep>package com.baojie.dymic.controller;
import com.baojie.common.base.BaseServie;
import com.baojie.common.entity.BaseEntity;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestParam;
import java.util.List;
import java.util.Map;
/**
* @author jbj
* @create 2019-07-07 11:27
*/
public class BaseController<T extends BaseEntity, Biz extends BaseServie<T>> {
@Autowired
private Biz biz;
@PostMapping("insert")
public T insert(@RequestBody T t) {
biz.insert(t);
return t;
}
@PostMapping("update")
public T update(@RequestBody T t) {
biz.update(t);
return t;
}
@PostMapping("delete")
public int delete(@RequestBody T t) {
return biz.delete(t);
}
@GetMapping("select")
public List<T> select(@RequestParam Map<String, Object> paramMap) {
return biz.selectList(paramMap);
}
@GetMapping("getById")
public T getById(@RequestParam Object id) {
if (id instanceof String) {
return biz.getById((String) id);
} else {
return biz.getById((Integer) id);
}
}
}
<file_sep>package com.baojie.interceptor.dialect;
import org.apache.ibatis.cache.CacheKey;
import org.apache.ibatis.mapping.BoundSql;
import org.apache.ibatis.session.RowBounds;
import java.util.List;
import java.util.Properties;
/**
* @author jbj
* @create 2019-07-23 14:44
* @description Database dialect
*/
public interface Dialect {
/**
* Whether to skip the count and paging queries
* @param msId fully qualified name of the executed MyBatis method
* @param parameterObject method parameters
* @param rowBounds paging parameters
* @return true to skip and return the default query result; false to run the paging query
*/
boolean skip(String msId, Object parameterObject, RowBounds rowBounds);
/**
* Called before paging; return true to run the count query, return false to continue with beforeCount below
* @param msId
* @param parameterObject
* @param rowBounds
* @return
*/
boolean beforeCount(String msId, Object parameterObject, RowBounds rowBounds);
/**
* Builds the count query SQL
* @param boundSql bound SQL object
* @param parameterObject
* @param rowBounds
* @param countKey
* @return
*/
String getCountSql(BoundSql boundSql, Object parameterObject, RowBounds rowBounds, CacheKey countKey);
/**
* Called after the count query has finished
*
* @param count total number of results
* @param parameterObject method parameters
* @param rowBounds paging parameters
*/
void afterCount(long count, Object parameterObject, RowBounds rowBounds);
/**
* Called before paging; return true to run the paging query, false to return the default query result
*
* @param msId fully qualified name of the executed MyBatis method
* @param parameterObject method parameters
* @param rowBounds paging parameters
* @return
*/
boolean beforePage(String msId, Object parameterObject, RowBounds rowBounds);
/**
* Builds the paging query SQL
*
* @param boundSql bound SQL object
* @param parameterObject method parameters
* @param rowBounds paging parameters
* @param pageKey paging cache key
* @return
*/
String getPageSql(BoundSql boundSql, Object parameterObject, RowBounds rowBounds, CacheKey pageKey);
/**
* Called after the paging query to process the result; the interceptor returns this method's value directly
*
* @param pageList paging query result
* @param parameterObject method parameters
* @param rowBounds paging parameters
* @return
*/
Object afterPage(List pageList, Object parameterObject, RowBounds rowBounds);
/**
* Sets the plugin parameters
*
* @param properties plugin properties
*/
void setProperties(Properties properties);
}
<file_sep>package com.baojie.interceptor.dialect;
import lombok.Data;
import org.apache.ibatis.session.RowBounds;
/**
* @author jbj
* @create 2019-07-25 9:31
* @description Adds a total-count property to RowBounds
*/
@Data
public class PageRowBounds extends RowBounds {
private long total;
public PageRowBounds() {
super();
}
public PageRowBounds(int offset, int limit) {
super(offset, limit);
}
}
<file_sep>package com.baojie.interceptor.service.impl;
import com.baojie.common.base.BaseServiceImpl;
import com.baojie.common.entity.User;
import com.baojie.interceptor.dao.UserDao;
import com.baojie.interceptor.service.UserService;
import org.apache.ibatis.session.RowBounds;
import org.springframework.stereotype.Service;
import java.util.List;
/**
* @author jbj
* @create 2019-07-04 11:20
*/
@Service("userService")
public class UserServiceImpl extends BaseServiceImpl<UserDao, User> implements UserService {
@Override
public List<User> selectAll2(RowBounds rowBounds) {
return mapper.selectAll2(rowBounds);
}
}
<file_sep>package com.baojie.cache.service.impl;
import com.baojie.cache.service.UserService;
import com.baojie.common.base.BaseServiceImpl;
import com.baojie.common.entity.User;
import com.baojie.cache.dao.UserDao;
import org.springframework.stereotype.Service;
/**
* @author jbj
* @create 2019-07-04 11:20
*/
@Service("userService")
public class UserServiceImpl extends BaseServiceImpl<UserDao, User> implements UserService {
}
<file_sep>package com.baojie.mybatis_yml;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import tk.mybatis.spring.annotation.MapperScan;
@SpringBootApplication
@MapperScan("com.baojie.mybatis_yml.dao")
public class ProviderMybatisApplication {
public static void main(String[] args) {
SpringApplication.run(ProviderMybatisApplication.class, args);
}
}
<file_sep>package com.baojie.mybatis_conf.controller;
import com.baojie.common.entity.User;
import com.baojie.mybatis_conf.dao.UserDao;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
/**
* @author jbj
* @create 2019-07-01 14:55
*/
@RestController
public class UserController {
@Autowired
private UserDao userDao;
@GetMapping("/getUser")
public List<User> selectUser() {
return userDao.selectAll();
}
}
<file_sep>package com.baojie.mybatis_conf2.anno;
import java.lang.annotation.*;
/**
* @author jbj
* @create 2019-07-04 11:30
* Custom annotation: methods annotated with @Master are forced to read from the master database
*/
@Documented
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
public @interface Master {
String value() default "";
}
<file_sep>package com.baojie.common.entity;
import lombok.Data;
import javax.persistence.Transient;
import java.io.Serializable;
/**
* @author jbj
* @create 2019-07-07 10:58
*/
@Data
public class BaseEntity implements Serializable {
/**
* Data source key; the data source is switched based on this field's value, or the default data source is used when it is absent
*/
@Transient
private String dbKey;
}
<file_sep>package com.baojie.common.mapper;
import tk.mybatis.mapper.common.Mapper;
/**
* @author jbj
* @create 2019-05-13 11:54
*/
public interface MyMapper<T> extends Mapper<T> {
}
|
a61f99975b7037e0f8e5724139a4f0d9a2fd21f4
|
[
"Markdown",
"Java",
"Maven POM"
] | 29
|
Java
|
jibaojie/mybatis
|
be76c2ac2ca351e4fcdbd1769849e03a7e3acc69
|
a5ca4f4cbc28c3fff9bb2cd6e2faebd71c14032e
|
refs/heads/master
|
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace GISModel.Entidades
{
[Table("tbPerigoPotencial")]
public class PerigoPotencial : EntidadeBase
{
[Key]
public string IDPerigoPotencial { get; set; }
[Display(Name = "Evento Perigoso Potencial")]
public string DescricaoEvento { get; set; }
}
}
<file_sep>using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System;
namespace GISModel.Entidades
{
[Table("tbAnaliseRisco")]
public class AnaliseRisco : EntidadeBase
{
[Key]
public string IDAnaliseRisco { get; set; }
[Display(Name = "Atividade Alocada")]
public string IDAtividadeAlocada { get; set; }
[Display(Name = "Alocação")]
public string IDAlocacao { get; set; }
[Display(Name = "Atividade")]
public string IDAtividadesDoEstabelecimento { get; set; }
[Display(Name = "Eventos Perigosos Adicionais")]
public string IDEventoPerigoso { get; set; }
[Display(Name = "Perigo Adicional")]
public string IDPerigoPotencial { get; set; }
public bool Conhecimento { get; set; }
public bool BemEstar { get; set; }
public virtual AtividadeAlocada AtividadeAlocada { get; set; }
public virtual Alocacao Alocacao { get; set; }
}
}
<file_sep>using GISCore.Business.Abstract;
using GISModel.Entidades;
using System;
using System.Collections.Generic;
using System.Configuration;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace GISCore.Business.Concrete
{
public class AnaliseRiscoBusiness : BaseBusiness<AnaliseRisco>, IAnaliseRiscoBusiness
{
public override void Inserir(AnaliseRisco pAnaliseRisco)
{
pAnaliseRisco.IDAnaliseRisco = Guid.NewGuid().ToString();
// pAtividadeAlocada.Admitido = "Admitido";
base.Inserir(pAnaliseRisco);
}
public override void Alterar(AnaliseRisco pAnaliseRisco)
{
List<AnaliseRisco> lAnaliseRisco = Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao) && p.IDAlocacao.Equals(pAnaliseRisco.IDAlocacao)).ToList();
//&& p.idAtividadesDoEstabelecimento.Equals(pAnaliseRisco.idAtividadesDoEstabelecimento)).ToList();
if (lAnaliseRisco.Count.Equals(1))
{
AnaliseRisco oAnaliseRisco = lAnaliseRisco[0];
oAnaliseRisco.UsuarioExclusao = pAnaliseRisco.UsuarioExclusao;
oAnaliseRisco.DataExclusao = pAnaliseRisco.DataExclusao;
base.Alterar(pAnaliseRisco);
}
//AtividadeAlocada tempAtividadeAlocada = Consulta.FirstOrDefault(p => p.IDAtividadeAlocada.Equals(pAtividadeAlocada.IDAtividadeAlocada));
//if (tempAtividadeAlocada == null)
//{
// throw new Exception("Não foi possível encontrar a Atividade através do ID.");
//}
//else
//{
// tempAtividadeAlocada.idAtividadesDoEstabelecimento = pAtividadeAlocada.idAtividadesDoEstabelecimento;
// base.Alterar(tempAtividadeAlocada);
//}
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace GISModel.DTO.AnaliseRisco
{
public class AnaliseRiscosViewModel
{
public string IDAmissao { get; set; }
public string DescricaoAtividade { get; set; }
public string FonteGeradora { get; set; }
//public string NomeDaImagem { get; set; }
public string imagemEstab { get; set; }
public string Imagem { get; set; }
public bool AlocaAtividade { get; set; }
public string IDAtividadeEstabelecimento { get; set; }
public string IDAlocacao { get; set; }
public string IDAtividadeAlocada { get; set; }
public string Riscos { get; set; }
public string PossiveisDanos { get; set; }
public string IDEventoPerigoso { get; set; }
public string IDPerigoPotencial { get; set; }
//public string idControle { get; set; }
//public Enum EClasseDoRisco { get; set; }
//public string Tragetoria { get; set; }
//public string PossiveisDanos { get; set; }
public bool Conhecimento { get; set; }
public bool BemEstar { get; set; }
}
}
<file_sep>using GISWeb.Infraestrutura.Provider.Abstract;
using Ninject;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
namespace GISWeb.Infraestrutura.Filters
{
public class DadosUsuarioAttribute : ActionFilterAttribute
{
[Inject]
public ICustomAuthorizationProvider AutorizacaoProvider { get; set; }
public override void OnResultExecuting(ResultExecutingContext filterContext)
{
base.OnResultExecuting(filterContext);
if (AutorizacaoProvider.UsuarioAutenticado != null)
{
filterContext.Controller.ViewBag.NomeUsuario = AutorizacaoProvider.UsuarioAutenticado.Nome;
filterContext.Controller.ViewBag.MatriculaUsuario = AutorizacaoProvider.UsuarioAutenticado.Login;
}
}
}
}<file_sep>using GISCore.Business.Abstract;
using GISModel.DTO.AnaliseRisco;
using GISModel.DTO.Shared;
using GISModel.Entidades;
using Ninject;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using GISModel.Enums;
namespace GISWeb.Controllers
{
public class AnaliseRiscoController : Controller
{
#region-Inject
[Inject]
public IAtividadesDoEstabelecimentoBusiness AtividadesDoEstabelecimentoBusiness { get; set; }
[Inject]
public IEmpresaBusiness EmpresaBusiness { get; set; }
[Inject]
public IAtividadeAlocadaBusiness AtividadeAlocadaBusiness { get; set; }
[Inject]
public ITipoDeRiscoBusiness TipoDeRiscoBusiness { get; set; }
[Inject]
public IAnaliseRiscoBusiness AnaliseRiscoBusiness { get; set; }
[Inject]
public IAlocacaoBusiness AlocacaoBusiness { get; set; }
[Inject]
public IAdmissaoBusiness AdmissaoBusiness { get; set; }
[Inject]
public IEmpregadoBusiness EmpregadoBusiness { get; set; }
[Inject]
public IEventoPerigosoBusiness EventoPerigosoBusiness { get; set; }
[Inject]
public IPerigoPotencialBusiness PerigoPotencialBusiness { get; set; }
#endregion
// GET: AtividadeAlocada
public ActionResult Novo(string id)
{
ViewBag.Analise = new SelectList(AtividadesDoEstabelecimentoBusiness.Consulta, "IDAtividadesDoEstabelecimento", "DescricaoDestaAtividade");
return View();
}
//Lists the activities for which a risk analysis must be performed.
//When an activity is chosen, another box opens listing its risks, and the employee states whether
//he or she is fit to perform the activity.
public ActionResult PesquisarAtividadesRiscos(string idEstabelecimento, string idAlocacao)
{
ViewBag.Imagens = AtividadesDoEstabelecimentoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao) && (p.IDEstabelecimento.Equals(idEstabelecimento))).ToList();
try
{
var listaAmbientes = from AL in AtividadeAlocadaBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao) && p.idAlocacao.Equals(idAlocacao)).ToList()
join AR in AnaliseRiscoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on AL.IDAtividadeAlocada equals AR.IDAtividadeAlocada
into ARGroup
from item in ARGroup.DefaultIfEmpty()
join AE in AtividadesDoEstabelecimentoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on AL.idAtividadesDoEstabelecimento equals AE.IDAtividadesDoEstabelecimento
into AEGroup
from item0 in AEGroup.DefaultIfEmpty()
join TR in TipoDeRiscoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on item0.IDAtividadesDoEstabelecimento equals TR.idAtividadesDoEstabelecimento
into TRGroup
from item1 in TRGroup.DefaultIfEmpty()
select new AnaliseRiscosViewModel
{
DescricaoAtividade = AL.AtividadesDoEstabelecimento.DescricaoDestaAtividade,
//Riscos = item1.PerigoPotencial.DescricaoEvento,
//FonteGeradora = item1.FonteGeradora,
AlocaAtividade = (item == null ? false : true),
Conhecimento = item.Conhecimento,
BemEstar = item.BemEstar,
};
List<AnaliseRiscosViewModel> lAtividadesRiscos = listaAmbientes.ToList();
AtividadesDoEstabelecimento oIDRiscosDoEstabelecimento = AtividadesDoEstabelecimentoBusiness.Consulta.FirstOrDefault(p => p.IDEstabelecimento.Equals(idEstabelecimento));
if (oIDRiscosDoEstabelecimento == null)
{
return Json(new { resultado = new RetornoJSON() { Alerta = "Atividades de Riscos não encontrada." } });
}
else
{
return Json(new { data = RenderRazorViewToString("SalvarAnaliseRisco", lAtividadesRiscos), Contar = lAtividadesRiscos.Count() });
}
}
catch (Exception ex)
{
if (ex.GetBaseException() == null)
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.Message } });
}
else
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.GetBaseException().Message } });
}
}
}
public ActionResult SalvarAnaliseRisco(string idEstabelecimento, string idAlocacao)
{
ViewBag.EventoPerigoso = new SelectList(EventoPerigosoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList(), "IDEventoPerigoso", "Descricao");
ViewBag.PerigoPotencial = new SelectList(PerigoPotencialBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList(), "IDPerigoPotencial", "DescricaoEvento");
var ListaAmbientes = from AL in AtividadeAlocadaBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao) && p.idAlocacao.Equals(idAlocacao)).ToList()
join AR in AnaliseRiscoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on AL.IDAtividadeAlocada equals AR.IDAtividadeAlocada
into ARGroup
from item in ARGroup.DefaultIfEmpty()
join AE in AtividadesDoEstabelecimentoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on AL.idAtividadesDoEstabelecimento equals AE.IDAtividadesDoEstabelecimento
into AEGroup
from item2 in AEGroup.DefaultIfEmpty()
join TR in TipoDeRiscoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on item2.IDAtividadesDoEstabelecimento equals TR.idAtividadesDoEstabelecimento
into TRGroup
from item3 in TRGroup.DefaultIfEmpty()
select new AnaliseRiscosViewModel
{
//DescricaoAtividade = item.AtividadeAlocada.AtividadesDoEstabelecimento.DescricaoDestaAtividade,
//Riscos = item.AtividadeAlocada.AtividadesDoEstabelecimento.EventoPerigoso.Descricao,
FonteGeradora = item3 == null ? null : item3.FonteGeradora,
IDAmissao = AL.Alocacao.Admissao.IDAdmissao,
Imagem = AL.Alocacao.Admissao.Imagem,
Riscos = item3 == null ? null : item3.EventoPerigoso.Descricao,
DescricaoAtividade = item2 == null ? null : item2.DescricaoDestaAtividade,
IDAtividadeAlocada = AL.IDAtividadeAlocada,
Conhecimento = item == null ? false : true,
BemEstar = item == null ? false : true,
PossiveisDanos = item3 == null ? null : item3.PossiveisDanos.DescricaoDanos,
//IDEventoPerigoso = item?.IDEventoPerigoso ?? null,
//IDPerigoPotencial=item? .IDPerigoPotencial ?? null,
//Conhecimento = item?.Conhecimento ?? false,
//BemEstar = item?.BemEstar ?? false,
IDAtividadeEstabelecimento = AL.AtividadesDoEstabelecimento.IDAtividadesDoEstabelecimento,
imagemEstab = AL.AtividadesDoEstabelecimento.Imagem,
AlocaAtividade = item == null ? false : true
};
List<AnaliseRiscosViewModel> lAtividadesRiscos = ListaAmbientes.ToList();
ViewBag.Risco = ListaAmbientes.ToList();
var Emp = from Adm in AdmissaoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
join Aloc in AlocacaoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on Adm.IDAdmissao equals Aloc.IdAdmissao
join Empre in EmpregadoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on Adm.IDEmpregado equals Empre.IDEmpregado
join Firm in EmpresaBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on Adm.IDEmpresa equals Firm.IDEmpresa
where Aloc.IDAlocacao.Equals(idAlocacao)
select new Admissao()
{
DataAdmissao = Adm.DataAdmissao,
Empresa = new Empresa()
{
NomeFantasia = Firm.NomeFantasia
},
Empregado = new Empregado()
{
Nome = Empre.Nome,
DataNascimento = Empre.DataNascimento,
}
};
ViewBag.Emp = Emp.ToList();
return View();
}
[HttpPost]
public ActionResult SalvarAnaliseRisco(AnaliseRisco oAnaliseRisco, string AtivEstabID, string idATivAlocada, bool ConhecID, bool BemEstarID)
{
if (ModelState.IsValid)
{
try
{
oAnaliseRisco.IDAtividadesDoEstabelecimento = AtivEstabID;
oAnaliseRisco.Conhecimento = ConhecID;
oAnaliseRisco.BemEstar = BemEstarID;
oAnaliseRisco.IDAtividadeAlocada = idATivAlocada;
//oAnaliseRisco.BemEstar = oAnaliseRiscosViewModel.BemEstar;
//oAnaliseRisco.Conhecimento = oAnaliseRiscosViewModel.Conhecimento;
AnaliseRiscoBusiness.Inserir(oAnaliseRisco);
TempData["MensagemSucesso"] = "O empregado foi admitido com sucesso.";
//var iAdmin = oAdmissao.IDAdmissao;
return Json(new { resultado = new RetornoJSON() { URL = Url.Action("SalvarAnaliseRisco", "AnaliseRisco") } });
}
catch (Exception ex)
{
if (ex.GetBaseException() == null)
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.Message } });
}
else
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.GetBaseException().Message } });
}
}
}
else
{
return Json(new { resultado = TratarRetornoValidacaoToJSON() });
}
}
private string RenderRazorViewToString(string viewName, object model = null)
{
ViewData.Model = model;
using (var sw = new System.IO.StringWriter())
{
var viewResult = ViewEngines.Engines.FindPartialView(ControllerContext,
viewName);
var viewContext = new ViewContext(ControllerContext, viewResult.View,
ViewData, TempData, sw);
viewResult.View.Render(viewContext, sw);
viewResult.ViewEngine.ReleaseView(ControllerContext, viewResult.View);
return sw.GetStringBuilder().ToString();
}
}
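// RenderRazorViewToString renders a partial view to a raw HTML string; the actions above
// return that string in the "data" field of the JSON response so the client-side scripts
// can inject it into a bootbox dialog (see PesquisarAtividadesRiscos here and the
// BuscarDetalhes*/Pesquisar* functions in the script files further below).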
public RetornoJSON TratarRetornoValidacaoToJSON()
{
string msgAlerta = string.Empty;
foreach (ModelState item in ModelState.Values)
{
if (item.Errors.Count > 0)
{
foreach (System.Web.Mvc.ModelError i in item.Errors)
{
msgAlerta += i.ErrorMessage;
}
}
}
return new RetornoJSON()
{
Alerta = msgAlerta,
Erro = string.Empty,
Sucesso = string.Empty
};
}
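// Note on the JSON contract: as used throughout these controllers, RetornoJSON carries
// URL (redirect target), Sucesso (success message), Alerta (validation warning) and
// Erro (error message), and is consumed on the client by the TratarResultadoJSON helper.
// A minimal sketch of a success response, assuming the same helpers:
//   return Json(new { resultado = new RetornoJSON() { Sucesso = "Operação concluída." } });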
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Threading.Tasks;
namespace GISHelpers.Utils
{
public class Criptografador
{
private const string Key1 = "G&i0S.0IsL1fe4everAnd4TW";
private const string Key2 = "G&0iKey4n00bs";
private static TripleDESCryptoServiceProvider DES = new TripleDESCryptoServiceProvider();
private static MD5CryptoServiceProvider MD5 = new MD5CryptoServiceProvider();
private static byte[] MD5Hash(string value)
{
return MD5.ComputeHash(Encoding.UTF8.GetBytes(value));
}
public static string Criptografar(string entry, int keyNumber)
{
string key;
if (keyNumber == 1)
key = Key1;
else if (keyNumber == 2)
key = Key2;
else
throw new InvalidOperationException("Tipo de criptografia não reconhecida.");
DES.Key = MD5Hash(key);
DES.Mode = CipherMode.ECB;
byte[] buffer = Encoding.UTF8.GetBytes(entry);
return Convert.ToBase64String(DES.CreateEncryptor().TransformFinalBlock(buffer, 0, buffer.Length));
}
public static string Descriptografar(string entry, int keyNumber)
{
string key;
if (keyNumber == 1)
key = Key1;
else if (keyNumber == 2)
key = Key2;
else
throw new InvalidOperationException("Tipo de criptografia não reconhecida.");
DES.Key = MD5Hash(key);
DES.Mode = CipherMode.ECB;
byte[] buffer = Convert.FromBase64String(entry);
return Encoding.UTF8.GetString(DES.CreateDecryptor().TransformFinalBlock(buffer, 0, buffer.Length));
}
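// Usage sketch (illustrative only, using the two built-in keys above):
//   string token = Criptografador.Criptografar("meu-texto", 1);
//   string texto = Criptografador.Descriptografar(token, 1); // "meu-texto"
// The TripleDES key is derived from an MD5 hash of Key1/Key2 and ECB mode is used, so
// identical plaintexts always yield identical ciphertexts; this sketch only illustrates
// the existing API and does not endorse the algorithm choice.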
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace GISModel.Entidades
{
[Table("tbDocsPorAtividade")]
public class DocsPorAtividade: EntidadeBase
{
[Key]
public string IDDocAtividade { get; set; }
[Display(Name ="Atividade")]
public string idAtividade { get; set; }
[Display(Name = "Documento")]
public string idDocumentosEmpregado { get; set; }
public virtual DocumentosPessoal DocumentosEmpregado { get; set; }
public virtual Atividade Atividade { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace GISModel.Entidades
{
[Table("Rel_AtivEstabTipoRisco")]
public class Rel_AtivEstabTipoRisco: EntidadeBase
{
[Key]
public string IDAtivEstabTipoRisco { get; set; }
//public string idAtividadeEstabelecimento { get; set; }
//public string idTipoDeRisco { get; set; }
//public virtual AtividadesDoEstabelecimento AtividadesDoEstabelecimento { get; set; }
//public virtual TipoDeRisco TipoDeRisco { get; set; }
}
}
<file_sep>
function OnSuccessAtualizarDocumentos(data) {
alert("A");
$('#formEditarDocumentos').removeAttr('style');
$(".LoadingLayout").hide();
$('#blnSalvar').show();
TratarResultadoJSON(data.resultado);
}
function OnBeginAtualizarDocumentos() {
alert("B");
$(".LoadingLayout").show();
$('#blnSalvar').hide();
$("#formEditarDocumentos").css({ opacity: "0.5" });
}<file_sep>
function BuscarDetalhesEstabelecimentoImagens(IDEstabelecimentoImagens) {
$(".LoadingLayout").show();
$.ajax({
method: "POST",
url: "/AtividadesDoEstabelecimento/BuscarDetalhesEstabelecimentoImagens",
data: { idEstabelecimentoImagens: IDEstabelecimentoImagens },
error: function (erro) {
$(".LoadingLayout").hide();
ExibirMensagemGritter('Oops! Erro inesperado', erro.responseText, 'gritter-error');
},
success: function (content) {
$(".LoadingLayout").hide();
if (content.data != null)
{
bootbox.dialog
({
message: content.data,
title: "<span class='bigger-110'>Detalhes da Empresa2</span>",
backdrop: true,
locale: "br",
buttons: {},
onEscape: true
});
}
else {
TratarResultadoJSON(content.resultado);
}
}
});
}
//
function ListaExposicao(idAlocacao, idAtividadeAlocada,Nome, cpf, idAtividadeEstabelecimento) {
$(".LoadingLayout").show();
$.ajax({
method: "POST",
url: "/Admissao/ListaExposicao",
data: { idAlocacao: idAlocacao, idAtividadeAlocada: idAtividadeAlocada, Nome: Nome, cpf: cpf, idAtividadeEstabelecimento: idAtividadeEstabelecimento },
error: function (erro) {
$(".LoadingLayout").hide();
ExibirMensagemGritter('Oops! Erro inesperado', erro.responseText, 'gritter-error')
},
success: function (content) {
$(".LoadingLayout").hide();
if (content.data != null) {
bootbox.dialog({
message: content.data,
title: "<span class='bigger-110'>Exposição ao Risco</span>",
backdrop: true,
locale: "br",
buttons: {},
onEscape: true
});
}
else {
TratarResultadoJSON(content.resultado);
}
}
});
}
//
function ListarPlanoDeAcao(idTipoDeRisco) {
$(".LoadingLayout").show();
$.ajax({
method: "POST",
url: "/PlanoDeAcao/ListarPlanoDeAcao",
data: { idTipoDeRisco: idTipoDeRisco },
error: function (erro) {
$(".LoadingLayout").hide();
ExibirMensagemGritter('Oops! Erro inesperado', erro.responseText, 'gritter-error');
},
success: function (content) {
$(".LoadingLayout").hide();
if (content.data != null) {
bootbox.dialog({
message: content.data,
title: "<span class='bigger-110'>Lista Plano de Ação</span>",
backdrop: true,
locale: "br",
buttons: {},
onEscape: true
});
}
else {
TratarResultadoJSON(content.resultado);
}
}
});
}
function BuscarDetalhesDeMedidasDeControleEstabelecimento(id) {
$(".LoadingLayout").show();
$.ajax({
method: "POST",
url: "/AtividadesDoEstabelecimento/BuscarDetalhesDeMedidasDeControle",
data: { id: id },
error: function (erro) {
$(".LoadingLayout").hide();
ExibirMensagemGritter('Oops! Erro inesperado', erro.responseText, 'gritter-error')
},
success: function (content) {
$(".LoadingLayout").hide();
if (content.data != null) {
bootbox.dialog({
message: content.data,
title: "<span class='bigger-110'>Controles de Riscos</span>",
backdrop: true,
locale: "br",
buttons: {},
onEscape: true
});
}
else {
TratarResultadoJSON(content.resultado);
}
}
});
}
function DeletarEmpresa(IDAdmissao, Nome) {
var callback = function () {
$('.LoadingLayout').show();
$('#dynamic-table').css({ opacity: "0.5" });
$.ajax({
method: "POST",
url: "/Admissao/TerminarComRedirect",
data: { IDAdmissao: IDAdmissao },
error: function (erro) {
$(".LoadingLayout").hide();
$("#dynamic-table").css({ opacity: '' });
ExibirMensagemGritter('Oops! Erro inesperado', erro.responseText, 'gritter-error')
},
success: function (content) {
$('.LoadingLayout').hide();
$("#dynamic-table").css({ opacity: '' });
TratarResultadoJSON(content.resultado);
if (content.resultado.Sucesso != null && content.resultado.Sucesso != "") {
$("#linha-" + IDAdmissao).remove();
}
}
});
};
ExibirMensagemDeConfirmacaoSimples("Tem certeza que deseja excluir esta admissao?", "Exclusão de Admissao", callback, "btn-danger");
//ExibirMensagemDeConfirmacaoSimples("Tem certeza que deseja excluir esta admissao '" + Nome + "'?", "Exclusão de Empresa", callback, "btn-danger");
}
function Exposicao(IDAtividadeAlocada, idAlocacao, idTipoDeRisco, idEmpregado) {
$(".LoadingLayout").show();
$.ajax({
method: "POST",
url: "/Exposicao/Novo",
data: { IDAtividadeAlocada: IDAtividadeAlocada, idAlocacao: idAlocacao, idTipoDeRisco: idTipoDeRisco, idEmpregado: idEmpregado},
error: function (erro) {
$(".LoadingLayout").hide();
ExibirMensagemGritter('Oops! Erro inesperado', erro.responseText, 'gritter-error')
},
success: function (content) {
$(".LoadingLayout").hide();
if (content.data != null) {
bootbox.dialog({
message: content.data,
title: "<span class='bigger-110'>Cadastrar Exposicao do Empregado</span>",
backdrop: true,
locale: "br",
buttons: {},
onEscape: true
});
}
else {
TratarResultadoJSON(content.resultado);
}
}
});
};
function AlocarEmpregado(IDAdmissao, IDEmpregado, IDEmpresa) {
$(".LoadingLayout").show();
$.ajax({
method: "POST",
url: "/Alocacao/Novo",
data: { IDAdmissao: IDAdmissao, IDEmpregado: IDEmpregado, IDEmpresa: IDEmpresa },
error: function (erro) {
$(".LoadingLayout").hide();
ExibirMensagemGritter('Oops! Erro inesperado', erro.responseText, 'gritter-error')
},
success: function (content) {
$(".LoadingLayout").hide();
if (content.data != null) {
bootbox.dialog({
message: content.data,
title: "<span class='bigger-110'>Alocar Empregado</span>",
backdrop: true,
locale: "br",
buttons: {},
onEscape: true
});
}
else {
TratarResultadoJSON(content.resultado);
}
}
});
};
//jQuery(function ($) {
// AplicajQdataTable("dynamic-table", [{ "bSortable": false }, null, { "bSortable": false }], false, 20);
//});
function AlocarEmAmbiente(idEstabelecimento, idAlocacao) {
$(".LoadingLayout").show();
//$('tablePerfisPorMenu').css({ opacity: "0.5" });
$.ajax({
method: "POST",
url: "/AtividadesDoEstabelecimento/AlocarEmAmbiente",
data: { idEstabelecimento: idEstabelecimento, idAlocacao: idAlocacao },
error: function (erro) {
$(".LoadingLayout").hide();
ExibirMensagemGritter('Oops! Erro inesperado', erro.responseText, 'gritter-error')
},
success: function (content) {
$(".LoadingLayout").hide();
//$("tablePerfisPorMenu").css({ opacity: '' });
if (content.data != null) {
bootbox.dialog({
message: content.data,
title: "<span class='bigger-110'>Selecione os Ambientes para este empregado</span>",
backdrop: true,
locale: "br",
buttons: {},
onEscape: true
});
}
else
{
TratarResultadoJSON(content.resultado);
}
AplicajQdataTable("tablePerfisPorMenu", [{ "bSortable": false }, null], false, 25);
}
});
};
function PesquisarAtividadesRiscos(idEstabelecimento, idAlocacao) {
$(".LoadingLayout").show();
//$('tablePerfisPorMenu').css({ opacity: "0.5" });
$.ajax({
method: "POST",
url: "/AnaliseRisco/PesquisarAtividadesRiscos",
data: { idEstabelecimento: idEstabelecimento, idAlocacao: idAlocacao },
error: function (erro) {
$(".LoadingLayout").hide();
ExibirMensagemGritter('Oops! Erro inesperado', erro.responseText, 'gritter-error')
},
success: function (content) {
$(".LoadingLayout").hide();
//$("tablePerfisPorMenu").css({ opacity: '' });
if (content.data != null) {
bootbox.dialog({
message: content.data,
title: "<span class='bigger-110'>Analise os Riscos desta Atividades</span>",
backdrop: true,
locale: "br",
buttons: {},
onEscape: true
});
}
else {
TratarResultadoJSON(content.resultado);
}
AplicajQdataTable("tablePerfisPorMenu", [{ "bSortable": false }, null], false, 25);
}
});
};
function EstabelecimentoAmbienteAlocado(idEstabelecimento, idAlocacao, idAtividadeAlocada, idAtividadesDoEstabelecimento, idEmpregado) {
$(".LoadingLayout").show();
$.ajax({
method: "POST",
url: "/AtividadesDoEstabelecimento/EstabelecimentoAmbienteAlocado",
data: { idEstabelecimento: idEstabelecimento, idAlocacao: idAlocacao, idAtividadeAlocada: idAtividadeAlocada, idAtividadesDoEstabelecimento: idAtividadesDoEstabelecimento, idEmpregado: idEmpregado },
error: function (erro) {
$(".LoadingLayout").hide();
ExibirMensagemGritter('Oops! Erro inesperado', erro.responseText, 'gritter-error')
},
success: function (content) {
$(".LoadingLayout").hide();
if (content.data != null) {
bootbox.dialog({
message: content.data,
title: "<span class='bigger-110'>Ambientes deste Empregado</span>",
backdrop: true,
locale: "br",
buttons: {},
onEscape: true
});
}
else {
TratarResultadoJSON(content.resultado);
}
AplicajQdataTable("RiscosRelacionadoAmbiente", [{ "bSortable": false },null, null], false, 25);
}
});
};
function Ambiente(idEstabelecimento) {
$(".LoadingLayout").show();
$.ajax({
method: "POST",
url: "/AtividadesDoEstabelecimento/Ambiente",
data: { idEstabelecimento: idEstabelecimento },
error: function (erro) {
$(".LoadingLayout").hide();
ExibirMensagemGritter('Oops! Erro inesperado', erro.responseText, 'gritter-error')
},
success: function (content) {
$(".LoadingLayout").hide();
if (content.data != null) {
bootbox.dialog({
message: content.data,
title: "<span class='bigger-110'>Atividades deste Empregado</span>",
backdrop: true,
locale: "br",
buttons: {},
onEscape: true
});
}
else {
TratarResultadoJSON(content.resultado);
}
AplicajQdataTable("RiscosRelacionadoAmbiente", [{ "bSortable": false }, null, null, null, null], false, 25);
}
});
};
function DesalocarEmpregado(IDAlocacao,IDEmpregado) {
var callback = function () {
$('.LoadingLayout').show();
$('#dynamic-table').css({ opacity: "0.5" });
$.ajax({
method: "POST",
url: "/Alocacao/TerminarComRedirect",
data: { IDAlocacao: IDAlocacao, IDEmpregado: IDEmpregado },
error: function (erro) {
$(".LoadingLayout").hide();
$("#dynamic-table").css({ opacity: '' });
ExibirMensagemGritter('Oops! Erro inesperado', erro.responseText, 'gritter-error')
},
success: function (content) {
$('.LoadingLayout').hide();
$("#dynamic-table").css({ opacity: '' });
TratarResultadoJSON(content.resultado);
if (content.resultado.Sucesso != null && content.resultado.Sucesso != "") {
$("#linha-" + IDAlocacao).remove();
}
}
});
};
ExibirMensagemDeConfirmacaoSimples("Tem certeza que deseja desalocar este empregado?", "Desalocar Empregado", callback, "btn-danger");
};
function OnSuccessCadastrarExposicao(data) {
$('#formCadastroExposicao').removeAttr('style');
$(".LoadingLayout").hide();
$('#btnSalvar').show();
TratarResultadoJSON(data.resultado);
//ExibirMsgGritter(data.resultado);
$('#dtExpo').disableSelection();
$('#dtExpo01').disableSelection();
}
function OnBeginCadastrarExposicao() {
$(".LoadingLayout").show();
$('#btnSalvar').hide();
$("#formCadastroExposicao").css({ opacity: "0.5" });
}<file_sep>using GISCore.Business.Abstract;
using GISModel.Entidades;
using System;
using System.Collections.Generic;
using System.Configuration;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace GISCore.Business.Concrete
{
public class ExposicaoBusiness : BaseBusiness<Exposicao>, IExposicaoBusiness
{
public override void Inserir(Exposicao pExposicao)
{
if (Consulta.Any(u => u.IDExposicao.Equals(pExposicao.IDExposicao)))
throw new InvalidOperationException("Não é possível inserir esta exposição, pois já existe uma exposição com este ID.");
pExposicao.IDExposicao = Guid.NewGuid().ToString();
base.Inserir(pExposicao);
}
public override void Alterar(Exposicao pExposicao)
{
Exposicao tempExposicao = Consulta.FirstOrDefault(p => p.IDExposicao.Equals(pExposicao.IDExposicao));
if (tempExposicao == null)
{
throw new Exception("Não foi possível encontrar a exposicao através do ID.");
}
else
{
//tempExposicao.idAtividadesDoEstabelecimento = pExposicao.idAtividadesDoEstabelecimento;
//tempExposicao.idEstabelecimentoImagens = pExposicao.idEstabelecimentoImagens;
tempExposicao.EExposicaoCalor = pExposicao.EExposicaoCalor;
tempExposicao.EExposicaoInsalubre = pExposicao.EExposicaoInsalubre;
base.Alterar(tempExposicao);
}
}
}
}
<file_sep>using BotDetect.Web.Mvc;
namespace GISWeb.Infraestrutura.Helpers
{
public class CaptchaHelper
{
public static MvcCaptcha GetLoginCaptcha()
{
MvcCaptcha loginCaptcha = new MvcCaptcha("LoginCaptcha");
loginCaptcha.UserInputID = "CaptchaCode";
loginCaptcha.ImageSize = new System.Drawing.Size(255, 50);
return loginCaptcha;
}
}
}<file_sep>using GISModel.Enums;
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace GISModel.Entidades
{
[Table("tbExposicao")]
public class Exposicao: EntidadeBase
{
[Key]
public string IDExposicao { get; set; }
public string idAtividadeAlocada { get; set; }
public string idAlocacao { get; set; }
public string idTipoDeRisco { get; set; }
[Display(Name = "Tempo Estimado Mensal")]
public string TempoEstimado { get; set; }
[Display(Name ="Exposição")]
public EExposicaoInsalubre EExposicaoInsalubre { get; set; }
[Display(Name = "Exposição ao Calor")]
public EExposicaoCalor EExposicaoCalor { get; set; }
[Display(Name = "Exposição ")]
public EExposicaoSeg EExposicaoSeg { get; set; }
[Display(Name = "Probabilidade")]
public EProbabilidadeSeg EProbabilidadeSeg { get; set; }
[Display(Name = "Severidade")]
public ESeveridadeSeg ESeveridadeSeg { get; set; }
public virtual AtividadeAlocada AtividadeAlocada { get; set; }
public virtual TipoDeRisco TipoDeRisco { get; set; }
}
}
<file_sep>using GISHelpers.Utils;
using System;
using System.Globalization;
using System.Web;
using System.Xml;
namespace GISCore.Infrastructure.Utils
{
public static class Extensions
{
public static void GravaCookie(string id, string valor, int validadeEmMinutos)
{
try
{
var cookie = new HttpCookie(id, Compactador.Compactar(valor));
cookie.Expires = DateTime.Now.AddMinutes(validadeEmMinutos);
HttpContext.Current.Response.Cookies.Add(cookie);
}
catch { }
}
public static string RecuperaCookie(string id, bool clear = false)
{
string valor = string.Empty;
try
{
HttpCookie cookie = HttpContext.Current.Request.Cookies[id];
if (cookie != null)
if (!string.IsNullOrEmpty(cookie.Value))
valor = Compactador.Descompactar(cookie.Value);
if (clear)
{
cookie = new HttpCookie(id);
cookie.Expires = DateTime.Now.AddDays(-1);
HttpContext.Current.Response.Cookies.Add(cookie);
}
}
catch { }
return valor;
}
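// Usage sketch (illustrative values, assuming Compactador.Compactar/Descompactar round-trip):
//   Extensions.GravaCookie("ultimoFiltro", "empresa=ACME", 30); // cookie valid for 30 minutes
//   string filtro = Extensions.RecuperaCookie("ultimoFiltro"); // "empresa=ACME"
//   Extensions.RecuperaCookie("ultimoFiltro", true); // reads the value and expires the cookie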
public static string TratarNomeProprio(string nomeProprio)
{
return nomeProprio.Trim().ToUpper();
}
[Obsolete("Metodo obsoleto, use os métodos de extensão do Xml 'ToBoolean'. Adicione referência para SPF.Custom.SGEConsulta.Core.Helpers", false)]
public static bool ConverteValorParaBoolean(XmlAttribute atributoXml)
{
try
{
if (atributoXml == null)
return false;
else if (string.IsNullOrWhiteSpace(atributoXml.Value))
return false;
else
return Convert.ToBoolean(atributoXml.Value.ToString());
}
catch { return false; }
}
public static bool ConverteValorParaBoolean(XmlAttribute atributoXml, XmlAttribute atributoXml2)
{
try
{
bool valor1 = false;
bool valor2 = false;
if (atributoXml == null)
valor1 = false;
else if (string.IsNullOrWhiteSpace(atributoXml.Value))
valor1 = false;
else
valor1 = Convert.ToBoolean(atributoXml.Value.ToString());
if (atributoXml2 == null)
valor2 = false;
else if (string.IsNullOrWhiteSpace(atributoXml2.Value))
valor2 = false;
else
valor2 = Convert.ToBoolean(atributoXml2.Value.ToString());
return valor1 || valor2;
}
catch { return false; }
}
public static bool ConverteValorParaBoolean(string value)
{
try
{
if (string.IsNullOrWhiteSpace(value))
return false;
else
return Convert.ToBoolean(value);
}
catch { return false; }
}
[Obsolete("Metodo obsoleto, use os métodos de extensão da Xml 'ToInteger'. Adicione referência para SPF.Custom.SGEConsulta.Core.Helpers", false)]
public static int ConverteValorParaInteiro(XmlAttribute atributoXml)
{
try
{
if (atributoXml == null)
return 0;
else if (string.IsNullOrWhiteSpace(atributoXml.Value))
return 0;
else
return Convert.ToInt32(atributoXml.Value.ToString());
}
catch { return 0; }
}
public static string DataString(string data)
{
try
{
string dataResult = "";
if (data != null && data != "")
{
switch (DateTime.ParseExact(data, "dd/MM/yyyy", CultureInfo.InvariantCulture).Month)
{
case 1:
dataResult = "jan";
break;
case 2:
dataResult = "feb";
break;
case 3:
dataResult = "mar";
break;
case 4:
dataResult = "apr";
break;
case 5:
dataResult = "may";
break;
case 6:
dataResult = "jun";
break;
case 7:
dataResult = "jul";
break;
case 8:
dataResult = "aug";
break;
case 9:
dataResult = "sep";
break;
case 10:
dataResult = "oct";
break;
case 11:
dataResult = "nov";
break;
case 12:
dataResult = "dec";
break;
}
return DateTime.ParseExact(data, "dd/MM/yyyy", CultureInfo.InvariantCulture).Year.ToString() + "-" + dataResult + "-" + DateTime.ParseExact(data, "dd/MM/yyyy", CultureInfo.InvariantCulture).Day.ToString();
// return DateTime.Parse(data).Day.ToString() + "-" + dataResult + "-" + DateTime.Parse(data).Year.ToString();
}
else
return dataResult;
}
catch
{
throw new Exception(data);
}
}
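// Example of the conversion above (illustrative): DataString("05/03/2020") returns
// "2020-mar-5"; an empty string returns "" and an unparsable value is rethrown wrapped
// in an Exception carrying the offending input.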
}
}
<file_sep>
function DeletarDocumento(IDDocumento,Nome) {
var callback = function () {
$('.LoadingLayout').show();
$('#dynamic-table').css({ opacity: "0.5" });
$.ajax({
method: "POST",
url: "/DocumentosPessoal/TerminarComRedirect",
data: { IDDocumentosEmpregado: IDDocumento, NomeDocumento: Nome },
error: function (erro) {
$(".LoadingLayout").hide();
$("#dynamic-table").css({ opacity: '' });
ExibirMensagemGritter('Oops! Erro inesperado', erro.responseText, 'gritter-error')
},
success: function (content) {
$('.LoadingLayout').hide();
$("#dynamic-table").css({ opacity: '' });
TratarResultadoJSON(content.resultado);
if (content.resultado.Sucesso !== null && content.resultado.Sucesso !== "") {
$("#linha-" + IDDocumentosEmpregado).remove();
}
}
});
};
ExibirMensagemDeConfirmacaoSimples("Tem certeza que deseja excluir este Documento '" + Nome + "'?", "Exclusão de Documento", callback, "btn-danger");
}<file_sep>using GISCore.Business.Abstract;
using GISModel.DTO.Conta;
using GISModel.Entidades;
using GISModel.Enums;
using Ninject;
using System;
using System.Collections.Generic;
using System.Configuration;
using System.DirectoryServices.AccountManagement;
using System.Linq;
using System.Net;
using System.Net.Mail;
using System.Runtime.InteropServices;
using System.Security.Cryptography;
using System.ServiceModel;
using System.Text;
namespace GISCore.Business.Concrete
{
public class UsuarioBusiness : BaseBusiness<Usuario>, IUsuarioBusiness
{
[Inject]
public IEmpresaBusiness EmpresaBusiness { get; set; }
public Usuario ValidarCredenciais(AutenticacaoModel autenticacaoModel)
{
autenticacaoModel.Login = autenticacaoModel.Login.Trim();
//Look up the user without validating the password yet, so we can determine whether the password should be validated against AD or against GIS's internal password
List<Usuario> lUsuarios = Consulta.Where(u => u.Login.Equals(autenticacaoModel.Login) ||
u.CPF.Equals(autenticacaoModel.Login) ||
u.Email.Equals(autenticacaoModel.Login)).ToList();
if (lUsuarios.Count > 1 || lUsuarios.Count < 1)
{
throw new Exception("Não foi possível identificar o seu cadastro.");
}
else
{
if (lUsuarios[0].TipoDeAcesso.Equals(0))
{
//Login validating the password against AD
if (Convert.ToBoolean(ConfigurationManager.AppSettings["AD:DMZ"]))
{
//Call the web service to validate the password against AD
return null;
}
else
{
Empresa emp = EmpresaBusiness.Consulta.FirstOrDefault(a => string.IsNullOrEmpty(a.UsuarioExclusao) && a.IDEmpresa.Equals(lUsuarios[0].IDEmpresa));
using (PrincipalContext pc = new PrincipalContext(ContextType.Domain, emp.URL_AD))
{
if (pc.ValidateCredentials(autenticacaoModel.Login, autenticacaoModel.Senha))
return null;
else
throw new Exception("Login ou senha incorretos.");
}
}
}
else
{
//Login validating the password against GIS's internal password store
string IDUsuario = lUsuarios[0].IDUsuario;
string senhaTemp = CreateHashFromPassword(autenticacaoModel.Senha);
Usuario oUsuario = Consulta.FirstOrDefault(p => p.IDUsuario.Equals(IDUsuario) && p.Senha.Equals(senhaTemp));
if (oUsuario != null)
{
return oUsuario;
}
else
{
throw new Exception("Login ou senha incorretos.");
}
}
}
}
public byte[] RecuperarAvatar(string login)
{
try
{
WCF_Suporte.SuporteClient WCFSuporte = new WCF_Suporte.SuporteClient();
return WCFSuporte.BuscarFotoPerfil(new WCF_Suporte.DadosUsuario()
{
Login = login
});
}
catch (FaultException<WCF_Suporte.FaultSTARSServices> ex)
{
throw new Exception(ex.Detail.Detalhes);
}
}
public void SalvarAvatar(string login, string imageStringBase64, string extensaoArquivo)
{
try
{
WCF_Suporte.SuporteClient WCFSuporte = new WCF_Suporte.SuporteClient();
WCFSuporte.SalvarFotoPerfil(new WCF_Suporte.DadosUsuario()
{
Login = login
}, imageStringBase64);
}
catch (FaultException<WCF_Suporte.FaultSTARSServices> ex)
{
throw new Exception(ex.Detail.Detalhes);
}
}
public override void Inserir(Usuario usuario)
{
if (Consulta.Any(u => u.Login.Equals(usuario.Login) && string.IsNullOrEmpty(u.UsuarioExclusao)))
throw new InvalidOperationException("Este login já está sendo usado por outro usuário.");
if (Consulta.Any(u => u.CPF.Equals(usuario.CPF) && string.IsNullOrEmpty(u.UsuarioExclusao)))
throw new InvalidOperationException("Este CPF já está sendo usado por outro usuário.");
if (Consulta.Any(u => u.Email.Equals(usuario.Email) && string.IsNullOrEmpty(u.UsuarioExclusao)))
throw new InvalidOperationException("Este e-mail já está sendo usado por outro usuário.");
usuario.IDUsuario = Guid.NewGuid().ToString();
base.Inserir(usuario);
if (usuario.TipoDeAcesso.Equals(TipoDeAcesso.AD))
{
EnviarEmailParaUsuarioRecemCriadoAD(usuario);
}
else
{
EnviarEmailParaUsuarioRecemCriadoSistema(usuario);
}
}
public override void Alterar(Usuario entidade)
{
Usuario tempUsuario = Consulta.FirstOrDefault(p => string.IsNullOrEmpty(p.UsuarioExclusao) && p.IDUsuario.Equals(entidade.IDUsuario));
if (tempUsuario == null)
{
throw new Exception("Não foi possível encontrar o usuário através do ID.");
}
else
{
if (Consulta.Any(u => u.Email.Equals(entidade.Email) && string.IsNullOrEmpty(u.UsuarioExclusao) && !entidade.Login.ToUpper().Equals(u.Login.ToUpper())))
throw new InvalidOperationException("Este e-mail já está sendo usado por outro usuário.");
tempUsuario.DataExclusao = DateTime.Now;
tempUsuario.UsuarioExclusao = entidade.UsuarioExclusao;
base.Alterar(tempUsuario);
entidade.IDUsuario = tempUsuario.IDUsuario;
entidade.UsuarioExclusao = string.Empty;
base.Inserir(entidade);
}
}
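// Note: Alterar applies a soft-versioning pattern instead of updating in place: the
// current row is stamped with DataExclusao/UsuarioExclusao and a fresh row is inserted
// with the same IDUsuario, which is why queries across the system filter on
// string.IsNullOrEmpty(UsuarioExclusao) to reach the active record.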
public void DefinirSenha(NovaSenhaViewModel novaSenhaViewModel)
{
Usuario oUsuario = Consulta.FirstOrDefault(p => string.IsNullOrEmpty(p.UsuarioExclusao) && p.IDUsuario.Equals(novaSenhaViewModel.IDUsuario));
if (oUsuario == null)
{
throw new Exception("Não foi possível localizar o usuário através da identificação. Solicite um novo acesso.");
}
else
{
oUsuario.Senha = CreateHashFromPassword(novaSenhaViewModel.NovaSenha);
Alterar(oUsuario);
EnviarEmailParaUsuarioSenhaAlterada(oUsuario);
}
}
public void SolicitarAcesso(string email)
{
List<Usuario> listaUsuarios = Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao) && p.Email.ToLower().Equals(email.ToLower())).ToList();
if (listaUsuarios.Count() > 1 || listaUsuarios.Count() < 1)
{
listaUsuarios = Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao) && p.Login.ToLower().Equals(email.ToLower())).ToList();
if (listaUsuarios.Count() > 1 || listaUsuarios.Count() < 1)
{
throw new Exception("Não foi possível localizar este usuário no sistema através do e-mail. Tente novamente ou procure o Administrador.");
}
}
EnviarEmailParaUsuarioSolicacaoAcesso(listaUsuarios[0]);
}
#region E-mails
private void EnviarEmailParaUsuarioSolicacaoAcesso(Usuario usuario)
{
string sRemetente = ConfigurationManager.AppSettings["Web:Remetente"];
string sSMTP = ConfigurationManager.AppSettings["Web:SMTP"];
MailMessage mail = new MailMessage(sRemetente, usuario.Email);
string PrimeiroNome = GISHelpers.Utils.Severino.PrimeiraMaiusculaTodasPalavras(usuario.Nome);
if (PrimeiroNome.Contains(" "))
PrimeiroNome = PrimeiroNome.Substring(0, PrimeiroNome.IndexOf(" "));
mail.Subject = PrimeiroNome + ", este é o link para redinir sua senha";
mail.Body = "<html style=\"font-family: Verdana; font-size: 11pt;\"><body>Olá, " + PrimeiroNome + ".";
mail.Body += "<br /><br />";
mail.Body += "<span style=\"color: #222;\">Redefina sua senha para começar novamente.";
mail.Body += "<br /><br />";
string sLink = "http://localhost:26717/Conta/DefinirNovaSenha/" + WebUtility.UrlEncode(GISHelpers.Utils.Criptografador.Criptografar(usuario.IDUsuario + "#" + DateTime.Now.ToString("yyyyMMdd"), 1)).Replace("%", "_@");
mail.Body += "Para alterar sua senha do GiS, clique <a href=\"" + sLink + "\">aqui</a> ou cole o seguinte link no seu navegador.";
mail.Body += "<br /><br />";
mail.Body += sLink;
mail.Body += "<br /><br />";
mail.Body += "O link é válido por 24 horas, portanto, utilize-o imediatamente.";
mail.Body += "<br /><br />";
mail.Body += "Obrigado por utilizar o GiS!<br />";
mail.Body += "<strong>Gestão Inteligente da Segurança</strong>";
mail.Body += "</span>";
mail.Body += "<br /><br />";
mail.Body += "<span style=\"color: #aaa; font-size: 10pt; font-style: italic;\">Mensagem enviada automaticamente, favor não responder este email.</span>";
mail.Body += "</body></html>";
mail.IsBodyHtml = true;
mail.BodyEncoding = Encoding.UTF8;
SmtpClient smtpClient = new SmtpClient(sSMTP, 587);
smtpClient.Credentials = new System.Net.NetworkCredential()
{
UserName = ConfigurationManager.AppSettings["Web:Remetente"],
Password = "<PASSWORD>"
};
smtpClient.EnableSsl = true;
System.Net.ServicePointManager.ServerCertificateValidationCallback = delegate (object s,
System.Security.Cryptography.X509Certificates.X509Certificate certificate,
System.Security.Cryptography.X509Certificates.X509Chain chain,
System.Net.Security.SslPolicyErrors sslPolicyErrors)
{
return true;
};
smtpClient.Send(mail);
}
private void EnviarEmailParaUsuarioSenhaAlterada(Usuario usuario)
{
string sRemetente = ConfigurationManager.AppSettings["Web:Remetente"];
string sSMTP = ConfigurationManager.AppSettings["Web:SMTP"];
MailMessage mail = new MailMessage(sRemetente, usuario.Email);
string PrimeiroNome = GISHelpers.Utils.Severino.PrimeiraMaiusculaTodasPalavras(usuario.Nome);
if (PrimeiroNome.Contains(" "))
PrimeiroNome = PrimeiroNome.Substring(0, PrimeiroNome.IndexOf(" "));
mail.Subject = PrimeiroNome + ", sua senha foi redefinida.";
mail.Body = "<html style=\"font-family: Verdana; font-size: 11pt;\"><body>Olá, " + PrimeiroNome + ".";
mail.Body += "<br /><br />";
mail.Body += "<span style=\"color: #222;\">Você redefiniu sua senha do GiS.";
mail.Body += "<br /><br />";
mail.Body += "Obrigado por utilizar o GiS!<br />";
mail.Body += "<strong>Gestão Inteligente da Segurança</strong>";
mail.Body += "</span>";
mail.Body += "<br /><br />";
mail.Body += "<span style=\"color: #aaa; font-size: 10pt; font-style: italic;\">Mensagem enviada automaticamente, favor não responder este email.</span>";
mail.Body += "</body></html>";
mail.IsBodyHtml = true;
mail.BodyEncoding = Encoding.UTF8;
SmtpClient smtpClient = new SmtpClient(sSMTP, 587);
smtpClient.Credentials = new System.Net.NetworkCredential()
{
UserName = ConfigurationManager.AppSettings["Web:Remetente"],
Password = "<PASSWORD>"
};
smtpClient.EnableSsl = true;
System.Net.ServicePointManager.ServerCertificateValidationCallback = delegate (object s,
System.Security.Cryptography.X509Certificates.X509Certificate certificate,
System.Security.Cryptography.X509Certificates.X509Chain chain,
System.Net.Security.SslPolicyErrors sslPolicyErrors)
{
return true;
};
smtpClient.Send(mail);
}
private void EnviarEmailParaUsuarioRecemCriadoSistema(Usuario usuario)
{
string sRemetente = ConfigurationManager.AppSettings["Web:Remetente"];
string sSMTP = ConfigurationManager.AppSettings["Web:SMTP"];
MailMessage mail = new MailMessage(sRemetente, usuario.Email);
string PrimeiroNome = GISHelpers.Utils.Severino.PrimeiraMaiusculaTodasPalavras(usuario.Nome);
if (PrimeiroNome.Contains(" "))
PrimeiroNome = PrimeiroNome.Substring(0, PrimeiroNome.IndexOf(" "));
mail.Subject = PrimeiroNome + ", seja bem-vindo!";
mail.Body = "<html style=\"font-family: Verdana; font-size: 11pt;\"><body>Olá, " + PrimeiroNome + ";";
mail.Body += "<br /><br />";
string NomeUsuarioInclusao = usuario.UsuarioInclusao;
Usuario uInclusao = Consulta.FirstOrDefault(p => string.IsNullOrEmpty(p.UsuarioExclusao) && p.Login.Equals(usuario.UsuarioInclusao));
if (uInclusao != null && !string.IsNullOrEmpty(uInclusao.Nome))
NomeUsuarioInclusao = uInclusao.Nome;
string sLink = "http://localhost:26717/Conta/DefinirNovaSenha/" + WebUtility.UrlEncode(GISHelpers.Utils.Criptografador.Criptografar(usuario.IDUsuario + "#" + DateTime.Now.ToString("yyyyMMdd"), 1)).Replace("%", "_@");
mail.Body += "Você foi cadastrado no sistema GiS - Gestão Inteligente da Segurança pelo " + GISHelpers.Utils.Severino.PrimeiraMaiusculaTodasPalavras(NomeUsuarioInclusao) + ".";
mail.Body += "<br /><br />";
mail.Body += "Clique <a href=\"" + sLink + "\">aqui</a> para ativar sua conta ou cole o seguinte link no seu navegador.";
mail.Body += "<br /><br />";
mail.Body += sLink;
mail.Body += "<br /><br />";
mail.Body += "Obrigado por utilizar o GiS!<br />";
mail.Body += "<strong>Gestão Inteligente da Segurança</strong>";
mail.Body += "<br /><br />";
mail.Body += "<span style=\"color: #ccc; font-style: italic;\">Mensagem enviada automaticamente, favor não responder este email.</span>";
mail.Body += "</body></html>";
mail.IsBodyHtml = true;
mail.BodyEncoding = Encoding.UTF8;
SmtpClient smtpClient = new SmtpClient(sSMTP, 587);
smtpClient.Credentials = new System.Net.NetworkCredential()
{
UserName = ConfigurationManager.AppSettings["Web:Remetente"],
Password = "<PASSWORD>"
};
smtpClient.EnableSsl = true;
System.Net.ServicePointManager.ServerCertificateValidationCallback = delegate (object s,
System.Security.Cryptography.X509Certificates.X509Certificate certificate,
System.Security.Cryptography.X509Certificates.X509Chain chain,
System.Net.Security.SslPolicyErrors sslPolicyErrors)
{
return true;
};
smtpClient.Send(mail);
}
private void EnviarEmailParaUsuarioRecemCriadoAD(Usuario usuario)
{
string sRemetente = ConfigurationManager.AppSettings["Web:Remetente"];
string sSMTP = ConfigurationManager.AppSettings["Web:SMTP"];
MailMessage mail = new MailMessage(sRemetente, usuario.Email);
string PrimeiroNome = GISHelpers.Utils.Severino.PrimeiraMaiusculaTodasPalavras(usuario.Nome);
if (PrimeiroNome.Contains(" "))
PrimeiroNome = PrimeiroNome.Substring(0, PrimeiroNome.IndexOf(" "));
mail.Subject = PrimeiroNome + ", seja bem-vindo!";
mail.Body = "<html style=\"font-family: Verdana; font-size: 11pt;\"><body>Olá, " + PrimeiroNome + ".";
mail.Body += "<br /><br />";
string NomeUsuarioInclusao = usuario.UsuarioInclusao;
Usuario uInclusao = Consulta.FirstOrDefault(p => string.IsNullOrEmpty(p.UsuarioExclusao) && p.Login.Equals(usuario.UsuarioInclusao));
if (uInclusao != null && !string.IsNullOrEmpty(uInclusao.Nome))
NomeUsuarioInclusao = uInclusao.Nome;
string sLink = "http://localhost:26717/";
mail.Body += "Você foi cadastrado no sistema GiS - Gestão Inteligente da Segurança pelo " + NomeUsuarioInclusao + ".";
mail.Body += "<br /><br />";
mail.Body += "Clique <a href=\"" + sLink + "\">aqui</a> para acessar a sua conta ou cole o seguinte link no seu navegador.";
mail.Body += "<br /><br />";
mail.Body += sLink;
mail.Body += "<br /><br />";
mail.Body += "Obrigado por utilizar o GiS!<br />";
mail.Body += "<strong>Gestão Inteligente da Segurança</strong>";
mail.Body += "<br /><br />";
mail.Body += "<span style=\"color: #ccc; font-style: italic;\">Mensagem enviada automaticamente, favor não responder este email.</span>";
mail.Body += "</body></html>";
mail.IsBodyHtml = true;
mail.BodyEncoding = Encoding.UTF8;
SmtpClient smtpClient = new SmtpClient(sSMTP, 587);
smtpClient.Credentials = new System.Net.NetworkCredential()
{
UserName = ConfigurationManager.AppSettings["Web:Remetente"],
Password = "<PASSWORD>"
};
smtpClient.EnableSsl = true;
System.Net.ServicePointManager.ServerCertificateValidationCallback = delegate (object s,
System.Security.Cryptography.X509Certificates.X509Certificate certificate,
System.Security.Cryptography.X509Certificates.X509Chain chain,
System.Net.Security.SslPolicyErrors sslPolicyErrors)
{
return true;
};
smtpClient.Send(mail);
}
#endregion
#region Senhas
[ComVisible(false)]
private string CreateHashFromPassword(string pstrOriginalPassword)
{
if (string.IsNullOrEmpty(pstrOriginalPassword))
return string.Empty;
string str3 = ConvertToHashedString(pstrOriginalPassword).Substring(0, 5);
byte[] bytes = Encoding.UTF8.GetBytes(pstrOriginalPassword + str3);
HashAlgorithm lobjHash = new MD5CryptoServiceProvider();
return Convert.ToBase64String(lobjHash.ComputeHash(bytes));
}
[ComVisible(false)]
private string ConvertToHashedString(string pstrOriginal)
{
byte[] bytes = Encoding.UTF8.GetBytes(pstrOriginal);
HashAlgorithm lobjHash = new MD5CryptoServiceProvider();
return Convert.ToBase64String(lobjHash.ComputeHash(bytes));
}
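// How the two hash helpers fit together (sketch): the stored password is
// Base64(MD5(password + salt)), where the salt is the first five characters of
// Base64(MD5(password)) produced by ConvertToHashedString. ValidarCredenciais re-applies
// CreateHashFromPassword to the typed password and compares the result with the Senha
// column, so the hash is deterministic per password (no per-user random salt).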
#endregion
}
}
<file_sep>using GISCore.Business.Abstract;
using GISCore.Business.Concrete;
using GISModel.DTO.Shared;
using GISModel.Entidades;
using GISWeb.Infraestrutura.Filters;
using Ninject;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
namespace GISWeb.Controllers
{
public class DocsPorAtividadeController : Controller
{
#region Inject
[Inject]
public IDocsPorAtividadeBusiness DocsPorAtividadeBusiness { get; set; }
[Inject]
public IDocumentosPessoalBusiness DocumentosPessoalBusiness { get; set; }
[Inject]
public IDiretoriaBusiness DiretoriaBusiness { get; set; }
[Inject]
public IAtividadeBusiness AtividadeBusiness { get; set; }
[Inject]
public IFuncaoBusiness FuncaoBusiness { get; set; }
//[Inject]
//public IAtividadeRiscosBusiness AtividadeRiscosBusiness { get; set; }
[Inject]
public ITipoDeRiscoBusiness TipoDeRiscoBusiness { get; set; }
#endregion
// GET: DocsPorAtividade
public ActionResult Index()
{
ViewBag.Atividade = DocsPorAtividadeBusiness.Consulta.Where(d => string.IsNullOrEmpty(d.UsuarioExclusao)).ToList();
return View();
}
public ActionResult Novo()
{
return View();
}
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Cadastrar(DocumentosPessoal oDocumento)
{
if (ModelState.IsValid)
{
try
{
DocumentosPessoalBusiness.Inserir(oDocumento);
TempData["MensagemSucesso"] = "O Documento '" + oDocumento.NomeDocumento + "' foi cadastrado com sucesso.";
return Json(new { resultado = new RetornoJSON() { URL = Url.Action("Index", "DocumentosPessoal") } });
}
catch (Exception ex)
{
if (ex.GetBaseException() == null)
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.Message } });
}
else
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.GetBaseException().Message } });
}
}
}
else
{
return Json(new { resultado = TratarRetornoValidacaoToJSON() });
}
}
public ActionResult Edicao(string id)
{
return View(DocumentosPessoalBusiness.Consulta.FirstOrDefault(p => p.IDDocumentosEmpregado.Equals(id)));
}
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Atualizar(DocumentosPessoal oDocumentosPessoalBusiness)
{
if (ModelState.IsValid)
{
try
{
DocumentosPessoalBusiness.Alterar(oDocumentosPessoalBusiness);
TempData["MensagemSucesso"] = "O Documento '" + oDocumentosPessoalBusiness.NomeDocumento + "' foi atualizado com sucesso.";
return Json(new { resultado = new RetornoJSON() { URL = Url.Action("Index", "DocumentosPessoal") } });
}
catch (Exception ex)
{
if (ex.GetBaseException() == null)
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.Message } });
}
else
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.GetBaseException().Message } });
}
}
}
else
{
return Json(new { resultado = TratarRetornoValidacaoToJSON() });
}
}
[HttpPost]
public ActionResult TerminarComRedirect(string IDDocumentosEmpregado, string NomeDocumento)
{
try
{
DocumentosPessoal oDocumentosPessoal = DocumentosPessoalBusiness.Consulta.FirstOrDefault(p => p.IDDocumentosEmpregado.Equals(IDDocumentosEmpregado));
if (oDocumentosPessoal == null)
{
return Json(new { resultado = new RetornoJSON() { Erro = "Não foi possível excluir este Documento." } });
}
else
{
oDocumentosPessoal.DataExclusao = DateTime.Now;
oDocumentosPessoal.UsuarioExclusao = "LoginTeste";
DocumentosPessoalBusiness.Alterar(oDocumentosPessoal);
TempData["MensagemSucesso"] = "O Documento foi excluido com sucesso.";
return Json(new { resultado = new RetornoJSON() { URL = Url.Action("Index", "DocumentosPessoal") } });
}
}
catch (Exception ex)
{
if (ex.GetBaseException() == null)
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.Message } });
}
else
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.GetBaseException().Message } });
}
}
}
private string RenderRazorViewToString(string viewName, object model = null)
{
ViewData.Model = model;
using (var sw = new System.IO.StringWriter())
{
var viewResult = ViewEngines.Engines.FindPartialView(ControllerContext,
viewName);
var viewContext = new ViewContext(ControllerContext, viewResult.View,
ViewData, TempData, sw);
viewResult.View.Render(viewContext, sw);
viewResult.ViewEngine.ReleaseView(ControllerContext, viewResult.View);
return sw.GetStringBuilder().ToString();
}
}
public RetornoJSON TratarRetornoValidacaoToJSON()
{
string msgAlerta = string.Empty;
foreach (ModelState item in ModelState.Values)
{
if (item.Errors.Count > 0)
{
foreach (System.Web.Mvc.ModelError i in item.Errors)
{
if (!string.IsNullOrEmpty(i.ErrorMessage))
msgAlerta += i.ErrorMessage;
else
msgAlerta += i.Exception.Message;
}
}
}
return new RetornoJSON()
{
Alerta = msgAlerta,
Erro = string.Empty,
Sucesso = string.Empty
};
}
}
}<file_sep>using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Threading;
using System.Web;
namespace GISWeb
{
public class LanguageMang
{
public static List<Languages> AvailableLanguages = new List<Languages> {
new Languages {
LanguageFullName = "English", LanguageCultureName = "en"
},
new Languages {
LanguageFullName = "Português", LanguageCultureName = "pt-br"
},
new Languages {
LanguageFullName = "Português", LanguageCultureName = "es-mx"
}
};
public static bool IsLanguageAvailable(string lang)
{
return AvailableLanguages.Any(a => a.LanguageCultureName.Equals(lang));
}
public static string GetDefaultLanguage()
{
return AvailableLanguages[0].LanguageCultureName;
}
public void SetLanguage(string lang)
{
try
{
if (!IsLanguageAvailable(lang)) lang = GetDefaultLanguage();
var cultureInfo = new CultureInfo(lang);
Thread.CurrentThread.CurrentUICulture = cultureInfo;
Thread.CurrentThread.CurrentCulture = CultureInfo.CreateSpecificCulture(cultureInfo.Name);
HttpCookie langCookie = new HttpCookie("culture", lang);
langCookie.Expires = DateTime.Now.AddYears(1);
HttpContext.Current.Response.Cookies.Add(langCookie);
}
catch (Exception) { }
}
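// Usage sketch (illustrative): typically invoked from a language-switch action, e.g.
//   new LanguageMang().SetLanguage("pt-br");
// Unknown cultures fall back to GetDefaultLanguage() ("en", the first entry above) and
// the chosen culture is persisted in the "culture" cookie for one year.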
}
public class Languages
{
public string LanguageFullName
{
get;
set;
}
public string LanguageCultureName
{
get;
set;
}
}
}<file_sep>using GISCore.Business.Abstract;
using GISModel.DTO.Shared;
using GISModel.Entidades;
using GISWeb.Infraestrutura.Filters;
using GISWeb.Infraestrutura.Provider.Abstract;
using Ninject;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web.Mvc;
namespace GISWeb.Controllers
{
public class UsuarioController : BaseController
{
#region Inject
[Inject]
public IEmpresaBusiness EmpresaBusiness { get; set; }
[Inject]
public IUsuarioBusiness UsuarioBusiness { get; set; }
[Inject]
public IDepartamentoBusiness DepartamentoBusiness { get; set; }
[Inject]
public ICustomAuthorizationProvider CustomAuthorizationProvider { get; set; }
#endregion
[MenuAtivo(MenuAtivo = "Administracao/Usuarios")]
public ActionResult Index()
{
//ViewBag.Usuarios = UsuarioBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).OrderBy(o => o.Nome).ToList();
ViewBag.Usuarios = (from usr in UsuarioBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
join emp in EmpresaBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList() on usr.IDEmpresa equals emp.IDEmpresa
join dep in DepartamentoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList() on usr.IDDepartamento equals dep.IDDepartamento
select new Usuario()
{
IDUsuario = usr.IDUsuario,
Nome = usr.Nome,
Login = usr.Login,
CPF = usr.CPF,
Email = usr.Email
}).ToList();
return View();
}
[MenuAtivo(MenuAtivo = "Administracao/Usuarios")]
public ActionResult Novo()
{
ViewBag.Empresas = EmpresaBusiness.Consulta.Where(a => string.IsNullOrEmpty(a.UsuarioExclusao)).ToList();
return View();
}
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Cadastrar(Usuario Usuario)
{
if (ModelState.IsValid)
{
try
{
bool bRedirect = false;
if (Usuario.IDUsuario != null && Usuario.IDUsuario.Equals("redirect"))
bRedirect = true;
//Usuario.UsuarioInclusao = CustomAuthorizationProvider.UsuarioAutenticado.Login;
Usuario.UsuarioInclusao = "Teste";
UsuarioBusiness.Inserir(Usuario);
if (bRedirect)
{
TempData["MensagemSucesso"] = "O usuário '" + Usuario.Nome + "' foi cadastrado com sucesso.";
return Json(new { resultado = new RetornoJSON() { URL = "#" + Url.Action("Index", "Usuario").Substring(1) } });
}
else
{
return Json(new { resultado = new RetornoJSON() { Sucesso = "O usuário '" + Usuario.Nome + "' foi cadastrado com sucesso." } });
}
}
catch (Exception ex)
{
if (ex.GetBaseException() == null)
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.Message } });
}
else
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.GetBaseException().Message } });
}
}
}
else
{
return Json(new { resultado = TratarRetornoValidacaoToJSON() });
}
}
[MenuAtivo(MenuAtivo = "Administracao/Usuarios")]
public ActionResult Edicao(string id)
{
ViewBag.Empresas = EmpresaBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList();
List<Usuario> usuarios = (from usr in UsuarioBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao) && p.IDUsuario.Equals(id)).ToList()
join emp in EmpresaBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList() on usr.IDEmpresa equals emp.IDEmpresa
join dep in DepartamentoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList() on usr.IDDepartamento equals dep.IDDepartamento
select new Usuario()
{
IDUsuario = usr.IDUsuario,
Nome = usr.Nome,
Login = usr.Login,
CPF = usr.CPF,
Email = usr.Email,
TipoDeAcesso = usr.TipoDeAcesso,
DataInclusao = usr.DataInclusao
}).ToList();
if (usuarios.Count > 0)
{
Usuario oUsuario = usuarios[0];
ViewBag.Departamentos = DepartamentoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao) && p.IDEmpresa.Equals(oUsuario.IDEmpresa)).ToList();
return PartialView(oUsuario);
}
return View();
}
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Atualizar(Usuario Usuario)
{
if (ModelState.IsValid)
{
try
{
Usuario.UsuarioExclusao = CustomAuthorizationProvider.UsuarioAutenticado.Login;
UsuarioBusiness.Alterar(Usuario);
TempData["MensagemSucesso"] = "O usuário '" + Usuario.Nome + "' foi atualizado com sucesso.";
return Json(new { resultado = new RetornoJSON() { URL = "#" + Url.Action("Index", "Usuario").Substring(1) } });
}
catch (Exception ex)
{
if (ex.GetBaseException() == null)
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.Message } });
}
else
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.GetBaseException().Message } });
}
}
}
else
{
return Json(new { resultado = TratarRetornoValidacaoToJSON() });
}
}
public ActionResult BuscarUsuarioPorID(string IDUsuario)
{
try
{
//Usuario oUsuario = UsuarioBusiness.Consulta.FirstOrDefault(p => string.IsNullOrEmpty(p.UsuarioExclusao) && p.UniqueKey.Equals(IDUsuario));
List<Usuario> usuarios = (from usr in UsuarioBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao) && p.IDUsuario.Equals(IDUsuario)).ToList()
join emp in EmpresaBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList() on usr.IDEmpresa equals emp.IDEmpresa
join dep in DepartamentoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList() on usr.IDDepartamento equals dep.IDDepartamento
select new Usuario()
{
IDUsuario = usr.IDUsuario,
Nome = usr.Nome,
Login = usr.Login,
CPF = usr.CPF,
Email = usr.Email,
TipoDeAcesso = usr.TipoDeAcesso,
DataInclusao = usr.DataInclusao,
}).ToList();
if (usuarios.Count < 1)
{
return Json(new { resultado = new RetornoJSON() { Alerta = "Usuário com o ID '" + IDUsuario + "' não encontrado." } });
}
else
{
Usuario oUsuario = usuarios[0];
return Json(new { data = RenderRazorViewToString("_Detalhes", oUsuario) });
}
}
catch (Exception ex)
{
if (ex.GetBaseException() == null)
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.Message } });
}
else
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.GetBaseException().Message } });
}
}
}
[HttpPost]
public ActionResult Terminar(string IDUsuario)
{
try
{
Usuario oUsuario = UsuarioBusiness.Consulta.FirstOrDefault(p => string.IsNullOrEmpty(p.UsuarioExclusao) && p.IDUsuario.Equals(IDUsuario));
if (oUsuario == null)
{
return Json(new { resultado = new RetornoJSON() { Erro = "Não foi possível excluir o usuário, pois o mesmo não foi localizado." } });
}
else
{
oUsuario.UsuarioExclusao = CustomAuthorizationProvider.UsuarioAutenticado.Login;
//UsuarioBusiness.Terminar(oUsuario);
return Json(new { resultado = new RetornoJSON() { Sucesso = "O usuário '" + oUsuario.Nome + "' foi excluído com sucesso." } });
}
}
catch (Exception ex)
{
if (ex.GetBaseException() == null)
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.Message } });
}
else
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.GetBaseException().Message } });
}
}
}
[HttpPost]
public ActionResult TerminarComRedirect(string IDUsuario)
{
try
{
Usuario oUsuario = UsuarioBusiness.Consulta.FirstOrDefault(p => string.IsNullOrEmpty(p.UsuarioExclusao) && p.IDUsuario.Equals(IDUsuario));
if (oUsuario == null)
{
return Json(new { resultado = new RetornoJSON() { Erro = "Não foi possível excluir o usuário, pois o mesmo não foi localizado." } });
}
else
{
oUsuario.DataExclusao = DateTime.Now;
oUsuario.UsuarioExclusao = CustomAuthorizationProvider.UsuarioAutenticado.Login;
UsuarioBusiness.Alterar(oUsuario);
TempData["MensagemSucesso"] = "O usuário '" + oUsuario.Nome + "' foi excluído com sucesso.";
return Json(new { resultado = new RetornoJSON() { URL = "#" + Url.Action("Index", "Empresa").Substring(1) } });
}
}
catch (Exception ex)
{
if (ex.GetBaseException() == null)
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.Message } });
}
else
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.GetBaseException().Message } });
}
}
}
}
}<file_sep>using System.ComponentModel.DataAnnotations;
namespace GISModel.DTO.Conta
{
public class NovaSenhaViewModel
{
public string IDUsuario { get; set; }
public string Email { get; set; }
[Required(ErrorMessage = "Informe a nova senha")]
[Display(Name = "Nova Senha")]
[DataType(DataType.Password)]
public string NovaSenha { get; set; }
[Required(ErrorMessage = "Informe a nova senha novamente")]
[Display(Name = "Confirmar Nova Senha")]
[DataType(DataType.Password)]
public string ConfirmarNovaSenha { get; set; }
}
}
<file_sep>using GISCore.Business.Abstract;
using GISModel.Entidades;
using System;
using System.Collections.Generic;
using System.Configuration;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace GISCore.Business.Concrete
{
public class PerigoPotencialBusiness : BaseBusiness<PerigoPotencial>, IPerigoPotencialBusiness
{
public override void Inserir(PerigoPotencial pPerigo)
{
pPerigo.IDPerigoPotencial = Guid.NewGuid().ToString();
base.Inserir(pPerigo);
}
public override void Alterar(PerigoPotencial pPerigo)
{
PerigoPotencial tempPerigoPotencial = Consulta.FirstOrDefault(p => p.IDPerigoPotencial.Equals(pPerigo.IDPerigoPotencial));
if (tempPerigoPotencial == null)
{
throw new Exception("Não foi possível encontrar o Evento através do ID.");
}
else
{
tempPerigoPotencial.DescricaoEvento = pPerigo.DescricaoEvento;
base.Alterar(tempPerigoPotencial);
}
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace GISModel.Entidades
{
[Table("tbDocumentosPessoal")]
public class DocumentosPessoal: EntidadeBase
{
[Key]
public string IDDocumentosEmpregado { get; set; }
[Display(Name ="Nome do Documento")]
public string NomeDocumento { get; set; }
[Display(Name ="Descrição")]
public string DescriçãoDocumento { get; set; }
}
}
<file_sep>using GISCore.Business.Abstract;
using GISModel.Entidades;
using System;
using System.Collections.Generic;
using System.Configuration;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace GISCore.Business.Concrete
{
public class DocumentosPessoalBusiness : BaseBusiness<DocumentosPessoal>, IDocumentosPessoalBusiness
{
public override void Inserir(DocumentosPessoal pDocumentosPessoal)
{
pDocumentosPessoal.IDDocumentosEmpregado = Guid.NewGuid().ToString();
// pAtividadeAlocada.Admitido = "Admitido";
base.Inserir(pDocumentosPessoal);
}
public override void Alterar(DocumentosPessoal pDocumentosPessoal)
{
DocumentosPessoal tempDocumentosPessoal = Consulta.FirstOrDefault(p => p.IDDocumentosEmpregado.Equals(pDocumentosPessoal.IDDocumentosEmpregado));
if (tempDocumentosPessoal == null)
{
throw new Exception("Não foi possível encontrar este Documento");
}
            tempDocumentosPessoal.NomeDocumento = pDocumentosPessoal.NomeDocumento;
            tempDocumentosPessoal.DescriçãoDocumento = pDocumentosPessoal.DescriçãoDocumento;
            base.Alterar(tempDocumentosPessoal);
}
}
}
<file_sep>
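// Requests the "Plano de Ação" partial for the given identifier via AJAX and shows it in a bootbox modal dialog.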
function CriarPlanoDeAção(IDIdentificador) {
$(".LoadingLayout").show();
$.ajax({
method: "POST",
url: "/PlanoDeAcao/CriarPlanoDeAção",
data: { IDIdentificador: IDIdentificador },
error: function (erro) {
$(".LoadingLayout").hide();
ExibirMensagemGritter('Oops! Erro inesperado', erro.responseText, 'gritter-error')
},
success: function (content) {
$(".LoadingLayout").hide();
if (content.data != null) {
bootbox.dialog({
message: content.data,
title: "<span class='bigger-110'>Plano De Ação</span>",
backdrop: true,
locale: "br",
buttons: {},
onEscape: true
});
}
else {
TratarResultadoJSON(content.resultado);
}
}
});
}
function OnSuccessCadastrarTipoDeRisco(data) {
$('#formCadastroTipoDeRisco').removeAttr('style');
$(".LoadingLayout").hide();
$('#btnSalvar').show();
TratarResultadoJSON(data.resultado);
}
function OnBeginCadastrarTipoDeRisco() {
$(".LoadingLayout").show();
$('#btnSalvar').hide();
$("#formCadastroTipoDeRisco").css({ opacity: "0.5" });
}<file_sep>using BotDetect.Web.Mvc;
using GISCore.Business.Abstract;
using GISCore.Infrastructure.Utils;
using GISModel.DTO.Conta;
using GISModel.DTO.Shared;
using GISWeb.Infraestrutura.Filters;
using GISWeb.Infraestrutura.Provider.Abstract;
using Ninject;
using System;
using System.Configuration;
using System.Linq;
using System.Net;
using System.Web;
using System.Web.Mvc;
using System.Web.SessionState;
using System.Web.UI;
namespace GISWeb.Controllers
{
[SessionState(SessionStateBehavior.ReadOnly)]
public class AccountController : BaseController
{
#region Inject
[Inject]
public ICustomAuthorizationProvider AutorizacaoProvider { get; set; }
[Inject]
public IUsuarioBusiness UsuarioBusiness { get; set; }
#endregion
public ActionResult Login(string path)
{
ViewBag.OcultarMenus = true;
ViewBag.IncluirCaptcha = Convert.ToBoolean(ConfigurationManager.AppSettings["AD:DMZ"]);
ViewBag.UrlAnterior = path;
return View();
}
[HttpPost]
[AllowAnonymous]
[ValidateAntiForgeryToken]
public ActionResult Login(AutenticacaoModel usuario)
{
try
{
if (ModelState.IsValid)
{
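                    // Expire every cookie except the anti-forgery token before authenticating, so no stale session data survives the new login.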
foreach (var cookieKey in Request.Cookies.AllKeys.Where(c => !c.Equals("__RequestVerificationToken")))
{
var deleteCookie = new HttpCookie(cookieKey);
deleteCookie.Expires = DateTime.Now;
Response.Cookies.Add(deleteCookie);
}
AutorizacaoProvider.Logar(usuario);
if (!string.IsNullOrWhiteSpace(usuario.Nome))
return Json(new { url = usuario.Nome.Replace("$", "&") });
else
return Json(new { url = Url.Action(ConfigurationManager.AppSettings["Web:DefaultAction"], ConfigurationManager.AppSettings["Web:DefaultController"]) });
}
return View(usuario);
}
catch (Exception ex)
{
return Json(new { alerta = ex.Message, titulo = "Oops! Problema ao realizar login..." });
}
}
[HttpPost]
[AllowAnonymous]
[ValidateAntiForgeryToken]
[CaptchaValidation("CaptchaCode", "LoginCaptcha", "Código do CAPTCHA incorreto.")]
public ActionResult LoginComCaptcha(AutenticacaoModel usuario)
{
MvcCaptcha.ResetCaptcha("LoginCaptcha");
ViewBag.IncluirCaptcha = Convert.ToBoolean(ConfigurationManager.AppSettings["AD:DMZ"]);
try
{
if (ModelState.IsValid)
{
AutorizacaoProvider.Logar(usuario);
if (!string.IsNullOrWhiteSpace(usuario.Nome))
return Json(new { url = usuario.Nome.Replace("$", "&") });
else
return Json(new { url = Url.Action(ConfigurationManager.AppSettings["Web:DefaultAction"], ConfigurationManager.AppSettings["Web:DefaultController"]) });
}
return View("Login", usuario);
}
catch (Exception ex)
{
return Json(new { alerta = ex.Message, titulo = "Oops! Problema ao realizar login..." });
}
}
public ActionResult Logout()
{
AutorizacaoProvider.Deslogar();
foreach (var cookieKey in Request.Cookies.AllKeys)
{
var deleteCookie = new HttpCookie(cookieKey);
deleteCookie.Expires = DateTime.Now;
Response.Cookies.Add(deleteCookie);
}
return RedirectToAction("Login", "Account");
}
[Autorizador]
[DadosUsuario]
public ActionResult Perfil()
{
return View(AutorizacaoProvider.UsuarioAutenticado);
}
[HttpPost]
[Autorizador]
[DadosUsuario]
public ActionResult AtualizarFoto(string imagemStringBase64)
{
try
{
UsuarioBusiness.SalvarAvatar(AutorizacaoProvider.UsuarioAutenticado.Login, imagemStringBase64, "jpg");
}
catch (Exception ex)
{
Extensions.GravaCookie("MensagemErro", ex.Message, 2);
}
return Json(new { url = Url.Action("Perfil") });
}
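        // Profile photos are cached on the client for 7 days (604800 seconds), varying by login.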
[OutputCache(Duration = 604800, Location = OutputCacheLocation.Client, VaryByParam = "login")]
public ActionResult FotoPerfil(string login)
{
byte[] avatar = null;
try
{
avatar = UsuarioBusiness.RecuperarAvatar(login);
}
catch { }
if (avatar == null || avatar.Length == 0)
avatar = System.IO.File.ReadAllBytes(Server.MapPath("~/Content/Ace/avatars/unknown.png"));
return File(avatar, "image/jpeg");
}
public ActionResult DefinirNovaSenha(string id)
{
try
{
if (string.IsNullOrEmpty(id))
{
TempData["MensagemErro"] = "Não foi possível recuperar a identificação do usuário.";
}
else
{
id = GISHelpers.Utils.Criptografador.Descriptografar(WebUtility.UrlDecode(id.Replace("_@", "%")), 1);
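                    // The decrypted value has the format "<IDUsuario>#<yyyyMMdd>"; the date suffix is used below to check whether the link has expired.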
string numDiasExpiracao = ConfigurationManager.AppSettings["Web:ExpirarLinkAcesso"];
if (string.IsNullOrEmpty(numDiasExpiracao))
numDiasExpiracao = "1";
if (DateTime.Now.Subtract(DateTime.ParseExact(id.Substring(id.IndexOf("#") + 1), "yyyyMMdd", System.Globalization.CultureInfo.InvariantCulture)).Days > int.Parse(numDiasExpiracao))
{
TempData["MensagemErro"] = "Este link já expirou, solicite um outro link na opção abaixo.";
}
else
{
NovaSenhaViewModel oNovaSenhaViewModel = new NovaSenhaViewModel();
oNovaSenhaViewModel.IDUsuario = id.Substring(0, id.IndexOf("#"));
return View(oNovaSenhaViewModel);
}
}
}
catch (Exception ex)
{
if (ex.GetBaseException() == null)
{
TempData["MensagemErro"] = ex.Message;
}
else
{
TempData["MensagemErro"] = ex.GetBaseException().Message;
}
}
return View();
}
[HttpPost]
[AllowAnonymous]
[ValidateAntiForgeryToken]
public ActionResult DefinirSenha(NovaSenhaViewModel novaSenhaViewModel)
{
if (ModelState.IsValid)
{
if (novaSenhaViewModel.NovaSenha.Equals(novaSenhaViewModel.ConfirmarNovaSenha))
{
try
{
if (string.IsNullOrEmpty(novaSenhaViewModel.IDUsuario))
return Json(new { resultado = new RetornoJSON() { Erro = "Não foi possível localizar o ID do usuário através de sua requisição. Solicite um novo acesso." } });
UsuarioBusiness.DefinirSenha(novaSenhaViewModel);
TempData["MensagemSucesso"] = "Senha alterada com sucesso.";
return Json(new { resultado = new RetornoJSON() { URL = Url.Action("Login", "Conta") } });
}
catch (Exception ex)
{
if (ex.GetBaseException() == null)
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.Message } });
}
else
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.GetBaseException().Message } });
}
}
}
else
{
return Json(new { resultado = new RetornoJSON() { Erro = "As duas senhas devem ser identicas." } });
}
}
else
{
return Json(new { resultado = TratarRetornoValidacaoToJSON() });
}
}
[HttpPost]
[AllowAnonymous]
[ValidateAntiForgeryToken]
public ActionResult SolicitarAcesso(NovaSenhaViewModel novaSenhaViewModel)
{
if (!string.IsNullOrEmpty(novaSenhaViewModel.Email))
{
try
{
UsuarioBusiness.SolicitarAcesso(novaSenhaViewModel.Email);
TempData["MensagemSucesso"] = "Solicitação de acesso realizada com sucesso.";
return Json(new { resultado = new RetornoJSON() { URL = Url.Action("Login", "Conta") } });
}
catch (Exception ex)
{
if (ex.GetBaseException() == null)
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.Message } });
}
else
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.GetBaseException().Message } });
}
}
}
else
{
return Json(new { resultado = new RetornoJSON() { Erro = "Informe o e-mail cadastrado em sua conta." } });
}
}
}
}<file_sep>using GISModel.CustomAttributes;
using GISModel.Enums;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
namespace GISModel.Entidades
{
[Table("tbUsuario")]
public class Usuario : EntidadeBase
{
[Key]
public string IDUsuario { get; set; }
[Display(Name = "CPF")]
[Required(ErrorMessage = "CPF obrigatório")]
[CustomValidationCPF(ErrorMessage = "CPF inválido")]
public string CPF { get; set; }
[Required(ErrorMessage = "Informe o nome do usuário")]
public string Nome { get; set; }
[Required(ErrorMessage = "Informe o Login do usuário")]
public string Login { get; set; }
public string Senha { get; set; }
[Required(ErrorMessage = "Informe o e-mail do usuário")]
[DataType(DataType.EmailAddress, ErrorMessage = "Informe um e-mail válido")]
public string Email { get; set; }
[Display(Name = "Empresa")]
[Required(ErrorMessage = "Selecione uma empresa")]
public string IDEmpresa { get; set; }
[Display(Name = "Departamento")]
[Required(ErrorMessage = "Selecione um departamento")]
public string IDDepartamento { get; set; }
[Display(Name = "Tipo de Acesso")]
[Required(ErrorMessage = "Selecione como este usuário será validado")]
public TipoDeAcesso? TipoDeAcesso { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace GISModel.Entidades
{
[Table("tbDocAtividade")]
public class DocAtividade: EntidadeBase
{
[Key]
public string IDDocAtividade { get; set; }
public string IDUniqueKey { get; set; }
public string IDDocumentosEmpregado { get; set; }
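        // Navigation property to the related Atividade; EF maps it to the UniqueKey_IDAtividade foreign-key column created in the v01 migration.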
public Atividade UniqueKey { get; set; }
public virtual DocumentosPessoal DocumentosEmpregado { get; set; }
}
}
<file_sep>namespace GISCore.Migrations
{
using System;
using System.Data.Entity.Migrations;
public partial class v01 : DbMigration
{
public override void Up()
{
DropForeignKey("dbo.tbDocsPorAtividade", "IDAtividade", "dbo.tbAtividade");
DropIndex("dbo.tbDocsPorAtividade", new[] { "IDAtividade" });
DropIndex("dbo.tbDocsPorAtividade", new[] { "IDDocumentosEmpregado" });
CreateTable(
"dbo.tbDocAtividade",
c => new
{
IDDocAtividade = c.String(nullable: false, maxLength: 128),
IDUniqueKey = c.String(),
IDDocumentosEmpregado = c.String(maxLength: 128),
UsuarioInclusao = c.String(),
DataInclusao = c.DateTime(nullable: false),
UsuarioExclusao = c.String(),
DataExclusao = c.DateTime(nullable: false),
UniqueKey_IDAtividade = c.String(maxLength: 128),
})
.PrimaryKey(t => t.IDDocAtividade)
.ForeignKey("dbo.tbDocumentosPessoal", t => t.IDDocumentosEmpregado)
.ForeignKey("dbo.tbAtividade", t => t.UniqueKey_IDAtividade)
.Index(t => t.IDDocumentosEmpregado)
.Index(t => t.UniqueKey_IDAtividade);
AlterColumn("dbo.tbDocsPorAtividade", "idAtividade", c => c.String());
CreateIndex("dbo.tbDocsPorAtividade", "idDocumentosEmpregado");
}
public override void Down()
{
DropForeignKey("dbo.tbDocAtividade", "UniqueKey_IDAtividade", "dbo.tbAtividade");
DropForeignKey("dbo.tbDocAtividade", "IDDocumentosEmpregado", "dbo.tbDocumentosPessoal");
DropIndex("dbo.tbDocsPorAtividade", new[] { "idDocumentosEmpregado" });
DropIndex("dbo.tbDocAtividade", new[] { "UniqueKey_IDAtividade" });
DropIndex("dbo.tbDocAtividade", new[] { "IDDocumentosEmpregado" });
AlterColumn("dbo.tbDocsPorAtividade", "idAtividade", c => c.String(maxLength: 128));
DropTable("dbo.tbDocAtividade");
CreateIndex("dbo.tbDocsPorAtividade", "IDDocumentosEmpregado");
CreateIndex("dbo.tbDocsPorAtividade", "IDAtividade");
AddForeignKey("dbo.tbDocsPorAtividade", "IDAtividade", "dbo.tbAtividade", "IDAtividade");
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Data.Entity;
using GISModel.Entidades;
namespace GISCore.Repository.Configuration
{
public class InitializerBanco : DropCreateDatabaseIfModelChanges<SESTECContext>
{
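        // Drops and recreates the database whenever the EF model changes, then seeds it with sample data.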
protected override void Seed(SESTECContext context)
{
            // Seed some sample data into the database
new List<Empregado>
{
new Empregado
{
IDEmpregado = "14d57737-5611-4216-8f1d-cc120b16dbc3",
CPF ="24547551812",
Nome ="<NAME>",
DataNascimento = DateTime.Now,
},
new Empregado
{
IDEmpregado = "d0dc096d-2929-4a85-88ff-993779d91745",
CPF ="24547551812",
Nome ="<NAME>",
DataNascimento = DateTime.Now,
}
}.ForEach(p => context.Empregado.Add(p));
base.Seed(context);
}
}
}
<file_sep>using GISCore.Business.Abstract;
using GISCore.Business.Concrete;
using GISModel.DTO.Shared;
using GISModel.Entidades;
using Ninject;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
namespace GISWeb.Controllers
{
public class DiretoriaController : Controller
{
#region Inject
[Inject]
public IDiretoriaBusiness DiretoriaBusiness { get; set; }
[Inject]
public ITipoDeRiscoBusiness TipoDeRiscoBusiness { get; set; }
[Inject]
public ICargoBusiness CargoBusiness { get; set; }
[Inject]
public IEmpresaBusiness EmpresaBusiness { get; set; }
[Inject]
public IDepartamentoBusiness DepartamentoBusiness { get; set; }
#endregion
        // GET: Diretoria
public ActionResult Index()
{
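            // Pair each active department with its directorate so the index view can list them together.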
ViewBag.Diretoria = DiretoriaBusiness.Consulta.Where(d => string.IsNullOrEmpty(d.UsuarioExclusao)).Distinct().ToList();
ViewBag.Departamentos = DepartamentoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList();
var lista = from Dir in DiretoriaBusiness.Consulta.Where(d => string.IsNullOrEmpty(d.UsuarioExclusao)).Distinct().ToList()
join Dep in DepartamentoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on Dir.IDDiretoria equals Dep.IDDiretoria
select new Departamento()
{
IDDepartamento = Dep.IDDepartamento,
Sigla= Dep.Sigla,
Descricao = Dep.Descricao,
Diretoria = new Diretoria()
{
IDDiretoria=Dir.IDDiretoria,
Sigla=Dir.Sigla,
Descricao = Dir.Descricao
}
};
ViewBag.lista = lista;
return View();
}
public ActionResult Novo(string IDEmpresa, string nome)
{
ViewBag.Empresa = new SelectList(EmpresaBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList(), "IDEmpresa", "NomeFantasia");
ViewBag.Empresas = IDEmpresa;
ViewBag.NomeEmpresa = nome;
try
{
// Atividade oAtividade = AtividadeBusiness.Consulta.FirstOrDefault(p => string.IsNullOrEmpty(p.UsuarioExclusao) && p.idFuncao.Equals(id));
if (ViewBag.Empresas == null)
{
return Json(new { resultado = new RetornoJSON() { Alerta = "Parametro id não passado." } });
}
else
{
return Json(new { data = RenderRazorViewToString("_Novo") });
}
}
catch (Exception ex)
{
if (ex.GetBaseException() == null)
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.Message } });
}
else
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.GetBaseException().Message } });
}
}
}
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Cadastrar(Diretoria oDiretoria)
{
if (ModelState.IsValid)
{
try
{
DiretoriaBusiness.Inserir(oDiretoria);
TempData["MensagemSucesso"] = "A Diretoria'" + oDiretoria.Sigla + "' foi cadastrada com sucesso!";
return Json(new { resultado = new RetornoJSON() { URL = Url.Action("EmpresaCriacoes","Empresa", new { id = oDiretoria.IDEmpresa })}});
}
catch (Exception ex)
{
if (ex.GetBaseException() == null)
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.Message } });
}
else
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.GetBaseException().Message } });
}
}
}
else
{
return Json(new { resultado = TratarRetornoValidacaoToJSON() });
}
}
public ActionResult Edicao(string id)
{
//ViewBag.Riscos = TipoDeRiscoBusiness.Consulta.Where(p => p.IDTipoDeRisco.Equals(id));
return View(DiretoriaBusiness.Consulta.FirstOrDefault(p => p.IDDiretoria.Equals(id)));
}
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Atualizar(Diretoria oDiretoria)
{
if (ModelState.IsValid)
{
try
{
DiretoriaBusiness.Alterar(oDiretoria);
TempData["MensagemSucesso"] = "A Diretoria '" + oDiretoria.Sigla + "' foi atualizada com sucesso.";
return Json(new { resultado = new RetornoJSON() { URL = Url.Action("Index", "Diretoria") } });
}
catch (Exception ex)
{
if (ex.GetBaseException() == null)
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.Message } });
}
else
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.GetBaseException().Message } });
}
}
}
else
{
return Json(new { resultado = TratarRetornoValidacaoToJSON() });
}
}
[HttpPost]
public ActionResult TerminarComRedirect(string IDDiretoria)
{
try
{
Diretoria oDiretoria = DiretoriaBusiness.Consulta.FirstOrDefault(p => p.IDDiretoria.Equals(IDDiretoria));
if (oDiretoria == null)
{
return Json(new { resultado = new RetornoJSON() { Erro = "Não foi possível excluir esta Diretoria." } });
}
else
{
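                    // Soft delete with a hard-coded placeholder login; other controllers stamp CustomAuthorizationProvider.UsuarioAutenticado.Login here.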
oDiretoria.DataExclusao = DateTime.Now;
oDiretoria.UsuarioExclusao = "LoginTeste";
DiretoriaBusiness.Alterar(oDiretoria);
TempData["MensagemSucesso"] = "A Diretoria '" + oDiretoria.Sigla + "' foi excluida com sucesso.";
return Json(new { resultado = new RetornoJSON() { URL = Url.Action("Index", "Diretoria", new { id = IDDiretoria }) } });
}
}
catch (Exception ex)
{
if (ex.GetBaseException() == null)
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.Message } });
}
else
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.GetBaseException().Message } });
}
}
}
private string RenderRazorViewToString(string viewName, object model = null)
{
ViewData.Model = model;
using (var sw = new System.IO.StringWriter())
{
var viewResult = ViewEngines.Engines.FindPartialView(ControllerContext,
viewName);
var viewContext = new ViewContext(ControllerContext, viewResult.View,
ViewData, TempData, sw);
viewResult.View.Render(viewContext, sw);
viewResult.ViewEngine.ReleaseView(ControllerContext, viewResult.View);
return sw.GetStringBuilder().ToString();
}
}
public RetornoJSON TratarRetornoValidacaoToJSON()
{
string msgAlerta = string.Empty;
foreach (ModelState item in ModelState.Values)
{
if (item.Errors.Count > 0)
{
foreach (System.Web.Mvc.ModelError i in item.Errors)
{
if (!string.IsNullOrEmpty(i.ErrorMessage))
msgAlerta += i.ErrorMessage;
else
msgAlerta += i.Exception.Message;
}
}
}
return new RetornoJSON()
{
Alerta = msgAlerta,
Erro = string.Empty,
Sucesso = string.Empty
};
}
}
}<file_sep>using GISModel.DTO.Conta;
using GISModel.Entidades;
namespace GISCore.Business.Abstract
{
public interface IUsuarioBusiness : IBaseBusiness<Usuario>
{
Usuario ValidarCredenciais(AutenticacaoModel autenticacaoModel);
void DefinirSenha(NovaSenhaViewModel novaSenhaViewModel);
void SolicitarAcesso(string email);
byte[] RecuperarAvatar(string login);
void SalvarAvatar(string login, string imageStringBase64, string extensaoArquivo);
}
}
<file_sep>jQuery(function ($) {
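    // Make the three exposure-type checkboxes (Contínuo, Intermitente, Eventual) mutually exclusive: checking one disables the other two.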
if ($("#ContinuoID").click(function () {
if ($(this).is(':checked')) {
$("#IntermitenteID").prop("disabled", true);
$('#EventualID').prop("disabled", true);
}
else{
$("#IntermitenteID").prop("disabled", false);
$('#EventualID').prop("disabled", false);
}
}));
if ($("#IntermitenteID").click(function () {
if ($(this).is(':checked')) {
$("#ContinuoID").prop("disabled", true);
$('#EventualID').prop("disabled", true);
} else {
$("#ContinuoID").prop("disabled", false);
$('#EventualID').prop("disabled", false);
}
}));
if ($("#EventualID").click(function () {
if ($(this).is(':checked')) {
$("#ContinuoID").prop("disabled", true);
$('#IntermitenteID').prop("disabled", true);
} else {
$("#ContinuoID").prop("disabled", false);
$('#IntermitenteID').prop("disabled", false);
}
}));
});
<file_sep>using GISCore.Business.Abstract;
using GISModel.Entidades;
using System;
using System.Collections.Generic;
using System.Configuration;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace GISCore.Business.Concrete
{
public class DocsPorAtividadeBusiness : BaseBusiness<DocsPorAtividade>, IDocsPorAtividadeBusiness
{
public override void Inserir(DocsPorAtividade pDocsPorAtividade)
{
pDocsPorAtividade.IDDocAtividade = Guid.NewGuid().ToString();
// pAtividadeAlocada.Admitido = "Admitido";
base.Inserir(pDocsPorAtividade);
}
public override void Alterar(DocsPorAtividade pDocsPorAtividade)
{
DocsPorAtividade tempDocsPorAtividade = Consulta.FirstOrDefault(p => p.IDDocAtividade.Equals(pDocsPorAtividade.IDDocAtividade));
if (tempDocsPorAtividade == null)
{
throw new Exception("Não foi possível encontrar este Documento");
}
tempDocsPorAtividade.idAtividade = pDocsPorAtividade.idAtividade;
tempDocsPorAtividade.idDocumentosEmpregado = pDocsPorAtividade.idDocumentosEmpregado;
base.Alterar(tempDocsPorAtividade);
}
}
}
<file_sep>jQuery(function ($) {
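    // Switch between the visible login/forgot-password boxes when a toolbar link with a data-target is clicked.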
$(document).on('click', '.toolbar a[data-target]', function (e) {
e.preventDefault();
var target = $(this).data('target');
$('.widget-box.visible').removeClass('visible');//hide others
$(target).addClass('visible');//show target
});
});
function OnBeginDefinirNovaSenha(content) {
$("#login-box").css({ opacity: "0.5" });
}
function OnSuccessDefinirNovaSenha(content) {
$('#login-box').removeAttr('style');
TratarResultadoJSON(content.resultado);
}
function OnSuccessSolicicacaoAcesso(content) {
$('#forgot-box').removeAttr('style');
TratarResultadoJSON(content.resultado);
}
function OnBeginSolicicacaoAcesso(content) {
$("#forgot-box").css({ opacity: "0.5" });
}
function FailMessage(content) {
    ExibirMensagemDeErro("Exceção na página de Definir Nova Senha");
}<file_sep>using GISCore.Business.Abstract;
using GISCore.Business.Concrete;
using GISModel.DTO.Shared;
using GISModel.Entidades;
using GISWeb.Infraestrutura.Filters;
using Ninject;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Web;
using System.Web.Mvc;
namespace GISWeb.Controllers
{
public class AdmissaoController : Controller
{
        #region Inject
[Inject]
public IAdmissaoBusiness AdmissaoBusiness { get; set; }
[Inject]
public IAtividadesDoEstabelecimentoBusiness AtividadesDoEstabelecimentoBusiness { get; set; }
[Inject]
public IDepartamentoBusiness DepartamentoBusiness { get; set; }
[Inject]
public IEmpresaBusiness EmpresaBusiness { get; set; }
[Inject]
public IEmpregadoBusiness EmpregadoBusiness { get; set; }
[Inject]
public IEstabelecimentoAmbienteBusiness EstabelecimentoImagensBusiness { get; set; }
[Inject]
public IEstabelecimentoBusiness EstabelecimentoBusiness { get; set; }
[Inject]
public IAtividadesDoEstabelecimentoBusiness RiscosDoEstabelecimentoBusiness { get; set; }
[Inject]
public IAlocacaoBusiness AlocacaoBusiness { get; set; }
[Inject]
public IAtividadeAlocadaBusiness AtividadeAlocadaBusiness { get; set; }
[Inject]
public IExposicaoBusiness ExposicaoBusiness { get; set; }
[Inject]
public ITipoDeRiscoBusiness TipoDeRiscoBusiness { get; set; }
[Inject]
public IEventoPerigosoBusiness EventoPerigosoBusiness { get; set; }
[Inject]
public IPossiveisDanosBusiness PossiveisDanosBusiness { get; set; }
[Inject]
public IPerigoPotencialBusiness PerigoPotencialBusiness { get; set; }
#endregion
// GET: EstabelecimentoImagens
//public ActionResult Index(string id)
//{
// ViewBag.Imagens = EstabelecimentoImagensBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao) && (p.IDEstabelecimentoImagens.Equals(id))).ToList();
// return View();
//}
public ActionResult Empresas()
{
ViewBag.Empresas = EmpresaBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList();
return View();
}
public ActionResult EmpregadosPorEmpresa(string idEmpresa)
{
ViewBag.EmpregadoPorEmpresa = AdmissaoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao) && (p.IDEmpresa.Equals(idEmpresa))).ToList();
return View();
}
        // Receives the IDEmpregado as a parameter to build the employee profile
public ActionResult PerfilEmpregado(string id)
{
ViewBag.Perfil = AdmissaoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao) && (p.IDEmpregado.Equals(id))).ToList();
ViewBag.Admissao = AdmissaoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao) && (p.IDEmpregado.Equals(id))&&(p.Admitido=="Admitido")).ToList();
ViewBag.Alocacao = AlocacaoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao) && (p.Admissao.IDEmpregado.Equals(id)) && (p.Ativado == "true")).ToList();
ViewBag.idEmpregado = id;
Admissao oAdmissao = AdmissaoBusiness.Consulta.FirstOrDefault(p => p.IDEmpregado.Equals(id));
            // Note: because this query inner-joins on Exposicao, activities that have no Exposicao record are not returned (see the left-join sketch below).
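            // A minimal left-join sketch (LINQ-to-Objects GroupJoin/DefaultIfEmpty) that would also keep activities
            // without an Exposicao record. The variable name is illustrative only; the business objects are the same
            // ones used by the query below:
            //
            //   var atividadesComOuSemExposicao = AtividadeAlocadaBusiness.Consulta
            //       .Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
            //       .GroupJoin(ExposicaoBusiness.Consulta.Where(e => string.IsNullOrEmpty(e.UsuarioExclusao)).ToList(),
            //                  atl => atl.IDAtividadeAlocada,
            //                  exp => exp.idAtividadeAlocada,
            //                  (atl, exps) => new { atl, exps })
            //       .SelectMany(x => x.exps.DefaultIfEmpty(), (x, exp) => new { x.atl, Exposicao = exp })
            //       .ToList(); // Exposicao is null for activities that have no exposure yet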
List<Exposicao> ListaExposicao = (from ATL in AtividadeAlocadaBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
join ATV in AtividadesDoEstabelecimentoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on ATL.idAtividadesDoEstabelecimento equals ATV.IDAtividadesDoEstabelecimento
join Est in EstabelecimentoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on ATV.IDEstabelecimento equals Est.IDEstabelecimento
join ALOC in AlocacaoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on Est.IDEstabelecimento equals ALOC.idEstabelecimento
join EXP in ExposicaoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on ATL.IDAtividadeAlocada equals EXP.idAtividadeAlocada
where ALOC.Admissao.IDEmpregado.Equals(id)
select new Exposicao()
{
IDExposicao = EXP.IDExposicao,
TempoEstimado = EXP.TempoEstimado,
EExposicaoCalor = EXP.EExposicaoCalor,
EExposicaoInsalubre = EXP.EExposicaoInsalubre,
EExposicaoSeg = EXP.EExposicaoSeg,
EProbabilidadeSeg = EXP.EProbabilidadeSeg,
ESeveridadeSeg = EXP.ESeveridadeSeg,
AtividadeAlocada = new AtividadeAlocada()
{
idAlocacao = ATL.idAlocacao,
idAtividadesDoEstabelecimento = ATL.idAtividadesDoEstabelecimento,
IDAtividadeAlocada = ATL.IDAtividadeAlocada,
AtividadesDoEstabelecimento = new AtividadesDoEstabelecimento()
{
DescricaoDestaAtividade = ATV.DescricaoDestaAtividade,
Estabelecimento = new Estabelecimento()
{
IDEstabelecimento = Est.IDEstabelecimento,
Descricao = Est.Descricao
}
}
}
}
).ToList();
ViewBag.ListaExposicao = ListaExposicao;
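            // All activities allocated to this employee: AtividadeAlocada -> AtividadesDoEstabelecimento -> Alocacao -> Admissao -> Empregado -> Estabelecimento.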
List<AtividadeAlocada> ListaAtividades = (from ATL in AtividadeAlocadaBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
join ATV in AtividadesDoEstabelecimentoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on ATL.idAtividadesDoEstabelecimento equals ATV.IDAtividadesDoEstabelecimento
join ALOC in AlocacaoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on ATL.idAlocacao equals ALOC.IDAlocacao
join ADM in AdmissaoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on ALOC.IdAdmissao equals ADM.IDAdmissao
join Emp in EmpregadoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on ADM.IDEmpregado equals Emp.IDEmpregado
join Est in EstabelecimentoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on ATV.IDEstabelecimento equals Est.IDEstabelecimento
where Emp.IDEmpregado.Equals(id)
select new AtividadeAlocada()
{
idAlocacao = ATL.idAlocacao,
idAtividadesDoEstabelecimento = ATL.idAtividadesDoEstabelecimento,
IDAtividadeAlocada = ATL.IDAtividadeAlocada,
Alocacao = new Alocacao()
{
IDAlocacao = ALOC.IDAlocacao,
Admissao =new Admissao()
{
Empregado = new Empregado()
{
Nome = Emp.Nome,
CPF= Emp.CPF,
},
},
},
AtividadesDoEstabelecimento = new AtividadesDoEstabelecimento()
{
DescricaoDestaAtividade = ATV.DescricaoDestaAtividade,
IDAtividadesDoEstabelecimento = ATV.IDAtividadesDoEstabelecimento,
Estabelecimento = new Estabelecimento()
{
IDEstabelecimento = Est.IDEstabelecimento,
Descricao = Est.Descricao
}
}
}
).ToList();
ViewBag.ListaAtividade = ListaAtividades;
//ViewBag.Alocaçao = AlocacaoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao) && (p.Admissao.IDEmpregado.Equals(id)) && (p.Ativado == "Ativado")).ToList();
            // Check whether any Exposicao records exist for this employee
var Expo = (from EX in ExposicaoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
join ATA in AtividadeAlocadaBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on EX.idAtividadeAlocada equals ATA.IDAtividadeAlocada
join AlOC in AlocacaoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on ATA.idAlocacao equals AlOC.IDAlocacao
join ADM in AdmissaoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on AlOC.IdAdmissao equals ADM.IDAdmissao
join EMP in EmpregadoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on ADM.IDEmpregado equals EMP.IDEmpregado
join ATE in AtividadesDoEstabelecimentoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on ATA.idAtividadesDoEstabelecimento equals ATE.IDAtividadesDoEstabelecimento
where EMP.IDEmpregado.Equals(id)
select new Exposicao()
{
IDExposicao = EX.IDExposicao,
TempoEstimado = EX.TempoEstimado,
EExposicaoCalor = EX.EExposicaoCalor,
EExposicaoInsalubre = EX.EExposicaoInsalubre,
EExposicaoSeg = EX.EExposicaoSeg,
EProbabilidadeSeg = EX.EProbabilidadeSeg,
ESeveridadeSeg = EX.ESeveridadeSeg,
AtividadeAlocada = new AtividadeAlocada()
{
IDAtividadeAlocada = ATA.IDAtividadeAlocada,
                                idAlocacao = ATA.idAlocacao,
idAtividadesDoEstabelecimento =ATA.idAtividadesDoEstabelecimento,
Alocacao = new Alocacao()
{
IDAlocacao = AlOC.IDAlocacao,
},
AtividadesDoEstabelecimento = new AtividadesDoEstabelecimento()
{
IDAtividadesDoEstabelecimento = ATE.IDAtividadesDoEstabelecimento,
DescricaoDestaAtividade = ATE.DescricaoDestaAtividade,
IDEstabelecimento = ATE.IDEstabelecimento,
}
}
}
).ToList();
ViewBag.Expo = Expo;
return View(oAdmissao);
}
public ActionResult ListaExposicao(string idAlocacao,string idAtividadeAlocada, string Nome, string cpf, string idAtividadeEstabelecimento)
{
var Expo = (from EX in ExposicaoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
join ATA in AtividadeAlocadaBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on EX.idAtividadeAlocada equals ATA.IDAtividadeAlocada
join AlOC in AlocacaoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on ATA.idAlocacao equals AlOC.IDAlocacao
join ADM in AdmissaoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on AlOC.IdAdmissao equals ADM.IDAdmissao
join EMP in EmpregadoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on ADM.IDEmpregado equals EMP.IDEmpregado
join ATE in AtividadesDoEstabelecimentoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on ATA.idAtividadesDoEstabelecimento equals ATE.IDAtividadesDoEstabelecimento
where EX.idAlocacao.Equals(idAlocacao) && EX.idAtividadeAlocada.Equals(idAtividadeAlocada)
select new Exposicao()
{
IDExposicao = EX.IDExposicao,
TempoEstimado = EX.TempoEstimado,
EExposicaoCalor = EX.EExposicaoCalor,
EExposicaoInsalubre = EX.EExposicaoInsalubre,
EExposicaoSeg = EX.EExposicaoSeg,
EProbabilidadeSeg = EX.EProbabilidadeSeg,
ESeveridadeSeg = EX.ESeveridadeSeg,
AtividadeAlocada = new AtividadeAlocada()
{
IDAtividadeAlocada = ATA.IDAtividadeAlocada,
                                idAlocacao = ATA.idAlocacao,
idAtividadesDoEstabelecimento = ATA.idAtividadesDoEstabelecimento,
Alocacao = new Alocacao()
{
IDAlocacao = AlOC.IDAlocacao,
},
AtividadesDoEstabelecimento = new AtividadesDoEstabelecimento()
{
IDAtividadesDoEstabelecimento = ATE.IDAtividadesDoEstabelecimento,
DescricaoDestaAtividade = ATE.DescricaoDestaAtividade,
IDEstabelecimento = ATE.IDEstabelecimento,
}
},
}
).ToList();
ViewBag.Expo = Expo;
ViewBag.Nome = Nome;
ViewBag.cpf = cpf;
List<Exposicao> ListaExpo = (from EXP in ExposicaoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
join ATL in AtividadeAlocadaBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on EXP.idAtividadeAlocada equals ATL.IDAtividadeAlocada
join ALOC in AlocacaoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on ATL.idAlocacao equals ALOC.IDAlocacao
//join TR in TipoDeRiscoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
//on EXP.idTipoDeRisco equals TR.IDTipoDeRisco
//join ATV in AtividadesDoEstabelecimentoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
//on ATL.idAtividadesDoEstabelecimento equals ATV.IDAtividadesDoEstabelecimento
//join Est in EstabelecimentoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
//on ATV.IDEstabelecimento equals Est.IDEstabelecimento
//where ATL.idAtividadesDoEstabelecimento.Equals(idAtividadesDoEstabelecimento)
where EXP.idAlocacao.Equals(idAlocacao) && EXP.idAtividadeAlocada.Equals(idAtividadeAlocada)
select new Exposicao()
{
IDExposicao = EXP.IDExposicao,
TempoEstimado = EXP.TempoEstimado,
EExposicaoCalor = EXP.EExposicaoCalor,
EExposicaoInsalubre = EXP.EExposicaoInsalubre,
EExposicaoSeg = EXP.EExposicaoSeg,
EProbabilidadeSeg = EXP.EProbabilidadeSeg,
ESeveridadeSeg = EXP.ESeveridadeSeg,
AtividadeAlocada = new AtividadeAlocada()
{
idAlocacao = ATL.idAlocacao,
idAtividadesDoEstabelecimento = ATL.idAtividadesDoEstabelecimento,
IDAtividadeAlocada = ATL.IDAtividadeAlocada,
//AtividadesDoEstabelecimento = new AtividadesDoEstabelecimento()
//{
// DescricaoDestaAtividade = ATV.DescricaoDestaAtividade,
// Estabelecimento = new Estabelecimento()
// {
// IDEstabelecimento = Est.IDEstabelecimento,
// Descricao = Est.Descricao
// }
//}
}
}).ToList();
ViewBag.ListaAtividade = ListaExpo;
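            // Risks already linked to this exposure, with their hazardous event, possible damages and potential danger descriptions.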
var TipoRisco = (from EX in ExposicaoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
join TR in TipoDeRiscoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on EX.idTipoDeRisco equals TR.IDTipoDeRisco
join ATE in AtividadesDoEstabelecimentoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on TR.idAtividadesDoEstabelecimento equals ATE.IDAtividadesDoEstabelecimento
join ATL in AtividadeAlocadaBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on EX.idAtividadeAlocada equals ATL.IDAtividadeAlocada
join EP in EventoPerigosoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on TR.idEventoPerigoso equals EP.IDEventoPerigoso
join PD in PossiveisDanosBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on TR.idPossiveisDanos equals PD.IDPossiveisDanos
join PP in PerigoPotencialBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on TR.idPerigoPotencial equals PP.IDPerigoPotencial
where EX.idAlocacao.Equals(idAlocacao) && EX.idAtividadeAlocada.Equals(idAtividadeAlocada)
select new Exposicao()
{
IDExposicao = EX.IDExposicao,
TempoEstimado = EX.TempoEstimado,
EExposicaoCalor = EX.EExposicaoCalor,
EExposicaoInsalubre = EX.EExposicaoInsalubre,
EExposicaoSeg = EX.EExposicaoSeg,
EProbabilidadeSeg = EX.EProbabilidadeSeg,
ESeveridadeSeg = EX.ESeveridadeSeg,
idTipoDeRisco = EX.idTipoDeRisco,
AtividadeAlocada = new AtividadeAlocada()
{
idAtividadesDoEstabelecimento = ATL.idAtividadesDoEstabelecimento
},
TipoDeRisco = new TipoDeRisco()
{
IDTipoDeRisco = TR.IDTipoDeRisco,
EClasseDoRisco = TR.EClasseDoRisco,
FonteGeradora = TR.FonteGeradora,
Tragetoria = TR.Tragetoria,
idPossiveisDanos = TR.idPossiveisDanos,
idEventoPerigoso = TR.idEventoPerigoso,
idPerigoPotencial = TR.idPerigoPotencial,
EventoPerigoso = new EventoPerigoso()
{
Descricao = EP.Descricao
},
PossiveisDanos = new PossiveisDanos()
{
DescricaoDanos = PD.DescricaoDanos
},
PerigoPotencial = new PerigoPotencial()
{
DescricaoEvento = PP.DescricaoEvento
}
}
}
).ToList();
ViewBag.Riscos = TipoRisco;
#region ConsultaEsquerda
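            // Commented-out draft of a left-outer-join (DefaultIfEmpty) version of the risk query, kept for reference.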
//var TipoRisco = from EX in ExposicaoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).DefaultIfEmpty()
// join TR in TipoDeRiscoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
// on EX.idTipoDeRisco equals TR.IDTipoDeRisco into _r
// from _A in _r.DefaultIfEmpty()
// join ATE in AtividadesDoEstabelecimentoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
// on _A.idAtividadesDoEstabelecimento equals ATE.IDAtividadesDoEstabelecimento into _t
// from _T in _r.DefaultIfEmpty()
// join ATL in AtividadeAlocadaBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
// on EX.idAtividadeAlocada equals ATL.IDAtividadeAlocada into _z
// from _Z in _r.DefaultIfEmpty()
// join EP in EventoPerigosoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
// on _A.idEventoPerigoso equals EP.IDEventoPerigoso into _e
// from _E in _r.DefaultIfEmpty()
// join PD in PossiveisDanosBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
// on _A.idPossiveisDanos equals PD.IDPossiveisDanos into _p
// from _P in _r.DefaultIfEmpty()
// join PP in PerigoPotencialBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
// on _A.idPerigoPotencial equals PP.IDPerigoPotencial into _Q
// from _q in _r.DefaultIfEmpty()
// where EX.idAlocacao.Equals(idAlocacao) && EX.idAtividadeAlocada.Equals(idAtividadeAlocada)
// select new
// {
// IDExposicao = EX.IDExposicao,
// TempoEstimado = EX.TempoEstimado,
// EExposicaoCalor = EX.EExposicaoCalor,
// EExposicaoInsalubre = EX.EExposicaoInsalubre,
// EExposicaoSeg = EX.EExposicaoSeg,
// EProbabilidadeSeg = EX.EProbabilidadeSeg,
// ESeveridadeSeg = EX.ESeveridadeSeg,
// idTipoDeRisco = EX.idTipoDeRisco,
// idAtividadesDoEstabelecimento = _Z.idAtividadesDoEstabelecimento,
// IDTipoDeRisco = _A.IDTipoDeRisco,
// EClasseDoRisco = _A.EClasseDoRisco,
// FonteGeradora = _A.FonteGeradora,
// Tragetoria = _A.Tragetoria,
// idPossiveisDanos = _A.idPossiveisDanos,
// idEventoPerigoso = _A.idEventoPerigoso,
// idPerigoPotencial = _A.idPerigoPotencial,
// Descricao = _E.EventoPerigoso.Descricao,
// DescricaoDanos = _P.PossiveisDanos.DescricaoDanos,
// DescricaoEvento = _q.PerigoPotencial.DescricaoEvento
// };
//ViewBag.Riscos = TipoRisco;
#endregion
List<string> risc = new List<string>();
foreach (var iten in TipoRisco)
{
risc.Add(iten.idTipoDeRisco);
}
ViewBag.risc = risc;
ViewBag.totalrisc = risc.Count();
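            // All risks registered for the establishment activity received as a parameter, independent of whether they are already linked to this exposure.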
var TodosRiscos = (from TR in TipoDeRiscoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
join ATE in AtividadesDoEstabelecimentoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on TR.idAtividadesDoEstabelecimento equals ATE.IDAtividadesDoEstabelecimento
join AL in AtividadeAlocadaBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on ATE.IDAtividadesDoEstabelecimento equals AL.idAtividadesDoEstabelecimento
join EP in EventoPerigosoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on TR.idEventoPerigoso equals EP.IDEventoPerigoso
join PD in PossiveisDanosBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on TR.idPossiveisDanos equals PD.IDPossiveisDanos
join PP in PerigoPotencialBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on TR.idPerigoPotencial equals PP.IDPerigoPotencial
where ATE.IDAtividadesDoEstabelecimento.Equals(idAtividadeEstabelecimento)
select new TipoDeRisco()
{
IDTipoDeRisco = TR.IDTipoDeRisco,
EClasseDoRisco = TR.EClasseDoRisco,
FonteGeradora = TR.FonteGeradora,
Tragetoria = TR.Tragetoria,
idPossiveisDanos = TR.idPossiveisDanos,
idEventoPerigoso = TR.idEventoPerigoso,
idPerigoPotencial = TR.idPerigoPotencial,
EventoPerigoso = new EventoPerigoso()
{
Descricao = EP.Descricao
},
PossiveisDanos = new PossiveisDanos()
{
DescricaoDanos = PD.DescricaoDanos
},
PerigoPotencial = new PerigoPotencial()
{
DescricaoEvento = PP.DescricaoEvento
},
AtividadesDoEstabelecimento = new AtividadesDoEstabelecimento()
{
IDAtividadesDoEstabelecimento = ATE.IDAtividadesDoEstabelecimento,
},
}
).ToList();
ViewBag.TipoRisco = TodosRiscos;
ViewBag.TipoRisc = TodosRiscos.ToString();
ViewBag.Exposi = ListaExpo;
try
{
Exposicao oExposicao = ExposicaoBusiness.Consulta.FirstOrDefault(p => p.idAtividadeAlocada.Equals(idAtividadeAlocada) && p.idAlocacao.Equals(idAlocacao));
if (oExposicao == null)
{
return Json(new { resultado = new RetornoJSON() { Alerta = "Exposição não encontrada. Solicite ao Administrador que cadastre esta exposição!." } });
}
else
{
return Json(new { data = RenderRazorViewToString("_ListaExposicao", oExposicao) });
}
}
catch (Exception ex)
{
if (ex.GetBaseException() == null)
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.Message } });
}
else
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.GetBaseException().Message } });
}
}
}
        // Once the employee has been dismissed, return this view
public ActionResult EmpregadoDemitido(string id)
{
ViewBag.Demitir = AdmissaoBusiness.Consulta.Where((p=>p.IDAdmissao.Equals(id))).ToList();
return View();
}
public ActionResult EmpregadoAdmitidoDetalhes(string id)
{
ViewBag.empregado = EmpregadoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao) && (p.IDEmpregado.Equals(id))).ToList();
try
{
Admissao oAdmissao = AdmissaoBusiness.Consulta.FirstOrDefault(p => p.IDEmpregado.Equals(id));
if (oAdmissao == null)
{
return Json(new { resultado = new RetornoJSON() { Alerta = "Empregado com CPF '" +id+ "' não encontrado." } });
}
else
{
return Json(new { data = RenderRazorViewToString("_Detalhes", oAdmissao) });
}
}
catch (Exception ex)
{
if (ex.GetBaseException() == null)
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.Message } });
}
else
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.GetBaseException().Message } });
}
}
}
//public ActionResult BuscarDetalhesDosRiscos(string idEstabelecimento)
//{
// ViewBag.Imagens = RiscosDoEstabelecimentoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao) && (p.IDEstabelecimentoImagens.Equals(idEstabelecimento))).ToList();
// try
// {
// AtividadesDoEstabelecimento oRiscosDoEstabelecimento = RiscosDoEstabelecimentoBusiness.Consulta.FirstOrDefault(p => p.IDEstabelecimentoImagens.Equals(idEstabelecimento));
// if (oRiscosDoEstabelecimento == null)
// {
// return Json(new { resultado = new RetornoJSON() { Alerta = "Imagens4 não encontrada." } });
// }
// else
// {
// return Json(new { data = RenderRazorViewToString("_Detalhes", oRiscosDoEstabelecimento) });
// }
// }
// catch (Exception ex)
// {
// if (ex.GetBaseException() == null)
// {
// return Json(new { resultado = new RetornoJSON() { Erro = ex.Message } });
// }
// else
// {
// return Json(new { resultado = new RetornoJSON() { Erro = ex.GetBaseException().Message } });
// }
// }
//}
//public ActionResult BuscarDetalhesEstabelecimentoImagens(string IDEstabelecimento)
//{
// ViewBag.Imagens = EstabelecimentoImagensBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao) && (p.IDEstabelecimento.Equals(IDEstabelecimento))).ToList();
// try
// {
// EstabelecimentoAmbientes oEstabelecimentoImagens = EstabelecimentoImagensBusiness.Consulta.FirstOrDefault(p => p.IDEstabelecimento.Equals(IDEstabelecimento));
// if (oEstabelecimentoImagens == null)
// {
// return Json(new { resultado = new RetornoJSON() { Alerta = "Imagens3 não encontrada." } });
// }
// else
// {
// return Json(new { data = RenderRazorViewToString("_Detalhes", oEstabelecimentoImagens) });
// }
// }
// catch (Exception ex)
// {
// if (ex.GetBaseException() == null)
// {
// return Json(new { resultado = new RetornoJSON() { Erro = ex.Message } });
// }
// else
// {
// return Json(new { resultado = new RetornoJSON() { Erro = ex.GetBaseException().Message } });
// }
// }
//}
public ActionResult Novo(string id)
{
            // Employee id (IDEmpregado) received as a route parameter
ViewBag.EmpID = id;
ViewBag.Sigla = new SelectList(DepartamentoBusiness.Consulta.ToList(), "IDDepartamento", "Sigla");
ViewBag.Empresas = new SelectList(EmpresaBusiness.Consulta.ToList(), "IDEmpresa", "NomeFantasia");
ViewBag.Admissao = AdmissaoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao) && (p.IDEmpregado.Equals(id))).ToList();
ViewBag.Empregado = EmpregadoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao) && (p.IDEmpregado.Equals(id))).ToList();
//ViewBag.RegistroID = new SelectList(EstabelecimentoImagensBusiness.Consulta, "RegistroID", "Diretoria");
return View();
}
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Cadastrar(Admissao oAdmissao, string EmpID)
{
            // Employee id received as a parameter; the admission is linked to this employee
oAdmissao.IDEmpregado = EmpID;
if (ModelState.IsValid)
{
try
{
AdmissaoBusiness.Inserir(oAdmissao);
TempData["MensagemSucesso"] = "O empregado foi admitido com sucesso.";
//var iAdmin = oAdmissao.IDAdmissao;
return Json(new { resultado = new RetornoJSON() { URL = Url.Action("PerfilEmpregado", "Admissao", new { id = EmpID }) } });
}
catch (Exception ex)
{
if (ex.GetBaseException() == null)
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.Message } });
}
else
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.GetBaseException().Message } });
}
}
}
else
{
return Json(new { resultado = TratarRetornoValidacaoToJSON() });
}
}
public ActionResult Edicao(string id)
{
return View(EstabelecimentoImagensBusiness.Consulta.FirstOrDefault(p => p.IDEstabelecimentoImagens.Equals(id)));
}
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Atualizar(EstabelecimentoAmbiente oEstabelecimentoImagens)
{
if (ModelState.IsValid)
{
try
{
EstabelecimentoImagensBusiness.Alterar(oEstabelecimentoImagens);
TempData["MensagemSucesso"] = "A imagem '" + oEstabelecimentoImagens.NomeDaImagem + "' foi atualizada com sucesso.";
return Json(new { resultado = new RetornoJSON() { URL = Url.Action("Index", "EstabelecimentoImagens") } });
}
catch (Exception ex)
{
if (ex.GetBaseException() == null)
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.Message } });
}
else
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.GetBaseException().Message } });
}
}
}
else
{
return Json(new { resultado = TratarRetornoValidacaoToJSON() });
}
}
[HttpPost]
public ActionResult Terminar(string IDAdmissao)
{
try
{
Admissao oAdmissao = AdmissaoBusiness.Consulta.FirstOrDefault(p => p.IDAdmissao.Equals(IDAdmissao));
if (oAdmissao == null)
{
return Json(new { resultado = new RetornoJSON() { Erro = "Não foi possível excluir esta Admissão." } });
}
else
{
oAdmissao.DataExclusao = DateTime.Now;
oAdmissao.UsuarioExclusao = "LoginTeste";
oAdmissao.Admitido = "Demitido";
AdmissaoBusiness.Alterar(oAdmissao);
return Json(new { resultado = new RetornoJSON() { Sucesso = "O Empregado '" + oAdmissao.Empregado.Nome + "' foi demitido com sucesso." } });
}
}
catch (Exception ex)
{
if (ex.GetBaseException() == null)
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.Message } });
}
else
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.GetBaseException().Message } });
}
}
}
[HttpPost]
public ActionResult TerminarComRedirect(string IDAdmissao)
{
try
{
Admissao oAdmissao = AdmissaoBusiness.Consulta.FirstOrDefault(p => p.IDAdmissao.Equals(IDAdmissao));
if (oAdmissao == null)
{
return Json(new { resultado = new RetornoJSON() { Erro = "Não foi possível excluir esta Admissão." } });
}
else
{
oAdmissao.DataExclusao = DateTime.Now;
oAdmissao.UsuarioExclusao = "LoginTeste";
oAdmissao.Admitido = "Demitido";
AdmissaoBusiness.Alterar(oAdmissao);
TempData["MensagemSucesso"] = "O Empregado '" + oAdmissao.Empregado.Nome + "' foi demitido com sucesso.";
return Json(new { resultado = new RetornoJSON() { URL = Url.Action("EmpregadoDemitido", "Admissao", new { id = IDAdmissao }) } });
}
}
catch (Exception ex)
{
if (ex.GetBaseException() == null)
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.Message } });
}
else
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.GetBaseException().Message } });
}
}
}
[RestritoAAjax]
public ActionResult _Upload()
{
try
{
return PartialView("_Upload");
}
catch (Exception ex)
{
Response.StatusCode = 500;
return Content(ex.Message, "text/html");
}
}
[HttpPost]
[RestritoAAjax]
[ValidateAntiForgeryToken]
public ActionResult Upload()
{
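            // Saves the posted image files to a per-day temp folder (%TEMP%\GIS\yyyyMMdd\Estabelecimento\LoginTeste); only PNG, JPG, JPEG and GIF extensions are accepted.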
try
{
string fName = string.Empty;
string msgErro = string.Empty;
foreach (string fileName in Request.Files.AllKeys)
{
HttpPostedFileBase oFile = Request.Files[fileName];
fName = oFile.FileName;
if (oFile != null)
{
string sExtensao = oFile.FileName.Substring(oFile.FileName.LastIndexOf("."));
if (sExtensao.ToUpper().Contains("PNG") || sExtensao.ToUpper().Contains("JPG") || sExtensao.ToUpper().Contains("JPEG") || sExtensao.ToUpper().Contains("GIF"))
{
                            // Once authentication is fully in place, change this to include a folder named after the user's login
string sLocalFile = Path.Combine(Path.GetTempPath(), "GIS");
sLocalFile = Path.Combine(sLocalFile, DateTime.Now.ToString("yyyyMMdd"));
sLocalFile = Path.Combine(sLocalFile, "Estabelecimento");
sLocalFile = Path.Combine(sLocalFile, "LoginTeste");
if (!System.IO.Directory.Exists(sLocalFile))
Directory.CreateDirectory(sLocalFile);
else
{
                                // Clean up the files already in this folder, since the user may just be replacing the file.
                                // Clear it so no leftover files accumulate.
                                // The file saved below will be removed after the record is created.
                                // If the user cancels the registration, the directory-cleanup routine is responsible for removing it.
foreach (string iFile in System.IO.Directory.GetFiles(sLocalFile))
{
System.IO.File.Delete(iFile);
}
}
sLocalFile = Path.Combine(sLocalFile, oFile.FileName);
oFile.SaveAs(sLocalFile);
}
else
{
throw new Exception("Extensão do arquivo não permitida.");
}
}
}
if (string.IsNullOrEmpty(msgErro))
return Json(new { sucesso = "O upload do arquivo '" + fName + "' foi realizado com êxito.", arquivo = fName, erro = msgErro });
else
return Json(new { erro = msgErro });
}
catch (Exception ex)
{
return Json(new { erro = ex.Message });
}
}
private string RenderRazorViewToString(string viewName, object model = null)
{
ViewData.Model = model;
using (var sw = new System.IO.StringWriter())
{
var viewResult = ViewEngines.Engines.FindPartialView(ControllerContext,
viewName);
var viewContext = new ViewContext(ControllerContext, viewResult.View,
ViewData, TempData, sw);
viewResult.View.Render(viewContext, sw);
viewResult.ViewEngine.ReleaseView(ControllerContext, viewResult.View);
return sw.GetStringBuilder().ToString();
}
}
public RetornoJSON TratarRetornoValidacaoToJSON()
{
string msgAlerta = string.Empty;
foreach (ModelState item in ModelState.Values)
{
if (item.Errors.Count > 0)
{
foreach (System.Web.Mvc.ModelError i in item.Errors)
{
msgAlerta += i.ErrorMessage;
}
}
}
return new RetornoJSON()
{
Alerta = msgAlerta,
Erro = string.Empty,
Sucesso = string.Empty
};
}
}
}
<file_sep>using GISModel.DTO.Shared;
using System;
using System.Web;
using System.Web.Mvc;
namespace GISWeb.Controllers
{
public class BaseController : Controller
{
public ActionResult ChangeLanguage(string lang)
{
new LanguageMang().SetLanguage(lang);
return RedirectToAction("Index", "Home");
}
protected override IAsyncResult BeginExecuteCore(AsyncCallback callback, object state)
{
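            // Resolve the request culture: use the "culture" cookie if present, otherwise the browser's first Accept-Language value, otherwise the application default.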
string lang = null;
HttpCookie langCookie = Request.Cookies["culture"];
if (langCookie != null)
{
lang = langCookie.Value;
}
else
{
var userLanguage = Request.UserLanguages;
var userLang = userLanguage != null ? userLanguage[0] : "";
if (userLang != "")
{
lang = userLang;
}
else
{
lang = LanguageMang.GetDefaultLanguage();
}
}
new LanguageMang().SetLanguage(lang);
return base.BeginExecuteCore(callback, state);
}
public RetornoJSON TratarRetornoValidacaoToJSON()
{
string msgAlerta = string.Empty;
foreach (ModelState item in ModelState.Values)
{
if (item.Errors.Count > 0)
{
foreach (System.Web.Mvc.ModelError i in item.Errors)
{
if (!string.IsNullOrEmpty(msgAlerta))
msgAlerta += ", ";
msgAlerta += i.ErrorMessage;
}
}
}
return new RetornoJSON()
{
Alerta = msgAlerta,
Erro = string.Empty,
Sucesso = string.Empty
};
}
public string RenderRazorViewToString(string viewName, object model = null)
{
ViewData.Model = model;
using (var sw = new System.IO.StringWriter())
{
var viewResult = ViewEngines.Engines.FindPartialView(ControllerContext,
viewName);
var viewContext = new ViewContext(ControllerContext, viewResult.View,
ViewData, TempData, sw);
viewResult.View.Render(viewContext, sw);
viewResult.ViewEngine.ReleaseView(ControllerContext, viewResult.View);
return sw.GetStringBuilder().ToString();
}
}
}
}<file_sep>using GISModel.Enums;
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace GISModel.Entidades
{
[Table("tbTipoDeRisco")]
public class TipoDeRisco: EntidadeBase
{
[Key]
public string IDTipoDeRisco { get; set; }
[Display(Name ="Descrição do evento Perigoso")]
public string idPerigoPotencial { get; set; }
[Display(Name ="Possíveis Danos a Saúde")]
public string idPossiveisDanos { get; set; }
public string idEventoPerigoso { get; set; }
[Display(Name = "Atividade do Estabelecimento")]
public string idAtividadesDoEstabelecimento { get; set; }
[Display(Name ="Classifique o Risco")]
public EClasseDoRisco EClasseDoRisco { get; set; }
[Display(Name = "Fonte Geradora")]
public string FonteGeradora { get; set; }
[Display(Name = "Tragetória")]
public string Tragetoria { get; set; }
[Display(Name ="Vincular")]
public bool Vinculado { get; set; }
public virtual EventoPerigoso EventoPerigoso { get; set; }
public virtual PossiveisDanos PossiveisDanos { get; set; }
public virtual PerigoPotencial PerigoPotencial { get; set; }
public virtual AtividadesDoEstabelecimento AtividadesDoEstabelecimento {get; set;}
//public virtual ICollection<AtividadesDoEstabelecimento> AtividadesDoEstabelecimento { get; set; }
}
}
<file_sep>using GISModel.Entidades;
using System.Data.Entity;
namespace GISCore.Repository.Configuration
{
public class SESTECContext : DbContext
{
public SESTECContext() : base("SESTECConection")
{
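            // Disable the Entity Framework database initializer: the schema is not created or changed automatically at run time.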
Database.SetInitializer<SESTECContext>(null);
}
public DbSet<DocAtividade> DocAtividade { get; set; }
public DbSet<DocsPorAtividade> DocsPorAtividade { get; set; }
public DbSet<DocumentosPessoal> DocumentosPessoal { get; set; }
public DbSet<AnaliseRisco> AnaliseRisco { get; set; }
public DbSet<Rel_AtivEstabTipoRisco> Rel_AtivEstabTipoRisco { get; set; }
public DbSet<Empresa> Empresa { get; set; }
public DbSet<Departamento> Departamento { get; set; }
public DbSet<Estabelecimento> Estabelecimento { get; set; }
public DbSet<Contrato> Contrato { get; set; }
public DbSet<AtividadesDoEstabelecimento> AtividadesDoEstabelecimento { get; set; }
public DbSet<TipoDeRisco> TipoDeRisco { get; set; }
public DbSet<MedidasDeControleExistentes> MedidasDeControleExistentes { get; set; }
public DbSet<PossiveisDanos> PossiveisDanos { get; set; }
public DbSet<EventoPerigoso> EventoPerigoso { get; set; }
public DbSet<EstabelecimentoAmbiente> EstabelecimentoAmbiente { get; set; }
public DbSet<Empregado> Empregado { get; set; }
public DbSet<Admissao> Admissao { get; set; }
public DbSet<Cargo> Cargo { get; set; }
public DbSet<Funcao> Funcao { get; set; }
public DbSet<Atividade> Atividade { get; set; }
public DbSet<Alocacao> Alocacao { get; set; }
public DbSet<Equipe> Equipe { get; set; }
public DbSet<AtividadeAlocada> AtividadeAlocada { get; set; }
public DbSet<PlanoDeAcao> PlanoDeAcao { get; set; }
public DbSet<Exposicao> Exposicao { get; set; }
public DbSet<PerigoPotencial> PerigoPotencial { get; set; }
public DbSet<Usuario> Usuario { get; set; }
}
}
<file_sep>using GISCore.Business.Abstract;
using GISCore.Business.Concrete;
using GISModel.DTO.Shared;
using GISModel.Entidades;
using GISWeb.Infraestrutura.Filters;
using Ninject;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Web;
using System.Web.Mvc;
namespace GISWeb.Controllers
{
public class ExposicaoController : Controller
{
        #region Inject
[Inject]
public IAdmissaoBusiness AdmissaoBusiness { get; set; }
[Inject]
public IDepartamentoBusiness DepartamentoBusiness { get; set; }
[Inject]
public IEmpresaBusiness EmpresaBusiness { get; set; }
[Inject]
public IEmpregadoBusiness EmpregadoBusiness { get; set; }
[Inject]
public IEstabelecimentoAmbienteBusiness EstabelecimentoImagensBusiness { get; set; }
[Inject]
public IEstabelecimentoBusiness EstabelecimentoBusiness { get; set; }
[Inject]
public IAtividadesDoEstabelecimentoBusiness AtividadesDoEstabelecimentoBusiness { get; set; }
[Inject]
public IAlocacaoBusiness AlocacaoBusiness { get; set; }
[Inject]
public IAtividadeBusiness AtividadeDeRiscoBusiness { get; set; }
[Inject]
public IExposicaoBusiness ExposicaoBusiness { get; set; }
[Inject]
public IAtividadeAlocadaBusiness AtividadeAlocadaBusiness { get; set; }
[Inject]
public ITipoDeRiscoBusiness TipoDeRiscoBusiness { get; set; }
#endregion
public ActionResult Novo(Exposicao oExposicao, string IDAtividadeAlocada, string idAlocacao, string idTipoDeRisco, string idEmpregado)
{
if(ExposicaoBusiness.Consulta.Any(p=>p.idAtividadeAlocada.Equals(IDAtividadeAlocada) && p.idTipoDeRisco.Equals(idTipoDeRisco)))
{
return Json(new { resultado = new RetornoJSON() { Alerta = "Já existe uma exposição para esta Alocação!" } });
}
else {
ViewBag.AtivAloc = IDAtividadeAlocada;
ViewBag.IDaloc = idAlocacao;
ViewBag.IDRisc= idTipoDeRisco;
ViewBag.IdEmpregado = idEmpregado;
ViewBag.Imagens = AtividadeAlocadaBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao) && (p.IDAtividadeAlocada.Equals(IDAtividadeAlocada))).ToList();
//var Riscos = (from TP in TipoDeRiscoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
// join ATE in AtividadesDoEstabelecimentoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
// on TP.idAtividadesDoEstabelecimento equals ATE.IDAtividadesDoEstabelecimento
// join ATA in AtividadeAlocadaBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
// on ATE.IDAtividadesDoEstabelecimento equals ATA.idAtividadesDoEstabelecimento
// where TP.IDTipoDeRisco.Equals(idTipoDeRisco)
// select new TipoDeRisco()
// {
// IDTipoDeRisco = TP.IDTipoDeRisco,
// idPossiveisDanos = TP.idPossiveisDanos,
// idAtividadesDoEstabelecimento = TP.idAtividadesDoEstabelecimento,
// idEventoPerigoso = TP.idEventoPerigoso,
// idPerigoPotencial = TP.idPerigoPotencial,
// EClasseDoRisco = TP.EClasseDoRisco,
// FonteGeradora = TP.FonteGeradora,
// Tragetoria = TP.Tragetoria
// }).ToList();
// ViewBag.Riscos = Riscos;
var EXPO = (from TR in TipoDeRiscoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
join ATE in AtividadesDoEstabelecimentoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao)).ToList()
on TR.idAtividadesDoEstabelecimento equals ATE.IDAtividadesDoEstabelecimento
where TR.IDTipoDeRisco.Equals(idTipoDeRisco)
select new TipoDeRisco()
{
IDTipoDeRisco = TR.IDTipoDeRisco,
idPossiveisDanos = TR.idPossiveisDanos,
idAtividadesDoEstabelecimento = TR.idAtividadesDoEstabelecimento,
idEventoPerigoso = TR.idEventoPerigoso,
idPerigoPotencial = TR.idPerigoPotencial,
EClasseDoRisco = TR.EClasseDoRisco,
FonteGeradora = TR.FonteGeradora,
Tragetoria = TR.Tragetoria
}).ToList();
ViewBag.Riscos = EXPO;
var Aloc = (from a in AlocacaoBusiness.Consulta.Where(p => string.IsNullOrEmpty(p.UsuarioExclusao) && (p.IDAlocacao.Equals(idAlocacao))).ToList()
//group a by a.IDAlocacao into g
select new
{
id = a.IDAlocacao,
//lista = g.Key,
}
).ToList();
List<string> Filtro = new List<string>();
var filtro = "";
foreach ( var item in Aloc)
{
filtro=item.id;
}
List<string> model = Filtro;
ViewBag.IDaloc = filtro;
try
{
if (oExposicao == null)
{
return Json(new { resultado = new RetornoJSON() { Alerta = "Imagem não encontrada." } });
}
else
{
return Json(new { data = RenderRazorViewToString("_Novo", oExposicao) });
}
}
catch (Exception ex)
{
if (ex.GetBaseException() == null)
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.Message } });
}
else
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.GetBaseException().Message } });
}
}
}
//return View();
}
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Cadastrar(Exposicao oExposicao, string idAtividadeAlocada,string idAlocacao, string idTipoDeRisco, string idEmpregado)
{
if (ModelState.IsValid)
{
try
{
oExposicao.idAtividadeAlocada = idAtividadeAlocada;
oExposicao.idAlocacao = idAlocacao;
oExposicao.idTipoDeRisco = idTipoDeRisco;
ExposicaoBusiness.Inserir(oExposicao);
TempData["MensagemSucesso"] = "A Exposição foi registrada com sucesso.";
//return Json(new { data = RenderRazorViewToString("_DetalhesAmbienteAlocado", oExposicao) });
return Json(new { resultado = new RetornoJSON() { URL = Url.Action("PerfilEmpregado", "Admissao", new { id = idEmpregado}) } });
//return Json(new { resultado = new RetornoJSON() { Sucesso = "Exposição Cadastrada com sucesso!" } });
}
catch (Exception ex)
{
if (ex.GetBaseException() == null)
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.Message } });
}
else
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.GetBaseException().Message } });
}
}
}
else
{
return Json(new { resultado = TratarRetornoValidacaoToJSON() });
}
}
public ActionResult Edicao(string id)
{
return View(EstabelecimentoImagensBusiness.Consulta.FirstOrDefault(p => p.IDEstabelecimentoImagens.Equals(id)));
}
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Atualizar(EstabelecimentoAmbiente oEstabelecimentoImagens)
{
if (ModelState.IsValid)
{
try
{
EstabelecimentoImagensBusiness.Alterar(oEstabelecimentoImagens);
TempData["MensagemSucesso"] = "A imagem '" + oEstabelecimentoImagens.NomeDaImagem + "' foi atualizada com sucesso.";
return Json(new { resultado = new RetornoJSON() { URL = Url.Action("Index", "EstabelecimentoImagens") } });
}
catch (Exception ex)
{
if (ex.GetBaseException() == null)
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.Message } });
}
else
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.GetBaseException().Message } });
}
}
}
else
{
return Json(new { resultado = TratarRetornoValidacaoToJSON() });
}
}
[HttpPost]
public ActionResult Terminar(string IDEstebelecimentoImagens)
{
try
{
EstabelecimentoAmbiente oEstabelecimentoImagens = EstabelecimentoImagensBusiness.Consulta.FirstOrDefault(p => p.IDEstabelecimentoImagens.Equals(IDEstebelecimentoImagens));
if (oEstabelecimentoImagens == null)
{
return Json(new { resultado = new RetornoJSON() { Erro = "Não foi possível excluir a imagem, pois a mesma não foi localizada." } });
}
else
{
//oEmpresa.DataExclusao = DateTime.Now;
oEstabelecimentoImagens.UsuarioExclusao = "LoginTeste";
EstabelecimentoImagensBusiness.Alterar(oEstabelecimentoImagens);
return Json(new { resultado = new RetornoJSON() { Sucesso = "A imagem '" + oEstabelecimentoImagens.NomeDaImagem + "' foi excluída com sucesso." } });
}
}
catch (Exception ex)
{
if (ex.GetBaseException() == null)
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.Message } });
}
else
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.GetBaseException().Message } });
}
}
}
[HttpPost]
public ActionResult TerminarComRedirect(string IDEstebelecimentoImagens)
{
try
{
EstabelecimentoAmbiente oEstabelecimentoImagens = EstabelecimentoImagensBusiness.Consulta.FirstOrDefault(p => p.IDEstabelecimentoImagens.Equals(IDEstebelecimentoImagens));
if (oEstabelecimentoImagens == null)
{
return Json(new { resultado = new RetornoJSON() { Erro = "Não foi possível excluir a imagem, pois a mesma não foi localizada." } });
}
else
{
//oEmpresa.DataExclusao = DateTime.Now;
oEstabelecimentoImagens.UsuarioExclusao = "LoginTeste";
EstabelecimentoImagensBusiness.Alterar(oEstabelecimentoImagens);
TempData["MensagemSucesso"] = "A imagem '" + oEstabelecimentoImagens.NomeDaImagem + "' foi excluída com sucesso.";
return Json(new { resultado = new RetornoJSON() { URL = Url.Action("Index", "EstabelecimentoImagens") } });
}
}
catch (Exception ex)
{
if (ex.GetBaseException() == null)
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.Message } });
}
else
{
return Json(new { resultado = new RetornoJSON() { Erro = ex.GetBaseException().Message } });
}
}
}
[RestritoAAjax]
public ActionResult _Upload()
{
try
{
return PartialView("_Upload");
}
catch (Exception ex)
{
Response.StatusCode = 500;
return Content(ex.Message, "text/html");
}
}
[HttpPost]
[RestritoAAjax]
[ValidateAntiForgeryToken]
public ActionResult Upload()
{
try
{
string fName = string.Empty;
string msgErro = string.Empty;
foreach (string fileName in Request.Files.AllKeys)
{
HttpPostedFileBase oFile = Request.Files[fileName];
fName = oFile.FileName;
if (oFile != null)
{
string sExtensao = oFile.FileName.Substring(oFile.FileName.LastIndexOf("."));
if (sExtensao.ToUpper().Contains("PNG") || sExtensao.ToUpper().Contains("JPG") || sExtensao.ToUpper().Contains("JPEG") || sExtensao.ToUpper().Contains("GIF"))
{
//Once authentication is fully implemented, change this to include a folder named after the user's login
string sLocalFile = Path.Combine(Path.GetTempPath(), "GIS");
sLocalFile = Path.Combine(sLocalFile, DateTime.Now.ToString("yyyyMMdd"));
sLocalFile = Path.Combine(sLocalFile, "Estabelecimento");
sLocalFile = Path.Combine(sLocalFile, "LoginTeste");
if (!System.IO.Directory.Exists(sLocalFile))
Directory.CreateDirectory(sLocalFile);
else
{
//Clean up the files in this folder, since the user may just be replacing an existing file.
//Clear it so no leftover garbage remains.
//The file saved below will be removed after the record is created.
//If the user cancels the registration, the directory-cleanup routine is responsible for removing it.
foreach (string iFile in System.IO.Directory.GetFiles(sLocalFile))
{
System.IO.File.Delete(iFile);
}
}
sLocalFile = Path.Combine(sLocalFile, oFile.FileName);
oFile.SaveAs(sLocalFile);
}
else
{
throw new Exception("Extensão do arquivo não permitida.");
}
}
}
if (string.IsNullOrEmpty(msgErro))
return Json(new { sucesso = "O upload do arquivo '" + fName + "' foi realizado com êxito.", arquivo = fName, erro = msgErro });
else
return Json(new { erro = msgErro });
}
catch (Exception ex)
{
return Json(new { erro = ex.Message });
}
}
private string RenderRazorViewToString(string viewName, object model = null)
{
ViewData.Model = model;
using (var sw = new System.IO.StringWriter())
{
var viewResult = ViewEngines.Engines.FindPartialView(ControllerContext,
viewName);
var viewContext = new ViewContext(ControllerContext, viewResult.View,
ViewData, TempData, sw);
viewResult.View.Render(viewContext, sw);
viewResult.ViewEngine.ReleaseView(ControllerContext, viewResult.View);
return sw.GetStringBuilder().ToString();
}
}
public RetornoJSON TratarRetornoValidacaoToJSON()
{
string msgAlerta = string.Empty;
foreach (ModelState item in ModelState.Values)
{
if (item.Errors.Count > 0)
{
foreach (System.Web.Mvc.ModelError i in item.Errors)
{
msgAlerta += i.ErrorMessage;
}
}
}
return new RetornoJSON()
{
Alerta = msgAlerta,
Erro = string.Empty,
Sucesso = string.Empty
};
}
}
}
<file_sep>using Newtonsoft.Json;
using System;
using System.Collections.Generic;
using System.Configuration;
using System.Linq;
using System.Net;
using System.Web;
using System.Web.Mvc;
namespace GISWeb.Infraestrutura.Helpers
{
public static class reCaptchaHelper
{
public class reCaptchaResponse
{
[JsonProperty("success")]
public bool Success { get; set; }
[JsonProperty("error-codes")]
public List<string> ErrorCodes { get; set; }
}
public static IHtmlString reCaptcha(this HtmlHelper helper)
{
string publicKey = string.Empty;
if (Convert.ToBoolean(ConfigurationManager.AppSettings["AD:DMZ"]))
publicKey = ConfigurationManager.AppSettings["Recaptcha:Public:Internet"];
else
publicKey = ConfigurationManager.AppSettings["Recaptcha:Public:Intranet"];
string componente = "<div class=\"g-recaptcha\" data-sitekey=\"" + publicKey + "\"></div>";
return MvcHtmlString.Create(componente);
}
public static void Validate(string recaptchaResponse)
{
if (string.IsNullOrWhiteSpace(recaptchaResponse))
throw new Exception("A resposta do usuário de validação antibot está faltando.");
else if (Convert.ToBoolean(ConfigurationManager.AppSettings["Recaptcha:GoogleServerSideIntegration"]))
{
string privateKey = string.Empty;
if (Convert.ToBoolean(ConfigurationManager.AppSettings["AD:DMZ"]))
privateKey = ConfigurationManager.AppSettings["Recaptcha:Private:Internet"];
else
privateKey = ConfigurationManager.AppSettings["Recaptcha:Private:Intranet"];
var client = new WebClient();
var reply = client.DownloadString(string.Format("https://www.google.com/recaptcha/api/siteverify?secret={0}&response={1}", privateKey, recaptchaResponse));
var captchaResponse = JsonConvert.DeserializeObject<reCaptchaResponse>(reply);
if (!captchaResponse.Success)
{
if (captchaResponse.ErrorCodes.Count > 0)
{
string erros = string.Empty;
foreach (string erro in captchaResponse.ErrorCodes)
switch (erro.ToLower())
{
case ("missing-input-secret"):
erros += "A chave secreta de validação antibot está faltando. ";
break;
case ("invalid-input-secret"):
erros += "A chave secreta de validação antibot está inválida. ";
break;
case ("missing-input-response"):
erros += "A resposta do usuário de validação antibot está faltando. ";
break;
case ("invalid-input-response"):
erros += "A resposta do usuário de validação antibot está inválida. ";
break;
default:
erros += "Ocorreu algum erro na validação antibot. Por favor, tente novamente. ";
break;
}
erros = erros.Remove(erros.Length - 1);
throw new Exception(erros);
}
}
}
}
}
}<file_sep>namespace GISCore.Business.Concrete
{
public class UsuarioPerfisMenusViewModel
{
}
}
|
9831fe408d9daaf0ca2ec5cb93f7f36aebbb39a0
|
[
"JavaScript",
"C#"
] | 42
|
C#
|
tonihenriques/GestaoSST
|
82280fa2c4f0c264d2ed290dc899e387dd339b59
|
4de9f3cf851d3b16123a6acf3aa301103b7c7253
|
refs/heads/master
|
<file_sep>-- MySQL Script generated by MySQL Workbench
-- Thu Apr 5 15:31:09 2018
-- Model: New Model Version: 1.0
-- MySQL Workbench Forward Engineering
SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0;
SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0;
SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='TRADITIONAL,ALLOW_INVALID_DATES';
-- -----------------------------------------------------
-- Schema PHPDieppe
-- -----------------------------------------------------
-- -----------------------------------------------------
-- Schema PHPDieppe
-- -----------------------------------------------------
CREATE SCHEMA IF NOT EXISTS `PHPDieppe` DEFAULT CHARACTER SET utf8 ;
USE `PHPDieppe` ;
-- -----------------------------------------------------
-- Table `PHPDieppe`.`T_ROLES`
-- -----------------------------------------------------
CREATE TABLE IF NOT EXISTS `PHPDieppe`.`T_ROLES` (
`ID_ROLE` INT NOT NULL AUTO_INCREMENT,
`ROLLABEL` VARCHAR(45) NOT NULL,
PRIMARY KEY (`ID_ROLE`))
ENGINE = InnoDB;
-- -----------------------------------------------------
-- Table `PHPDieppe`.`T_USERS`
-- -----------------------------------------------------
CREATE TABLE IF NOT EXISTS `PHPDieppe`.`T_USERS` (
`ID_USERS` INT NOT NULL AUTO_INCREMENT,
`USENAME` VARCHAR(45) NOT NULL,
`USEFIRSTNAME` VARCHAR(80) NULL,
`USEMAIL` VARCHAR(160) NOT NULL,
`USEPASSWORD` CHAR(40) NOT NULL,
`ID_ROLE` INT NOT NULL,
PRIMARY KEY (`ID_USERS`, `ID_ROLE`),
INDEX `fk_T_USERS_T_ROLES_idx` (`ID_ROLE` ASC),
CONSTRAINT `fk_T_USERS_T_ROLES`
FOREIGN KEY (`ID_ROLE`)
REFERENCES `PHPDieppe`.`T_ROLES` (`ID_ROLE`)
ON DELETE NO ACTION
ON UPDATE NO ACTION)
ENGINE = InnoDB;
-- -----------------------------------------------------
-- Table `PHPDieppe`.`T_ARTICLES`
-- -----------------------------------------------------
CREATE TABLE IF NOT EXISTS `PHPDieppe`.`T_ARTICLES` (
`ID_ARTICLES` INT NOT NULL AUTO_INCREMENT,
`ARTTITLE` VARCHAR(200) NOT NULL,
`ARTCHAPO` VARCHAR(200) NULL,
`ARTCONTENT` TEXT NOT NULL,
`ARTDATETIME` DATETIME NULL,
`T_USERS_ID_USERS` INT NOT NULL,
`T_USERS_ID_ROLE` INT NOT NULL,
PRIMARY KEY (`ID_ARTICLES`, `T_USERS_ID_USERS`, `T_USERS_ID_ROLE`),
INDEX `fk_T_ARTICLES_T_USERS1_idx` (`T_USERS_ID_USERS` ASC, `T_USERS_ID_ROLE` ASC),
CONSTRAINT `fk_T_ARTICLES_T_USERS1`
FOREIGN KEY (`T_USERS_ID_USERS` , `T_USERS_ID_ROLE`)
REFERENCES `PHPDieppe`.`T_USERS` (`ID_USERS` , `ID_ROLE`)
ON DELETE NO ACTION
ON UPDATE NO ACTION)
ENGINE = InnoDB;
-- -----------------------------------------------------
-- Table `PHPDieppe`.`T_CATEGORIES`
-- -----------------------------------------------------
CREATE TABLE IF NOT EXISTS `PHPDieppe`.`T_CATEGORIES` (
`ID_CATEGORIES` INT NOT NULL AUTO_INCREMENT,
`CATLABEL` VARCHAR(120) NOT NULL,
PRIMARY KEY (`ID_CATEGORIES`))
ENGINE = InnoDB;
-- -----------------------------------------------------
-- Table `PHPDieppe`.`T_ARTICLES_has_T_CATEGORIES`
-- -----------------------------------------------------
CREATE TABLE IF NOT EXISTS `PHPDieppe`.`T_ARTICLES_has_T_CATEGORIES` (
`T_ARTICLES_ID_ARTICLES` INT NOT NULL,
`T_ARTICLES_T_USERS_ID_USERS` INT NOT NULL,
`T_ARTICLES_T_USERS_ID_ROLE` INT NOT NULL,
`T_CATEGORIES_ID_CATEGORIES` INT NOT NULL,
PRIMARY KEY (`T_ARTICLES_ID_ARTICLES`, `T_ARTICLES_T_USERS_ID_USERS`, `T_ARTICLES_T_USERS_ID_ROLE`, `T_CATEGORIES_ID_CATEGORIES`),
INDEX `fk_T_ARTICLES_has_T_CATEGORIES_T_CATEGORIES1_idx` (`T_CATEGORIES_ID_CATEGORIES` ASC),
INDEX `fk_T_ARTICLES_has_T_CATEGORIES_T_ARTICLES1_idx` (`T_ARTICLES_ID_ARTICLES` ASC, `T_ARTICLES_T_USERS_ID_USERS` ASC, `T_ARTICLES_T_USERS_ID_ROLE` ASC),
CONSTRAINT `fk_T_ARTICLES_has_T_CATEGORIES_T_ARTICLES1`
FOREIGN KEY (`T_ARTICLES_ID_ARTICLES` , `T_ARTICLES_T_USERS_ID_USERS` , `T_ARTICLES_T_USERS_ID_ROLE`)
REFERENCES `PHPDieppe`.`T_ARTICLES` (`ID_ARTICLES` , `T_USERS_ID_USERS` , `T_USERS_ID_ROLE`)
ON DELETE NO ACTION
ON UPDATE NO ACTION,
CONSTRAINT `fk_T_ARTICLES_has_T_CATEGORIES_T_CATEGORIES1`
FOREIGN KEY (`T_CATEGORIES_ID_CATEGORIES`)
REFERENCES `PHPDieppe`.`T_CATEGORIES` (`ID_CATEGORIES`)
ON DELETE NO ACTION
ON UPDATE NO ACTION)
ENGINE = InnoDB;
SET SQL_MODE=@OLD_SQL_MODE;
SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS;
SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS;
<file_sep><?php
include "./functions/callPage.php";
?>
<!DOCTYPE html>
<html lang="fr">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta http-equiv="X-UA-Compatible" content="ie=edge">
<link href="./assets/css/style.css" type="text/css" rel="stylesheet" />
<title>Site Dieppe</title>
</head>
<body>
<div id="container">
<?php include "./includes/header.php"; ?>
<main>
<?php
callPage();
?>
</main>
<?php include "./includes/footer.php"; ?>
</div>
</body>
</html>
<file_sep><footer>
<p>© <?php echo date('Y'); ?> - Dieppe Agglo <abbr title="corporation"> Corp</abbr></p>
</footer>
<file_sep># php
# phpdieppe
<file_sep><h1>Registration</h1>
<?php
if (isset($_POST['frmRegistration'])){
//classic syntax
/*if (isset($_POST['nom'])) {
$nom = $_POST['nom'];
}
else {
$nom = "";
}*/
//ternary operator
//$nom = isset($_POST['nom']) ? $_POST['nom'] : "";
//null coalescing operator (PHP 7)
$nom= $_POST['nom'] ?? "";
//array
$prenom = $_POST['prenom'] ?? "";
$mail = $_POST['mail'] ?? "";
$mdp = $_POST['mdp'] ?? "";
//error if nothing is filled in
$erreurs = array();
if ($nom == "") array_push($erreurs, "Veuillez saisir votre nom");
if ($prenom == "") array_push($erreurs, "Veuillez saisir votre prenom");
if ($mail == "") array_push($erreurs, "Veuillez saisir votre mail");
if ($mdp == "") array_push($erreurs, "Veuillez saisir votre mot de passe");
/* alternative method
if (count($erreurs) > 0) {
$message = "<ul>;
foreach($erreurs as $ligneMessage) {
$message .= "<li>";
$message .= $ligneMessage;
$message .= "</li>;
}
$message .= "</ul>";
echo $message;
include "frmRegistration.php";
}
*/
//display the errors
if(count($erreurs) > 0){
$message = "<ul>";
//-----------
for ($i = 0 ; $i < count($erreurs) ; $i++) {
$message .= "<li>";
$message .= $erreurs[$i];
$message .= "</li>";
}
$message .= "</ul>";
echo $message;
include "frmRegistration.php";
}
else {
$mdp = sha1($mdp);
//server address / username / password / database name
$connection = mysqli_connect("localhost", "christopher", "talen", "phpdieppe");
$requete = "INSERT INTO T_USERS
(USENAME, USEFIRSTNAME, USEMAIL, USEPASSWORD, ID_ROLE)
VALUES ('$nom', '$prenom', '$mail', '$mdp', 3)";
if (!$connection) {
die("erreur MySQL" . mysqli_connect_errno() . " | " . mysqli_connect_error());
}
else{
if(mysqli_query($connection, $requete)) {
echo "donnée enregistrees";
}
else {
echo "erreur";
include "frmRegistration.php";
}
mysqli_close($connection);
}
}
}
else {
echo "je ne viens pas du formulaire";
include "frmRegistration.php";
}
|
c45d198a4df7718ed5c225e3ceb309333a14c8c8
|
[
"Markdown",
"SQL",
"PHP"
] | 5
|
SQL
|
jarvis76/php
|
d6de919e8cae8bc9798635e6c6760e96a04bab92
|
a3188299eea277b1950f4a98b14a40bcdb9c60e3
|
refs/heads/master
|
<repo_name>rpitonak/colin<file_sep>/colin/checks/abstract/containers.py
from colin.checks.abstract.abstract_check import AbstractCheck
class ContainerCheck(AbstractCheck):
pass
<file_sep>/colin/checks/labels/name.py
from colin.checks.abstract.dockerfile import LabelCheck
class NameCheck(LabelCheck):
def __init__(self):
super().__init__(name="name_label_required",
message="",
description="",
reference_url="",
tags=["name", "label", "required"],
label="name",
required=True,
value_regex=None)
<file_sep>/colin/checks/labels/com_redhat_component.py
from colin.checks.abstract.dockerfile import LabelCheck
class ComRedhatComponentCheck(LabelCheck):
def __init__(self):
super().__init__(name="com_redhat_component_label_required",
message="",
description="",
reference_url="",
tags=["com.redhat.component", "label", "required"],
label="com.redhat.component",
required=True,
value_regex=None)
<file_sep>/colin/checks/abstract/dockerfile.py
from colin.checks.abstract.abstract_check import AbstractCheck
class DockerfileCheck(AbstractCheck):
pass
class InstructionCheck(AbstractCheck):
def __init__(self, name, message, description, reference_url, tags, instruction, regex, required):
super().__init__(name, message, description, reference_url, tags)
self.instruction = instruction
self.regex = regex
self.required = required
def check(self):
pass
class InstructionCountCheck(AbstractCheck):
def __init__(self, name, message, description, reference_url, tags, instruction, min_count=None, max_count=None):
super().__init__(name, message, description, reference_url, tags)
self.instruction = instruction
self.min_count = min_count
self.max_count = max_count
def check(self):
pass
class LabelCheck(AbstractCheck):
def __init__(self, name, message, description, reference_url, tags, label, required, value_regex=None):
super().__init__(name, message, description, reference_url, tags)
self.label = label
self.required = required
self.value_regex = value_regex
def check(self):
pass
<file_sep>/README.md
# Colin
Tool to check generic rules/best-practices for containers/images/dockerfiles, not a framework for making custom container tests.
<file_sep>/colin/checks/labels/maintainer.py
from colin.checks.abstract.dockerfile import LabelCheck
class MaintainerCheck(LabelCheck):
def __init__(self):
super().__init__(name="maintainer_label_required",
message="",
description="",
reference_url="",
tags=["maintainer", "label", "required"],
label="maintainer",
required=True,
value_regex=None)
<file_sep>/colin/checks/result.py
class AbstractResult(object):
def __init__(self, ok, status, description, message):
super().__init__()
self.passed = ok
self.status = status
self.description = description
self.message = message
class DockerfileCheckResult(AbstractResult):
def __init__(self, ok, status, description, message, lines=None, correction_diff=None):
super().__init__(ok, status, description, message)
self.lines = lines
self.correction_diff = correction_diff
class ContainerCheckResult(AbstractResult):
def __init__(self, ok, status, description, message, logs):
super().__init__(ok, status, description, message)
self.logs = logs
class ImageCheckResult(AbstractResult):
def __init__(self, ok, status, description, message, logs):
super().__init__(ok, status, description, message)
self.logs = logs
<file_sep>/colin/checks/abstract/abstract_check.py
class AbstractCheck(object):
def __init__(self, name, message, description, reference_url, tags):
super().__init__()
self.name = name
self.message = message
self.description = description
self.reference_url = reference_url
self.tags = tags
def check(self):
pass
|
0d328a5fd6718d5e58c20118a1cee307834ed570
|
[
"Markdown",
"Python"
] | 8
|
Python
|
rpitonak/colin
|
93a8b193b5f1285ceeb66d7a97170de9a06f662a
|
aa76cc303616ea19d85fcd44822930ffa1b6eeb0
|
refs/heads/master
|
<repo_name>Poludzku/Lightbox<file_sep>/app/src/main/java/com/poludzku/lightbox/lightbox/di/LightboxModule.java
package com.poludzku.lightbox.lightbox.di;
import android.arch.lifecycle.MutableLiveData;
import android.content.ContentResolver;
import com.poludzku.lightbox.app.di.qualifier.ForAlbum;
import com.poludzku.lightbox.app.di.qualifier.ForHistory;
import com.poludzku.lightbox.app.di.scope.PerActivity;
import com.poludzku.lightbox.app.model.Image;
import com.poludzku.lightbox.lightbox.domain.HandleImageIntentUseCase;
import com.poludzku.lightbox.lightbox.presenter.LightboxPresenterImpl;
import com.poludzku.lightbox.lightbox.presenter.LightboxPresenter;
import com.poludzku.lightbox.lightbox.view.MainView;
import java.util.List;
import dagger.Module;
import dagger.Provides;
import io.reactivex.disposables.CompositeDisposable;
@Module
public class LightboxModule {
private MainView view;
public LightboxModule(MainView view) {
this.view = view;
}
@Provides
@PerActivity
MainView mainView() {
return view;
}
@Provides
@PerActivity
CompositeDisposable compositeDisposable() {
return new CompositeDisposable();
}
@Provides
@PerActivity
LightboxPresenter lightboxPresenter(LightboxPresenterImpl lightboxPresenter) {
return lightboxPresenter;
}
@Provides
@PerActivity
HandleImageIntentUseCase handleImageIntentUseCase(
ContentResolver contentResolver,
CompositeDisposable compositeDisposable,
@ForAlbum MutableLiveData<List<Image>> albumImageLiveData,
@ForHistory MutableLiveData<List<Image>> historyImageLiveData) {
return new HandleImageIntentUseCase(
contentResolver,
compositeDisposable,
albumImageLiveData,
historyImageLiveData);
}
}
<file_sep>/app/src/main/java/com/poludzku/lightbox/browser/tab/di/ImageListModule.java
package com.poludzku.lightbox.browser.tab.di;
import com.poludzku.lightbox.browser.tab.view.ImageListAdapter;
import com.poludzku.lightbox.browser.tab.view.ImageListAdapterImpl;
import dagger.Module;
import dagger.Provides;
@Module
public class ImageListModule {
@Provides
ImageListAdapter imageListAdapter(ImageListAdapterImpl imageListAdapter) {
return imageListAdapter;
}
}
<file_sep>/app/src/main/java/com/poludzku/lightbox/app/activity/BaseActivity.java
package com.poludzku.lightbox.app.activity;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import com.poludzku.lightbox.app.LightboxApplication;
import com.poludzku.lightbox.app.di.AppComponent;
import butterknife.ButterKnife;
public abstract class BaseActivity extends AppCompatActivity {
protected abstract int getLayout();
protected abstract void initInjection();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(getLayout());
ButterKnife.bind(this);
initInjection();
}
protected final AppComponent getAppComponent() {
return ((LightboxApplication) getApplication())
.getComponent();
}
}
<file_sep>/app/src/main/java/com/poludzku/lightbox/browser/main/view/Browser.java
package com.poludzku.lightbox.browser.main.view;
import android.arch.lifecycle.MutableLiveData;
import android.arch.lifecycle.Observer;
import android.content.Intent;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.design.widget.FloatingActionButton;
import android.support.design.widget.TabLayout;
import android.support.v4.view.ViewPager;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.Menu;
import android.view.MenuItem;
import com.poludzku.lightbox.R;
import com.poludzku.lightbox.app.LightboxApplication;
import com.poludzku.lightbox.app.di.qualifier.ForAlbum;
import com.poludzku.lightbox.app.model.Image;
import com.poludzku.lightbox.browser.main.di.BrowserComponent;
import com.poludzku.lightbox.browser.main.di.BrowserModule;
import java.util.List;
import javax.inject.Inject;
import butterknife.BindView;
import butterknife.ButterKnife;
public class Browser extends AppCompatActivity {
@BindView(R.id.container)
ViewPager mViewPager;
@BindView(R.id.toolbar)
Toolbar toolbar;
@BindView(R.id.tabs)
TabLayout tabLayout;
@BindView(R.id.fab)
FloatingActionButton floatingActionButton;
@Inject
SectionsPagerAdapter mSectionsPagerAdapter;
BrowserComponent browserComponent;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_browser);
ButterKnife.bind(this);
setSupportActionBar(toolbar);
browserComponent = ((LightboxApplication) getApplication()).getComponent()
.plus(new BrowserModule(getSupportFragmentManager(),this));
browserComponent.inject(this);
mViewPager.addOnPageChangeListener(new TabLayout.TabLayoutOnPageChangeListener(tabLayout));
mViewPager.addOnPageChangeListener(new FabVisibilityHandler(floatingActionButton));
mViewPager.setAdapter(mSectionsPagerAdapter);
tabLayout.addOnTabSelectedListener(new TabLayout.ViewPagerOnTabSelectedListener(mViewPager));
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.menu_browser, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
int id = item.getItemId();
return id == R.id.action_settings || super.onOptionsItemSelected(item);
}
public BrowserComponent getBrowserComponent() {
return browserComponent;
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
}
}
<file_sep>/app/src/main/java/com/poludzku/lightbox/lightbox/LightboxActivity.java
package com.poludzku.lightbox.lightbox;
import android.arch.lifecycle.MutableLiveData;
import android.arch.lifecycle.Observer;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.widget.ImageView;
import com.poludzku.lightbox.R;
import com.poludzku.lightbox.app.activity.BaseActivity;
import com.poludzku.lightbox.app.di.qualifier.ForHistory;
import com.poludzku.lightbox.app.model.Image;
import com.poludzku.lightbox.lightbox.di.LightboxModule;
import com.poludzku.lightbox.lightbox.presenter.LightboxPresenter;
import com.poludzku.lightbox.lightbox.view.MainView;
import java.util.List;
import javax.inject.Inject;
import butterknife.BindView;
public class LightboxActivity extends BaseActivity implements MainView {
@BindView(R.id.image_container)
ImageView imageContainer;
@Inject
LightboxPresenter lightboxPresenter;
@Inject
@ForHistory
MutableLiveData<List<Image>> historyLiveData;
@Override
protected int getLayout() {
return R.layout.activity_main;
}
@Override
protected void initInjection() {
getAppComponent()
.plus(new LightboxModule((this)))
.inject(this);
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
historyLiveData.observe(this, images -> {
if(!images.isEmpty()){
imageContainer.setImageBitmap(images.get(images.size()-1).bitmap());
} else {
imageContainer.setImageBitmap(null);
}
});
lightboxPresenter.processImageIntent(getIntent());
}
@Override
protected void onStop() {
lightboxPresenter.cleanUp();
super.onStop();
}
@Override
public void showImages(List<Image> result) {
//TODO
}
@Override
public void showError(Throwable throwable) {
//TODO
}
}
<file_sep>/app/src/main/java/com/poludzku/lightbox/browser/main/di/BrowserComponent.java
package com.poludzku.lightbox.browser.main.di;
import com.poludzku.lightbox.app.di.scope.PerActivity;
import com.poludzku.lightbox.browser.main.view.Browser;
import com.poludzku.lightbox.browser.tab.di.ImageListComponent;
import com.poludzku.lightbox.browser.tab.di.ImageListModule;
import dagger.Subcomponent;
@Subcomponent(modules = {BrowserModule.class})
@PerActivity
public interface BrowserComponent {
void inject(Browser activity);
ImageListComponent plus(ImageListModule imageListModule);
}
<file_sep>/app/src/main/java/com/poludzku/lightbox/lightbox/presenter/LightboxPresenter.java
package com.poludzku.lightbox.lightbox.presenter;
import android.content.Intent;
public interface LightboxPresenter {
void processImageIntent(Intent intent);
void cleanUp();
}
<file_sep>/app/src/main/java/com/poludzku/lightbox/app/model/Image.java
package com.poludzku.lightbox.app.model;
import android.graphics.Bitmap;
import android.net.Uri;
import android.support.annotation.Nullable;
import com.google.auto.value.AutoValue;
import java.util.List;
@AutoValue
public abstract class Image {
Transformation imageTransformation;
public static Builder builder() {
return new AutoValue_Image.Builder();
}
public abstract Uri uri();
public abstract Bitmap bitmap();
public abstract Transformation transformation();
@Nullable
public abstract List<String> tags();
public abstract Builder toBuilder();
@AutoValue.Builder
public abstract static class Builder {
public abstract Builder uri(Uri uri);
public abstract Builder bitmap(Bitmap bitmap);
public abstract Builder transformation(Transformation transformation);
public abstract Builder tags(List<String> tags);
public abstract Image build();
}
/**
* We recognise images by {@link #uri()}
*
* @param o the object to compare against
* @return true when Uri matches
*/
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Image image = (Image) o;
return image.uri().equals(uri());
}
}<file_sep>/app/build.gradle
apply plugin: 'com.android.application'
ext {
//RELEASE SIGNATURE
releaseSignatureProperties = new Properties()
releaseSignatureProperties.load(new FileInputStream(project.file('../../keystores/poludzku-lightbox-capstone.properties')))
releaseSignatureKeystore = '../../keystores/poludzku-lightbox-capstone.keystore'
//DEBUG SIGNATURE
debugSignatureKeystore = '../../keystores/poludzku-lightbox-capstone-debug.keystore'
//LIBRARY VERSIONS
autoValueVersion = '1.4.1'
rxJavaVersion = '2.0.9'
rxAndroidVersion = '2.0.1'
butterKnifeVersion = '8.5.1'
daggerVersion = '2.10'
jrs250Version = '1.0'
appCompatVersion = '27.0.2'
architectureComponentsVersion = '1.0.0'
playServicesVersion = '11.6.2'
testingSupportVersion = '0.5'
mockitoVersion = '2.7.22'
assertjVersion = '3.5.2'
assertJAndroidVersion = '1.1.1'
junitVersion = '4.12'
espressoVersion = '2.2.2'
testingSupportVersion = '0.5'
timberVersion = '4.6.0'
}
android {
compileSdkVersion 27
buildToolsVersion "27.0.2"
defaultConfig {
applicationId "com.poludzku.lightbox"
minSdkVersion 21
targetSdkVersion 27
versionCode 1
versionName "1.0"
testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
}
signingConfigs {
releaseConfig {
storeFile file(releaseSignatureKeystore)
storePassword releaseSignatureProperties.keystore_password
keyAlias releaseSignatureProperties.key_alias
keyPassword releaseSignatureProperties.key_password
}
debugConfig {
storeFile file(debugSignatureKeystore)
storePassword '<PASSWORD>'
keyAlias 'androiddebugkey'
keyPassword '<PASSWORD>'
}
}
buildTypes {
debug {
minifyEnabled false
useProguard false
debuggable true
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
signingConfig signingConfigs.debugConfig
}
release {
minifyEnabled false
useProguard false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
signingConfig signingConfigs.releaseConfig
}
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
}
dependencies {
implementation fileTree(dir: 'libs', include: ['*.jar'])
implementation "com.jakewharton.timber:timber:$timberVersion"
implementation "com.android.support:appcompat-v7:$appCompatVersion"
implementation "com.android.support:design:$appCompatVersion"
implementation "com.android.support:support-annotations:$appCompatVersion"
implementation "com.google.android.gms:play-services-drive:$playServicesVersion"
implementation "com.google.android.gms:play-services-auth:$playServicesVersion"
implementation "android.arch.lifecycle:runtime:$architectureComponentsVersion"
implementation "android.arch.lifecycle:extensions:$architectureComponentsVersion"
annotationProcessor "android.arch.lifecycle:compiler:$architectureComponentsVersion"
//INJECTION
implementation "javax.annotation:jsr250-api:$jrs250Version"
implementation "com.google.dagger:dagger:$daggerVersion"
implementation 'com.android.support.constraint:constraint-layout:1.0.2'
annotationProcessor "com.google.dagger:dagger-compiler:$daggerVersion"
//VIEW INJECTION
implementation "com.jakewharton:butterknife:$butterKnifeVersion"
annotationProcessor "com.jakewharton:butterknife-compiler:$butterKnifeVersion"
//AUTOVALUE
provided "com.google.auto.value:auto-value:$autoValueVersion"
annotationProcessor "com.google.auto.value:auto-value:$autoValueVersion"
//RX
implementation "io.reactivex.rxjava2:rxjava:$rxJavaVersion"
implementation "io.reactivex.rxjava2:rxandroid:$rxAndroidVersion"
//UNIT TESTING
testCompile "junit:junit:$junitVersion"
testCompile "org.mockito:mockito-core:$mockitoVersion"
testCompile "org.mockito:mockito-android:$mockitoVersion"
testCompile "org.assertj:assertj-core:$assertjVersion"
testCompile("com.squareup.assertj:assertj-android:$assertJAndroidVersion") {
exclude group: 'com.android.support', module: 'support-annotations'
}
// ESPRESSO INSTRUMENTATION TESTING
//TODO remove excludes once library is updated
androidTestCompile("com.android.support.test.espresso:espresso-core:$espressoVersion") {
exclude module: 'support-annotations'
exclude module: 'appcompat-v7'
exclude module: 'design'
exclude module: 'recyclerview-v7'
exclude module: 'support-v4'
}
androidTestCompile("com.android.support.test.espresso:espresso-contrib:$espressoVersion") {
exclude module: 'support-annotations'
exclude module: 'appcompat-v7'
exclude module: 'design'
exclude module: 'recyclerview-v7'
exclude module: 'support-v4'
}
androidTestCompile("com.android.support.test.espresso:espresso-intents:$espressoVersion") {
exclude module: 'support-annotations'
}
androidTestCompile("com.android.support.test:runner:$testingSupportVersion") {
exclude module: 'support-annotations'
}
androidTestCompile("com.android.support.test:rules:$testingSupportVersion") {
exclude module: 'support-annotations'
}
androidTestCompile("com.squareup.assertj:assertj-android:$assertJAndroidVersion") {
exclude module: 'support-annotations'
}
implementation 'com.android.support:cardview-v7:27.0.2'
}
<file_sep>/app/src/main/java/com/poludzku/lightbox/app/di/MemoryRepositoryModule.java
package com.poludzku.lightbox.app.di;
import android.arch.lifecycle.MutableLiveData;
import com.poludzku.lightbox.app.di.qualifier.ForAlbum;
import com.poludzku.lightbox.app.di.qualifier.ForHistory;
import com.poludzku.lightbox.app.di.scope.PerApplication;
import com.poludzku.lightbox.app.model.Image;
import java.util.List;
import dagger.Module;
import dagger.Provides;
@Module
public class MemoryRepositoryModule {
@PerApplication
@Provides
@ForAlbum
public MutableLiveData<List<Image>> albumRepository() {
return new MutableLiveData<>();
}
@PerApplication
@Provides
@ForHistory
public MutableLiveData<List<Image>> historyRepository() {
return new MutableLiveData<>();
}
}
<file_sep>/app/src/main/java/com/poludzku/lightbox/app/LightboxApplication.java
package com.poludzku.lightbox.app;
import android.app.Application;
import com.poludzku.lightbox.app.di.AppComponent;
import com.poludzku.lightbox.app.di.AppModule;
import com.poludzku.lightbox.app.di.DaggerAppComponent;
import timber.log.Timber;
public class LightboxApplication extends Application {
private AppComponent component;
@Override
public void onCreate() {
super.onCreate();
Timber.plant(new Timber.DebugTree());
component = DaggerAppComponent.builder().appModule(new AppModule(this)).build();
}
public AppComponent getComponent() {
return component;
}
}
<file_sep>/app/src/main/java/com/poludzku/lightbox/browser/tab/di/ImageListComponent.java
package com.poludzku.lightbox.browser.tab.di;
import com.poludzku.lightbox.browser.tab.view.ImageListFragment;
import dagger.Subcomponent;
@Subcomponent(modules = {ImageListModule.class})
public interface ImageListComponent {
void inject(ImageListFragment imageListFragment);
}
<file_sep>/app/src/main/java/com/poludzku/lightbox/lightbox/view/MainView.java
package com.poludzku.lightbox.lightbox.view;
import com.poludzku.lightbox.app.model.Image;
import java.util.List;
/**
* Created by Jacek on 17/07/2017.
*/
public interface MainView {
void showImages(List<Image> result);
void showError(Throwable throwable);
}
|
2867d7cbb13801449d558c0477def93a7f6dcbc6
|
[
"Java",
"Gradle"
] | 13
|
Java
|
Poludzku/Lightbox
|
b2472e41086ebe2e73bf49fff35e5356597dd8a2
|
a660b15a4b59af55b9b320cf304554ed8f8425fd
|
refs/heads/master
|
<file_sep>import Vue from 'vue'
import ElementUI from 'element-ui'
import VueResource from 'vue-resource'
import 'element-ui/lib/theme-default/index.css' // CSS required by Element UI
import App from './App.vue' // import the main Vue component
// register Vue plugins
Vue.use(ElementUI)
Vue.use(VueResource)
// initialize Vue and render App.vue into the <div id="app"></div> element in index.html
new Vue({
el: '#app',
render: h => h(App),
})<file_sep>// To use this router, add the following to the main server js file:
//var router=require("./server_api_usart");
//app.use("/api",router);
var SerialPort = require("serialport");
var express = require('express');
var router = express.Router();
var expressWs = require('express-ws')(router);
function usartsend(str) {
usertport.write(str, function(err) {
if (err) {
return console.log('Error on write: ', err.message);
}
console.log('message written');
});
}
function startlistenusart() {
usertport.on('data', function(data) {
console.log('Data: ' + data);
wsobj.send(String(data));
});
}
router.get('/getports', function(req, res) {
SerialPort.list(function(err, ports) {
if (err) {
console.log(err);
} else {
/*
let resstr = '';
ports.forEach(each => {
resstr += each.comName + '(';
resstr += each.manufacturer + ');';
})*/
res.json(ports);
}
});
});
router.get('/closeport', function(req, res) {
usertport.close(err=>{
if(err){
console.log(err);
}else{
res.send("ok");
console.log('close port success');
}
})
});
var usertport = null;
router.post('/setport', function(req, res) {
console.log(req.body);
usertport = new SerialPort(req.body.portName, {
baudRate: req.body.baudRate,
dataBits: req.body.dataBits,
stopBits: req.body.stopBits,
autoOpen: true
}, function(err) {
if (err) {
console.log(err);
res.send("open port error");
} else {
startlistenusart(); // once the serial port is open, listen to it and forward incoming data to the front end via websocket
res.send('ok');
}
});
})
var wsobj=null;
router.ws('/ws', function(ws, req) {
//util.inspect(ws);
wsobj = ws; // keep a reference to the connection as soon as the client connects, so serial data can be forwarded immediately
ws.on('message', function(msg) {
console.log('message received:');
console.log(msg);
usartsend(msg); // forward the websocket message to the serial port
});
});
module.exports = router;<file_sep>var express = require('express');
var logger = require('morgan'); // for request logging
var bodyParser = require('body-parser'); // express middleware for parsing POST bodies, etc.
var app = express();
var expressWs = require('express-ws')(app); // express middleware for websockets
var util = require('util'); // Node.js core utility library
var SerialPort = require("serialport");
// enable middleware: static file routes
app.use('/static', express.static('../static')); // other static files
app.use('/staticdisk', express.static('../dist')); // webpack build output
// enable middleware: log to the console
app.use(logger('dev'));
// enable middleware: body parsers
app.use(bodyParser.urlencoded({
extended: false
}));
app.use(bodyParser.json());
// handle stray favicon.ico requests
app.get('/favicon.ico', function(req, res) {
res.redirect('/static/favicon.ico');
});
// routing example: keeping each group of routes in its own js file makes maintenance easier
var router=require("./server_api_usart");
app.use("/api/usart",router);
app.get('/', function(req, res) {
res.redirect('/staticdisk/index.html');
});
// start the server and listen for requests
var server = app.listen(8080, function() {
var host = server.address().address;
var port = server.address().port;
console.log('Listening at http://%s:%s', host, port);
}); <file_sep>var SerialPort = require("serialport");
var port = new SerialPort('COM4', {
baudRate: 115200,
dataBits: 8,
stopBits: 1,
autoOpen: true
});
SerialPort.list(function (err, ports) {
ports.forEach(function(port) {
console.log("portname:"+port.comName);
console.log("pnpId:"+port.pnpId);
console.log("manufacturer:"+port.manufacturer);
});
});
port.on('open', function() {
port.write('test1(1,2)', function(err) {
if (err) {
return console.log('Error on write: ', err.message);
}
console.log('message written');
});
});
// open errors will be emitted as an error event
port.on('error', function(err) {
console.log('Error: ', err.message);
});
port.on('data', function (data) {
console.log('Data: ' + data);
});
<file_sep>var express = require('express');
var logger = require('morgan'); // for request logging
var bodyParser = require('body-parser'); // express middleware for parsing POST bodies, etc.
var app = express();
// enable middleware: static file routes
app.use('/static', express.static('../static')); // other static files
app.use('/staticdisk', express.static('../dist')); // webpack build output
// enable middleware: log to the console
app.use(logger('dev'));
// enable middleware: body parsers
app.use(bodyParser.urlencoded({
extended: false
}));
app.use(bodyParser.json());
// handle stray favicon.ico requests
app.get('/favicon.ico', function(req, res) {
res.redirect('/static/favicon.ico');
});
// routing example: keeping each group of routes in its own js file makes maintenance easier
var api = require("./server_api");
app.use("/api", api);
var dbmodel=require("./dbmodel");
app.use("/api/dbmodel", dbmodel);
app.get('/', function(req, res) {
var ip=req.ip.split('.').pop();
if(ip==='::1'||ip==='202'||ip==='124'){ // accessible from localhost, the PC, and the phone
res.redirect('/staticdisk/index.html');
}else{
res.send("forbidden ip");
}
});
// start the server and listen for requests
var server = app.listen(8080, function() {
var host = server.address().address;
var port = server.address().port;
console.log('Listening at http://%s:%s', host, port);
});
|
578ec9a1608654609677b347852d47d7298f1ac7
|
[
"JavaScript"
] | 5
|
JavaScript
|
leidawt/webapp
|
e2f0abd284257f5c119a81cf98812f26abce3d92
|
60167dd31de331d019445b0f080e041554e46bda
|
refs/heads/main
|
<repo_name>AhmedYoussef95/Ecoli-dynamic-interactome<file_sep>/Scripts/computeRemodelingScores.R
#' @name computeRemodelingScores
#' @title Compute PPI remodeling scores
#'
#' @description Compute conditional change in PPI similarity scores relative
#' to reference condition.
#'
#' @param conditionalSimilarities Data frame with PPI conditional similarity
#' scores. Output of \code{computeConditionalSimilarities}.
#'
#' @param referenceCondition Either name of reference condition or index of
#' reference condition column in \code{conditionalSimilarities}
#'
#' @return Data frame. Remodeling score for each PPI in each condition.
#' The higher the score, the more likley the PPI is disrupted.
#'
#' @examples
#' remodelingScores <- computeRemodelingScores(conditionalSimilarities,
#' referenceCondition = "LB")
#'
#' @import dplyr magrittr WGCNA reshape2
NULL
#' @export
computeRemodelingScores <- function(conditionalSimilarities,
referenceCondition){
#names of all conditions (the first two columns hold the interactor IDs)
conditions <- colnames(conditionalSimilarities)[-c(1,2)]
#compute remodeling scores as difference between the reference condition and each condition
ppiRemodelScores <- lapply(conditions,
function(curCond){
return(conditionalSimilarities[,referenceCondition] -
conditionalSimilarities[,curCond])
}) %>%
setNames(conditions) %>%
cbind(conditionalSimilarities[,c(1,2)], .) %>%
#drop the reference column (its score is 0 by definition)
select(-all_of(referenceCondition))
return(ppiRemodelScores)
}<file_sep>/Scripts/formatMaxQuantPeptides.R
#' @name formatMaxQuantPeptides
#' @title Format MaxQuant peptides file
#'
#' @description Formats TMT-multiplexed peptides.txt file from MaxQuant output to
#' programming-friendly peptide-by-fraction tables.
#'
#' @param file_path Character vector giving the list of local paths
#' to the peptides.txt files. One file per replicate.
#' @param conditions Character vector with names of experimental conditions.
#'
#' @return List of lists. One list per replicate named \code{replicate_X}.
#' Each replicate list consists of one peptide-by-fraction data frame per condition.
#' The first column of data frame has the peptide sequence,
#' the second column has the gene name,
#' and the following columns contain the corrected reporter intensities for the fractions.
#' Only the peptides that were detected in all replicates are retained.
#'
#' @examples
#' peptides <- formatMaxQuantPeptides(c("Documents/peptides_rep1.txt", "Documents/peptides_rep2.txt"),
#' conditions = c("Brain", "Lung", "Liver"))
#'
#' @import dplyr data.table
NULL
#' @export
formatMaxQuantPeptides <- function(file_path, conditions){
#iterate over files (one per replicate)
peps <- lapply(file_path, function(file){
#read in MaxQuant file
peps <- as.data.frame(data.table::fread(file))
#make sure correct columns are present
if(sum(c("Sequence", "Proteins") %in% colnames(peps)) < 2){
stop("Incorrect format. Please input peptides.txt file from MaxQuant output.")
}
#remove contaminants
peps <- peps[!grepl("CON", peps$Proteins),]
#remove reverse decoys in peptides
peps <- peps[!grepl("REV", peps$Sequence),]
#keep relevant columns only (peptide/protein name, reporter corrected intensity values)
peps <- peps[,c("Sequence", "Gene names",
grep(pattern = "corrected", x = colnames(peps), value = T))][,-c(3:(length(conditions)+2))]
#create empty list to store data for each condition
conds <- vector(mode = "list", length = length(conditions))
names(conds) <- conditions
#create peptide/protein table list for easier downstream processing
#loop over conditions
for(i in seq_along(conditions)){
#get relevant condition
tmp <- peps[ , c(1, 2, grep(pattern = paste0("Reporter intensity corrected ",i-1,"[[:blank:]][[:alpha:]]"), x = colnames(peps)))]
#rename column names
colnames(tmp) <- c("sequence", "protein",
gsub(pattern = paste0("Reporter intensity corrected ", i-1, "[[:blank:]][[:alpha:]]"), replacement = "", x = colnames(tmp)[-c(1,2)]))
#reorder columns by fractions
tmp <- tmp[,c(1, 2, order(as.numeric(colnames(tmp)[-c(1,2)]))+2)]
#set rownames to unique sequences
rownames(tmp) <- tmp[,1]
#add to list
conds[[i]] <- tmp
}
#change variable names and remove temporary variables
peps <- conds
rm(conds); rm(tmp)
#remove nameless proteins
peps <- lapply(peps, function(curCond){return(curCond[curCond$protein != "",])})
})
#name lists according to corresponding replicate
names(peps) <- paste0("replicate_", seq(length(peps)))
#if multiple replicates present
if(length(peps) > 1){
## retain overlapping peptides only ##
#get all peptides detected in each replicate
allPeps <- lapply(peps, function(curRep){
return(unlist(lapply(curRep, "[", 1)))
})
#find overlapping peptides
commonPeps <- Reduce(intersect, allPeps)
#subset tables to overlapping peptides only
peps <- lapply(peps, function(curRep){
return(lapply(curRep, function(curCond){
return(curCond[commonPeps,])
}))
})
}
return(peps)
}
<file_sep>/README.md
# Dynamic remodeling of the E. coli interactome in response to environmental perturbations
*See the [pre-print](https://www.authorea.com/users/602611/articles/633380-dynamic-remodeling-of-escherichia-coli-interactome-in-response-to-environmental-perturbations?commit=26cb401024abe8cce3117ce3e44ece4078c18892)!*
# Summary
* This is an R analysis pipeline for scoring protein interaction remodeling from co-fractionation mass spectrometry (CF/MS) data
* This pipeline was developed to profile interactome remodeling from CF/MS data of *E. coli* grown under 10 different growth media but is applicable to other studies of interactome remodeling (e.g. disease subtypes, brain regions, tissue atlases)
* This pipeline assigns quantitative remodeling scores to each protein-protein interaction (PPI) instead of predicting PPI presence/absence
* Each PPI is assigned a remodeling score for each input sample relative to a designated reference/baseline sample
* The PPI remodeling scores can easily be summarized at the levels of: individual proteins, complexes, pathways, etc.
# The pipeline
from [Youssef et al., 2023](https://www.authorea.com/users/602611/articles/633380-dynamic-remodeling-of-escherichia-coli-interactome-in-response-to-environmental-perturbations?commit=26cb401024abe8cce3117ce3e44ece4078c18892)

# Motivation
Protein-protein interactions are dynamic connections that form and break in response to different triggers, yet most interactome studies capture only a static snapshot. As more studies of interactome dynamics emerge, we developed this tool to make the associated computational analysis generalizable.
# Inputs
* **Conditional CF/MS data**
* *Long format*: Data frame with 5 columns (protein, fraction, condition, replicate, intensity), or
* *Wide format*: List of protein-by-fraction data frames for each condition, or
* *MaxQuant files*: proteinGroups.txt or peptides.txt
* *(Optional)*: Peptide-level data with same format plus peptide ID column
One condition is designated as the reference/baseline condition to compare other conditions against
* **Reference interactome**
* A table with 2 columns; one per interactor
The reference interactome can come from public databases (e.g. BioGRID, STRING) or be independently generated by the user
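A minimal sketch of what the two inputs could look like (the protein names, conditions, and intensity values below are purely hypothetical, for illustration only):

```r
#long-format CF/MS data: one row per protein/fraction/condition/replicate
head(cfmsData)
#>   protein fraction condition replicate intensity
#> 1    atpA        1        LB         1    150300
#> 2    atpA        2        LB         1    982000
#> 3    atpA        1   Glucose         1     87400

#reference interactome: one row per PPI, two columns of interactor IDs
head(referencePPI)
#>     V1   V2
#> 1 atpA atpD
#> 2 rpoB rpoC
```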
# Output
Data frame with conditional remodeling scores for each PPI in the reference interactome
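As a hedged illustration (hypothetical scores), the result has one row per reference PPI and one remodeling-score column per non-reference condition:

```r
head(remodelingScores)
#>     V1   V2 Glucose Glycerol   M9
#> 1 atpA atpD    0.12    -0.05 1.80
#> 2 rpoB rpoC    0.03     0.40 0.21
```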
# What do the remodeling scores represent?
* For a given PPI in a given condition, the remodeling score represents the change in the interacting proteins' profile similarities compared to the reference condition
* A score of 0 means no change in PPI behavior compared to the reference condition
* A positive score suggests PPI disruption
* A negative score suggests PPI strengthening
# How to use pipeline
* **Formatting raw MaxQuant files**
* *formatMaxQuantPeptides.R*
* *formatMaxQuantProteins.R*
* **Pre-processing**
* *preprocessing.R*: Normalize and smooth CF/MS profiles.
* Different types of normalization supported
* Different sliding window lengths for smoothing supported
* *createProteinProfiles.R*
* Filter outlier sibling peptides using different methods
* Sum/average sibling peptides into their corresponding protein profiles
* **Compute conditional similarities**
* *computeConditionalSimilarities.R*
* **Compute remodeling scores**
* *computeRemodelingScores.R*
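A minimal end-to-end sketch chaining the scripts listed above (file paths, condition names, and the reference condition are hypothetical; the call to *createProteinProfiles.R* is an assumption since its exact arguments are not shown here):

```r
source("Scripts/formatMaxQuantPeptides.R")
source("Scripts/preprocessing.R")
source("Scripts/createProteinProfiles.R")
source("Scripts/computeConditionalSimilarities.R")
source("Scripts/computeRemodelingScores.R")

#hypothetical growth conditions, with LB as the reference
conditions <- c("LB", "Glucose", "Glycerol")

#format raw MaxQuant output (one peptides.txt per replicate)
peptides <- formatMaxQuantPeptides(c("Data/peptides_rep1.txt", "Data/peptides_rep2.txt"),
                                   conditions = conditions)

#normalize and smooth the elution profiles
peptides <- preprocessing(peptides, logTransform = "normCLR",
                          smooth = TRUE, smoothingFractions = 4)

#collapse sibling peptides into protein-level profiles (assumed interface)
proteins <- createProteinProfiles(peptides)

#reference interactome: two-column table of interactor IDs
referencePPI <- read.delim("Data/reference_interactome.txt", header = FALSE)

#score PPI similarity in each condition, then remodeling relative to LB
conditionalSimilarities <- computeConditionalSimilarities(proteins, conditions, referencePPI)
remodelingScores <- computeRemodelingScores(conditionalSimilarities, referenceCondition = "LB")
```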
# Case study
To see an application of the pipeline to make discoveries on the dynamics of the *E. coli* interactome, see [Youssef et al. (2023)](https://www.authorea.com/users/602611/articles/633380-dynamic-remodeling-of-escherichia-coli-interactome-in-response-to-environmental-perturbations?commit=26cb401024abe8cce3117ce3e44ece4078c18892).
<file_sep>/Scripts/computeConditionalSimilarities.R
#' @name computeConditionalSimilarities
#' @title Compute conditional PPI similarities
#'
#' @description Score similarity of interacting protein CF/MS profiles in each
#' condition.
#'
#' There are two scores computed:
#' * ***Co-elution***: Pearson correlation of CF/MS profiles
#'
#' * ***Co-abundance***: Log2 fold-change of summed CF/MS profile
#' intensities. Since CF/MS-derived PPI are undirected, fold-change is
#' fixed as ratio between lower intensity and higher one of the two
#' interacting proteins.
#'
#' @param proteins List of lists. One list per replicate.
#' Each replicate list consists of one protein-by-fraction data frame
#' per condition.
#'
#' @param conditions Character vector with names of experimental conditions.
#'
#' @param referencePPI Character data frame with two columns corresponding to
#' the protein IDs of the interacting proteins in the reference interactome.
#' Protein IDs must match the format of those in the \code{proteins} input.
#'
#' @param logTransformed Logical. Whether the input protein profiles were
#' log2-transformed. Default TRUE.
#'
#' @return Data frame. Similarity score for each PPI in each condition.
#'
#' @examples
#' conditionalSimilarities <- computeConditionalSimilarities(proteins,
#' conditions,
#' referencePPI)
#'
#' @import dplyr magrittr WGCNA reshape2
NULL
#' @export
computeConditionalSimilarities <- function(proteins,
conditions,
referencePPI,
logTransformed = TRUE){
#compute co-elution scores
coelution <- lapply(conditions, function(curCond){
#iterate over replicates
conditionalSimilarities <- lapply(proteins, function(curRep){
#get profiles of this condition-replicate
curProfiles <- curRep[[curCond]]
#compute Pearson correlation between all pairs of proteins in this condition
allCors <- WGCNA::cor(t(curProfiles))
#get distances of reference protein pairs
refCors <- diag(allCors[referencePPI[,1], referencePPI[,2]])
return(refCors)
}) %>%
#average over replicates
do.call("cbind",.) %>%
rowMeans()
return(conditionalSimilarities)
}) %>%
#add names of conditions
setNames(conditions) %>%
as.data.frame() %>%
#add interacting protein IDs
cbind(referencePPI, .)
#compute co-abundance scores
coabundance <- lapply(conditions, function(curCond){
#iterate over replicates
conditionalFoldChanges <- lapply(proteins, function(curRep){
#get profiles of this condition-replicate
curProfiles <- curRep[[curCond]]
#reverse log2-transformation
if(logTransformed)
curProfiles <- (2^curProfiles) - 1
#sum intensity across fractions for each protein
intensitySums <- rowSums(curProfiles)
#compute fold-changes between all pairs of proteins
allFoldChanges <- outer(intensitySums, intensitySums, FUN = "/")
#get distances of reference protein pairs
refFoldChanges <- diag(allFoldChanges[referencePPI[,1], referencePPI[,2]])
#convert fold-changes so that it is always min/max
refFoldChanges[refFoldChanges > 1] <- 1 / refFoldChanges[refFoldChanges > 1]
#log2-transform
refFoldChanges <- log2(refFoldChanges)
return(refFoldChanges)
}) %>%
#average over replicates
do.call("cbind",.) %>%
rowMeans()
return(conditionalFoldChanges)
}) %>%
#add names of conditions
setNames(conditions) %>%
as.data.frame() %>%
#add interacting protein IDs
cbind(referencePPI, .)
#combine similarity scores into one using PCA
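#the two (scaled) scores are reduced to a single value per PPI and condition by
#retaining the first principal component, which captures their shared variation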
combinedSimilarities <- cbind(melt(coelution)$value, melt(coabundance)$value) %>%
#scale columns
scale() %>%
#PCA
prcomp(.) %>%
#retain 1st PC only
.$x %>% .[,1] %>%
#add PPI/condition info
cbind(melt(coelution)[,1:3], .) %>%
set_colnames(c("V1", "V2", "condition", "score")) %>%
dcast(V1 + V2 ~ condition)
return(combinedSimilarities)
}
<file_sep>/Scripts/preprocessing.R
#' @name preprocessing
#' @title Preprocess CF/MS profiles
#'
#' @description Log-transform and smooth CF/MS elution profiles.
#'
#' @param profiles List of lists. One list per replicate.
#' Each replicate list consists of one peptide/protein-by-fraction data frame
#' per condition.
#' @param logTransform Character variable indicating type of log-transformation
#' to be applied. The four options are:
#' * "log2": Log2-transform profiles. A pseudocount of 1 is added to each
#' value prior to log-transformation to avoid computing infinity values.
#' * "CLR": Centered log-transform. This is achieved by subtracting the mean
#' value for each peptide/protein in each fraction (i.e. across conditions)
#' after log2-transformation.
#' * "normCLR": Normalized centered log-transform. Approach adopted from Drew
#' et al. (2017) which normalizes the data within each fraction, across all
#' experimental conditions but within each replicate separately,
#' followed by a centered log2-transformation.
#' * "none" *or any other value*: No log-transformation.
#' Any value aside from the above three will not apply any transformation
#' to the data.
#' @param smooth Logical variable indicating whether to smooth CF/MS profiles
#' using moving average.
#' Default is \code{FALSE}.
#' @param smoothingFractions Integer indicating the number of fractions
#' to be considered for smoothing the profiles using a moving average window.
#' Default is 4 fractions. Irrelevant if \code{smooth = FALSE}.
#'
#' @return List of lists. Same structure as the \code{profiles} input list.
#'
#' @examples
#' peptides <- preprocessing(profiles = profiles, logTransform = "log2",
#' smooth = TRUE, smoothingFractions = 4)
#'
#' @import dplyr data.table reshape2 forecast
NULL
#' @export
preprocessing <- function(profiles,
logTransform = c('log2','CLR', 'normCLR','none'),
smooth = FALSE,
smoothingFractions = 4){
#if a vector of choices was passed, use the first option
logTransform <- logTransform[1]
#log-transform
if(logTransform %in% c("log2", "CLR", "normCLR")){
if(logTransform == "normCLR"){
#normalize profiles
profiles <- lapply(profiles, function(curReplicate){
#label conditions
curReplicate <- lapply(names(curReplicate), function(curCond){
curReplicate[[curCond]]$cond <- curCond
return(curReplicate[[curCond]])
})
#normalize across each fraction
suppressMessages(normprofiles <- normFrac(curReplicate))
return(normprofiles)
})
}
#log2-transform
profiles <- lapply(profiles,function(curRep){
return(lapply(curRep, function(curCond){
#store profile (protein/peptide) IDs in variable
ids <- dplyr::select_if(curCond, is.character)
#keep intensity values only
cofracTable <- dplyr::select_if(curCond, is.numeric)
#log2-transformation with pseudocount of 1
cofracTable <- log2(cofracTable + 1)
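#e.g. a raw intensity of 0 maps to log2(0 + 1) = 0 and an intensity of 7 maps to log2(7 + 1) = 3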
return(cbind(ids, cofracTable))
}))
})
#centered log-transform
if(logTransform %in% c("CLR", "normCLR")){
profiles <- lapply(profiles, function(curReplicate){
return(lapply(curReplicate, function(curCond){
#store profile (protein/peptide) IDs in variable
ids <- dplyr::select_if(curCond, is.character)
#keep intensity values only
cofracTable <- dplyr::select_if(curCond, is.numeric)
#subtract mean in each fraction
cofracTable <- apply(cofracTable, 2, function(x) return(x - mean(x)))
return(cbind(ids, cofracTable))
}))
})
}
}
#smooth profiles
if(smooth){
profiles <- lapply(profiles, function(curReplicate){
return(lapply(curReplicate, smoothProfile, windowSize = smoothingFractions))
})
}
return(profiles)
}
#' Normalize CF/MS profiles within fractions
#'
#' Helper function to normalize peptide/protein CF/MS profiles within each
#' fraction and across experimental conditions.
#'
#' @param condTables List of dataframes. One dataframe per experimental
#' condition. The first columns have the protein ID, and peptide sequence
#' if the input is peptide-level data, and the subsequent columns have the
#' per-fraction intensities.
#'
#' @return List of dataframes after normalization. Same structure as the
#' \code{condTables} input list.
normFrac <- function(condTables){
#merge conditions into one table
condTables <- do.call("rbind", condTables)
#convert to long format
condTables <- reshape2::melt(condTables)
#normalize intensities of each peptide in each fraction across conditions
condTables <- condTables %>%
dplyr::group_by(variable) %>%
dplyr::mutate(value = value / (sum(value) + 0.1))
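#the 0.1 pseudocount in the denominator avoids division by zero in fractions with no signal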
#separate peptides by condition
condTables <- split(condTables, condTables$cond)
#re-convert to wide format
if("sequence" %in% colnames(condTables))
condTables <- lapply(condTables, function(x){
return(reshape2::dcast(x, sequence + protein ~ variable))})
else
condTables <- lapply(condTables, function(x){
return(reshape2::dcast(x, protein ~ variable))})
return(condTables)
}
#' Smooth signals with moving average
#'
#' Helper function to smooth peptide/protein CF/MS profiles using a
#' moving average approach. The *forecast* package is used in this function.
#'
#' @param condTables List of dataframes. One dataframe per experimental
#' condition. The first columns have the protein ID, and peptide sequence,
#' if the input is peptide-level data, and the subsequent columns have the
#' per-fraction intensities.
#' @param windowSize Integer indicating the number of fractions
#' to be considered for smoothing the profiles using a moving average window.
#'
#' @return List of dataframes after smoothing. Same structure as the
#' \code{condTables} input list.
#function for smoothing all profiles in a given table
smoothProfile <- function(condTables, windowSize){
#store profile (protein/peptide) IDs in variable
ids <- dplyr::select_if(condTables, is.character)
#keep intensity values only
cofracTable <- dplyr::select_if(condTables, is.numeric)
#number of fractions to pad on each side of the profile
pad <- round(windowSize / 2)
#iterate over all rows of the table
smoothed <- apply(cofracTable, 1, function(profile){
#zero-pad the profile so the moving average is defined at the edges
toSmooth <- c(rep(0, pad), t(profile), rep(0, pad))
#moving average, then drop the padded positions to recover the original profile length
return(forecast::ma(toSmooth, windowSize)[(pad + 1):(pad + length(profile))])
})
return(cbind(ids, t(smoothed)))
}
<file_sep>/Scripts/formatMaxQuantProteins.R
#' @name formatMaxQuantProteins
#' @title Format MaxQuant proteinGroups.txt file
#'
#' @description Formats TMT-multiplexed proteinGroups.txt file from MaxQuant output to
#' programming-friendly protein-by-fraction tables.
#'
#' @param file_path Character vector giving the list of local paths
#' to the proteinGroups.txt files. One file per replicate.
#' @param conditions Character vector with names of experimental conditions.
#'
#' @return List of lists. One list per replicate named \code{replicate_X}.
#' Each replicate list consists of one protein-by-fraction data frame per condition.
#' The first column of data frame has the protein ID as stored in the 'Gene names' column,
#' and the following columns contain the corrected reporter intensities for the fractions.
#' Only the proteins that were detected in all replicates are retained.
#'
#' @examples
#' proteins <- formatMaxQuantProteins(c("Documents/proteinGroups_rep1.txt", "Documents/proteinGroups_rep2.txt"),
#' conditions = c("Brain", "Lung", "Liver"))
#'
#' @import dplyr data.table
NULL
#' @export
formatMaxQuantProteins <- function(file_path, conditions){
#iterate over files (one per replicate)
prots <- lapply(file_path, function(file){
#read in MaxQuant file
prots <- as.data.frame(data.table::fread(file))
#make sure correct columns are present
if("Sequence" %in% colnames(prots)){
stop("Incorrect format. Please input proteinGroups.txt file from MaxQuant output.")
}
#remove contaminants (guard against the case where no contaminant rows are present)
conIdx <- grep("^CON", prots$`Protein IDs`)
if(length(conIdx) > 0)
prots <- prots[-conIdx,]
#keep relevant columns only (peptide/protein name, reporter corrected intensity values)
tmp <- prots[,c("Gene names",
grep(pattern = "corrected", x = colnames(prots), value = T))][,-c(3:(length(conditions)+2))]
#create empty list to store data for each condition
conds <- vector(mode = "list", length = length(conditions))
names(conds) <- conditions
#create peptide/protein table list for easier downstream processing
#loop over conditions
for(i in seq_along(conditions)){
#get relevant condition
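#the matched columns are assumed to follow MaxQuant's TMT naming,
#e.g. "Reporter intensity corrected 0 <fraction label>" for the first condition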
tmp <- prots[ , c(1, grep(pattern = paste0("Reporter intensity corrected ",i-1,"[[:blank:]][[:alpha:]]"), x = colnames(prots)))]
#rename column names
colnames(tmp) <- c("protein",
gsub(pattern = paste0("Reporter intensity corrected ", i-1, "[[:blank:]][[:alpha:]]"), replacement = "", x = colnames(tmp)[-1]))
#reorder columns by fractions
tmp <- tmp[,c(1, order(as.numeric(colnames(tmp)[-1]))+1)]
#set rownames to unique IDs
rownames(tmp) <- tmp[,1]
#add to list
conds[[i]] <- tmp
}
#change variable names and remove temporary variables
prots <- conds
rm(conds); rm(tmp)
#remove nameless proteins
prots <- lapply(prots, function(curCond){return(curCond[curCond$protein != "",])})
})
#name lists according to corresponding replicate
names(prots) <- paste0("replicate_", seq(length(prots)))
#if multiple replicates present
if(length(prots) > 1){
## retain overlapping proteins only ##
#get all proteins detected in each replicate
allprots <- lapply(prots, function(curRep){
return(unlist(lapply(curRep, "[", 1)))
})
#find overlapping proteins
commonprots <- Reduce(intersect, allprots)
#subset tables to overlapping proteins only
prots <- lapply(prots, function(curRep){
return(lapply(curRep, function(curCond){
return(curCond[commonprots,])
}))
})
}
return(prots)
}
<file_sep>/Scripts/createProteinProfiles.R
#' @name createProteinProfiles
#' @title Create protein profiles from peptide-level CF/MS data
#'
#' @description Collapse peptide-level dataset to the corresponding protein-level intensities.
#'
#' There are three steps involved:
#' * ***Filter outlier peptides***: Each peptide is mapped to a particular
#' protein by the peptide mapping software like *MaxQuant*
#' in an upstream step. It is expected that the peptides that map to the same
#' protein should have similar profiles to each other, and as such any peptide
#' that deviates significantly from its group is likely a faulty measurement.
#' Two strategies for identifying these measurements are described below.
#'
#' * ***Select top-intensity peptides***: A common strategy for creating
#' protein profiles from peptide profiles is to average or sum the intensities
#' of the top two or three high-intensity peptides to represent the final
#' protein profile (e.g. Silva et. al, 2005). This function gives the user
#' the option to perform such filtering based on any given number of peptides.
#'
#' * ***Collapse peptide intensities to protein-level intensities***:
#' Sum or average sibling peptide intensities across fractions to create
#' the corresponding protein-level profiles.
#'
#' @param peps List of lists. One list per replicate.
#' Each replicate list consists of one peptide/protein-by-fraction data frame
#' per condition.
#' @param cores Integer indicating the number of computer cores to use for
#' parallel computation of distances.
#' @param filterOutliers Character variable indicating type of sibling peptide
#' outlier filtering to be applied. The three options are:
#' * "clustering": Perform average-linkage hierarchical clustering on the
#' sibling peptides based on their similarity to each other, split the
#' resulting dendrogram into two clusters, and retain the peptides belonging
#' to the larger cluster as being representative of the protein.
#' * "quantile": Compute each peptide’s average similarity to its
#' ‘sibling peptides’ and filter out the peptides beyond a quantile-based
#' threshold, which by default is 0.95. This means that the 95% most similar
#' peptides would be retained for downstream analysis.
#' * "none" *or any other value*: No peptide outlier filtering.
#' Any value aside from the above two will not apply any filtering
#' to the data.
#' Default method is "clustering".
#' @param threshold Numeric between 0 and 1 indicating threshold to filter
#' outlier peptides based upon. Only relevant if quantile filtering is
#' chosen. Default is 0.95.
#' @param topN Integer indicating how many peptides to retain per protein.
#' Selected peptides are those with the highest cross-fraction summed
#' intensities. Default is NA which retains all peptides.
#' @param distanceMetric Character indicating which distance metric to use to
#' compute sibling peptide similarity based upon. Choices are Wasserstein
#' distance, Euclidean distance, and Pearson distance (1 - Pearson R2).
#' Default is Pearson.
#' @param method Character specifying how to merge peptide intensities to
#' protein intensities. One of "sum" or "average". Default is "average".
#'
#' @return List of lists. One list per replicate named \code{replicate_X}.
#' Each replicate list consists of one protein-by-fraction data frame per condition.
#' The first column of data frame has the protein ID,
#' and the following columns contain the intensities for each fraction.
#' Only the peptides that were detected in all replicates are retained.
#'
#' @examples
#' proteins <- createProteinProfiles(peps, topN = 3)
#'
#' @import dplyr data.table reshape2 igraph pbapply
NULL
#' @export
#'
createProteinProfiles <- function(peps,
cores = 1,
filterOutliers = c("clustering", "quantile", "none"),
threshold = 0.95,
topN = NA,
distanceMetric = c( 'pearson', 'wasserstein', 'euclidean'),
method = c("average", "sum")){
#filter outlier peptides
if(filterOutliers %in% c("quantile", "clustering")){
peps <- lapply(peps, function(curReplicate){
return(lapply(curReplicate, filterOutlierPeptides,
metric = distanceMetric, method = filterOutliers,
cores = cores, threshold = threshold))
})
}
#select top N intensity proteins per protein
if(is.numeric(topN)){
peps <- lapply(peps, function(curReplicate){
return(lapply(curReplicate, selectTopPeptides, n = topN))
})
}
#merge peptide profiles into protein profiles
proteins <- lapply(peps, function(curReplicate){
return(suppressMessages(
lapply(curReplicate, mergePeptideProfiles, method = method))) })
return(proteins)
}
## Helper function: compute distances between sibling peptides ##
# input: profile-by-fraction matrix, first column has peptide IDs,
# second column has protein IDs and must be named 'protein'
# output: list of sibling peptide distance matrices, one per protein
siblingDistances <- function(pepTable, metric = c('wasserstein', 'euclidean', 'pearson'), cores = 6){
#split input table into tables of sibling peptides
profiles <- split(pepTable, with(pepTable, protein), drop=TRUE)
#compute all pairwise distances for each group of sibling peptides
sibDistances <- pblapply(profiles, function(siblings){
#if single-peptide protein return a distance of 0
if(nrow(siblings) == 1)
sibs <- data.frame(id1 = siblings[,1], id2 = siblings[,1], distance = 0)
else
sibs <- computeDistances(cofracTable = siblings, metric = metric)
return(sibs)
}, cl = cores)
return(sibDistances)
}
## Filter peptides based on similarity to sibling peptides ##
#input: Profile-by-fraction matrix. First column has peptide IDs, second column has protein IDs and must be named 'protein'
#input: Distance metric for sibling peptides.
#input: Method for filtering peptides. One of 'quantile' or 'clustering'.
#input: Distance cutoff threshold as quantile (e.g. 0.95 would filter out the top 5% peptides with highest distance)
filterOutlierPeptides <- function(pepTable,
metric = c('pearson', 'euclidean', 'wasserstein'),
method = c("clustering", "quantile"),
threshold = 0.95,
cores){
#compute distances between sibling peptides
sibDistances <- siblingDistances(pepTable, metric, cores)
#filter outlier peptides
if(method == "quantile"){
#collapse sibling peptide distances into one table
collapsedDis <- do.call("rbind", sibDistances)
#in case of 'infinity' distance, set to max distance
collapsedDis$distance[is.infinite(collapsedDis$distance)] <- max(collapsedDis$distance[is.finite(collapsedDis$distance)])
#list of all unique peptides
allPeps <- pepTable[,1]
#get average distance for each peptide
#print("Computing average agreement of each peptide with its siblings...")
sibAgreement <- pblapply(allPeps, function(pep){
return(mean(unlist(collapsedDis[collapsedDis$id1==pep | collapsedDis$id2==pep, "distance"]), na.rm = TRUE))
},cl = cores)
names(sibAgreement) <- allPeps
#retain peptides which fall below threshold
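#(e.g. with the default threshold of 0.95, the 5% of peptides most dissimilar
#to their siblings are dropped)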
sibAgreement[unlist(sibAgreement) > quantile(unlist(sibAgreement), threshold, na.rm = TRUE)] <- NULL
sibAgreement[is.na(unlist(sibAgreement))] <- NULL
#return filtered peptide table
return(pepTable[pepTable[,1] %in% names(sibAgreement),])
}
if(method == "clustering"){
#print("Filtering outlier peptides using clustering...")
#iterate over proteins and decide which peptides to retain
clusteredPeps <- pblapply(sibDistances, function(siblings){
#remove any NA values (non-reproducible peptides)
siblings <- siblings[complete.cases(siblings),]
#if single-peptide protein return the peptide
if(length(union(siblings$id1, siblings$id2)) <= 1){
return(siblings$id1)
}
#convert to distance matrix via igraph
distMatrix <- igraph::graph.data.frame(siblings[,1:2], directed = FALSE)
E(distMatrix)$weight <- unlist(siblings[,3])
distMatrix <- as.dist(as.matrix(as_adjacency_matrix(distMatrix, attr = "weight", sparse = TRUE)))
#average-linkage hierarchical clustering with 2 clusters
pepsClust <- cutree(hclust(distMatrix, method = "average"), k = 2)
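#the larger cluster is assumed to hold the well-behaved sibling peptides,
#while the smaller cluster is treated as outliers and discarded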
#get peptides belonging to largest cluster
toKeep <- names(pepsClust)[pepsClust == tail(names(sort(table(pepsClust))), 1)]
#return peptides to keep
return(toKeep)
}, cl = cores)
#keep peptides after filtering outliers
return(pepTable[pepTable[,1] %in% unlist(clusteredPeps),])
}
}
## Select top N peptides ##
#input: Profile-by-fraction matrix. First column has peptide IDs, second column has protein IDs and must be named 'protein'
#input: Number of top-intensity peptides to retain as representative of each protein 'n'. Default number is 3.
selectTopPeptides <- function(pepTable, n = 3){
#sum up intensity of peptides
pepSums <- rowSums(pepTable[,-c(1,2)])
#create table with peptide, parent protein, and sum of intensity
pepSums <- data.table::data.table(cbind(pepTable[,c(1,2)], pepSums), key = "protein")
#in case of replicates, keep max sum across replicates for each peptide
pepSums <- pepSums %>%
dplyr::group_by_at(1) %>%
dplyr::filter(pepSums == max(pepSums))
#find top N peptides for each protein
topPeps <- pepSums %>%
dplyr::arrange(desc(pepSums)) %>%
dplyr::group_by(protein) %>%
dplyr::slice(1:n)
#retain only the top N peptides for each protein
filteredPeps <- pepTable[unlist(pepTable[,1]) %in% unlist(topPeps[,1]),]
return(filteredPeps)
}
# input: Profile-by-fraction matrix, first column has peptide IDs, second column has protein IDs and must be named 'protein'.
# input: Method for merging peptide profiles.
# output: One protein-by-fraction matrix.
mergePeptideProfiles <- function(pepTable, method = c("average", "sum")){
#convert data to long format
suppressMessages(pepTable <- reshape2::melt(pepTable))
#sum up peptide profiles belonging to each protein
if(method == "sum"){
#sum peptides in each fraction
proteinTable <- pepTable %>%
dplyr::group_by(protein, variable) %>%
dplyr::summarise(intensity = sum(value))
#convert to wide format
proteinTable <- reshape2::dcast(proteinTable, protein ~ variable)
#remove proteins with zero intensity values
proteinTable <- proteinTable[rowSums(proteinTable[,-1]) > 0, ]
#set rownames to protein IDs
rownames(proteinTable) <- proteinTable$protein
return(proteinTable)
}
#average peptide profiles belonging to each protein
if(method == "average"){
#average peptides in each fraction
proteinTable <- pepTable %>%
dplyr::group_by(protein, variable) %>%
dplyr::summarise(intensity = mean(value))
#convert to wide format
proteinTable <- reshape2::dcast(proteinTable, protein ~ variable)
#remove proteins with zero intensity values
proteinTable <- proteinTable[rowSums(proteinTable[,-1]) > 0, ]
#set rownames to protein IDs
rownames(proteinTable) <- proteinTable$protein
return(proteinTable)
}
}
<file_sep>// Copyright 2017, Square, Inc.
// Package status provides system-wide status.
package status
import (
"time"
"github.com/square/spincycle/job-runner/chain"
"github.com/square/spincycle/proto"
)
type Manager interface {
Running() ([]proto.JobStatus, error)
}
type manager struct {
cr chain.Repo
}
func NewManager(cr chain.Repo) *manager {
m := &manager{
cr: cr,
}
return m
}
func (m *manager) Running() ([]proto.JobStatus, error) {
chains, err := m.cr.GetAll()
if err != nil {
return nil, err
}
running := []proto.JobStatus{}
for _, c := range chains {
for jobId, j := range c.Running {
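// j.StartTs is a Unix timestamp in nanoseconds, hence the time.Unix(0, ...) conversion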
startTime := time.Unix(0, j.StartTs)
s := proto.JobStatus{
RequestId: c.RequestId(),
JobId: jobId,
State: proto.STATE_RUNNING, // must be since it's in chain.Running
Runtime: time.Now().Sub(startTime).Seconds(),
N: j.N,
}
running = append(running, s)
}
}
return running, nil
}
<file_sep>[[constraint]]
name = "github.com/Sirupsen/logrus"
version = "1.0.2"
[[constraint]]
name = "github.com/alicebob/miniredis"
version = "2.1.0"
[[constraint]]
name = "github.com/garyburd/redigo"
version = "1.3.0"
[[constraint]]
name = "github.com/go-test/deep"
version = "1.0.0"
[[constraint]]
name = "github.com/rs/xid"
version = "1.1.0"
[prune]
go-tests = true
unused-packages = true
[[constraint]]
name = "github.com/labstack/echo"
version = "3.2.0"
<file_sep>// Copyright 2017, Square, Inc.
package chain_test
import (
"sort"
"sync"
"testing"
"github.com/go-test/deep"
"github.com/square/spincycle/job-runner/chain"
"github.com/square/spincycle/proto"
testutil "github.com/square/spincycle/test"
"github.com/square/spincycle/test/mock"
)
// Return an error when we try to create an invalid chain.
func TestRunErrorNoFirstJob(t *testing.T) {
chainRepo := chain.NewMemoryRepo()
rf := &mock.RunnerFactory{}
rmc := &mock.RMClient{}
f := chain.NewTraverserFactory(chainRepo, rf, rmc)
jc := proto.JobChain{
RequestId: "abc",
Jobs: testutil.InitJobs(2),
AdjacencyList: map[string][]string{},
}
tr, err := f.Make(jc)
if err == nil {
t.Errorf("expected an error but did not get one")
}
if tr != nil {
t.Errorf("got non-nil Traverser, expected nil on error")
}
}
// All jobs in the chain complete successfully.
func TestRunComplete(t *testing.T) {
requestId := "abc"
chainRepo := chain.NewMemoryRepo()
rf := &mock.RunnerFactory{
RunnersToReturn: map[string]*mock.Runner{
"job1": &mock.Runner{RunReturn: proto.STATE_COMPLETE},
"job2": &mock.Runner{RunReturn: proto.STATE_COMPLETE},
"job3": &mock.Runner{RunReturn: proto.STATE_COMPLETE},
"job4": &mock.Runner{RunReturn: proto.STATE_COMPLETE},
},
}
rmc := &mock.RMClient{}
jc := &proto.JobChain{
RequestId: requestId,
Jobs: testutil.InitJobs(4),
AdjacencyList: map[string][]string{
"job1": {"job2", "job3"},
"job2": {"job4"},
"job3": {"job4"},
},
}
c := chain.NewChain(jc)
traverser := chain.NewTraverser(c, chainRepo, rf, rmc)
err := traverser.Run()
if err != nil {
t.Errorf("err = %s, expected nil", err)
}
if c.JobChain.State != proto.STATE_COMPLETE {
t.Errorf("chain state = %d, expected %d", c.JobChain.State, proto.STATE_COMPLETE)
}
_, err = chainRepo.Get(requestId)
if err != chain.ErrNotFound {
t.Error("chain still in repo, expected it to be removed")
}
}
// Not all jobs in the chain complete successfully.
func TestRunNotComplete(t *testing.T) {
chainRepo := chain.NewMemoryRepo()
rf := &mock.RunnerFactory{
RunnersToReturn: map[string]*mock.Runner{
"job1": &mock.Runner{RunReturn: proto.STATE_COMPLETE},
"job2": &mock.Runner{RunReturn: proto.STATE_COMPLETE},
"job3": &mock.Runner{RunReturn: proto.STATE_FAIL},
"job4": &mock.Runner{RunReturn: proto.STATE_FAIL},
},
}
rmc := &mock.RMClient{}
jc := &proto.JobChain{
RequestId: "abc",
Jobs: testutil.InitJobs(4),
AdjacencyList: map[string][]string{
"job1": {"job2", "job3"},
"job2": {"job4"},
"job3": {"job4"},
},
}
c := chain.NewChain(jc)
traverser := chain.NewTraverser(c, chainRepo, rf, rmc)
err := traverser.Run()
if err != nil {
t.Errorf("err = %s, expected nil", err)
}
if c.JobChain.State != proto.STATE_FAIL {
t.Errorf("chain state = %d, expected %d", c.JobChain.State, proto.STATE_FAIL)
}
if c.JobChain.Jobs["job4"].State != proto.STATE_PENDING {
t.Errorf("job4 state = %d, expected %d", c.JobChain.Jobs["job4"].State, proto.STATE_PENDING)
}
_, err = chainRepo.Get("abc")
if err != chain.ErrNotFound {
t.Error("chain still in repo, expected it to be removed")
}
}
// Unknown job state should not cause the traverser to panic when running.
func TestJobUnknownState(t *testing.T) {
chainRepo := chain.NewMemoryRepo()
rf := &mock.RunnerFactory{
RunnersToReturn: map[string]*mock.Runner{
"job1": &mock.Runner{RunReturn: proto.STATE_COMPLETE},
"job2": &mock.Runner{RunReturn: proto.STATE_COMPLETE},
"job3": &mock.Runner{RunReturn: proto.STATE_COMPLETE},
"job4": &mock.Runner{RunReturn: proto.STATE_COMPLETE},
},
}
rmc := &mock.RMClient{}
jc := &proto.JobChain{
RequestId: "abc",
Jobs: testutil.InitJobs(3),
AdjacencyList: map[string][]string{
"job1": {"job2"},
"job2": {"job3"},
"job3": {},
},
}
c := chain.NewChain(jc)
for _, j := range c.JobChain.Jobs {
j.State = proto.STATE_UNKNOWN
}
traverser := chain.NewTraverser(c, chainRepo, rf, rmc)
if err := traverser.Run(); err != nil {
t.Errorf("err = %s, expected nil", err)
}
if c.JobChain.State != proto.STATE_COMPLETE {
t.Errorf("chain state = %d, expected %d", c.JobChain.State, proto.STATE_COMPLETE)
}
_, err := chainRepo.Get("abc")
if err != chain.ErrNotFound {
t.Error("chain still in repo, expected it to be removed")
}
}
// Make sure jobData gets updated as we expect.
func TestJobData(t *testing.T) {
chainRepo := chain.NewMemoryRepo()
rf := &mock.RunnerFactory{
RunnersToReturn: map[string]*mock.Runner{
"job1": &mock.Runner{RunReturn: proto.STATE_COMPLETE, AddedJobData: map[string]interface{}{"k1": "v1", "k2": "v2"}},
"job2": &mock.Runner{RunReturn: proto.STATE_COMPLETE, AddedJobData: map[string]interface{}{}},
"job3": &mock.Runner{RunReturn: proto.STATE_COMPLETE, AddedJobData: map[string]interface{}{}},
"job4": &mock.Runner{RunReturn: proto.STATE_COMPLETE, AddedJobData: map[string]interface{}{"k1": "v9"}},
},
}
rmc := &mock.RMClient{}
jc := &proto.JobChain{
RequestId: "abc",
Jobs: testutil.InitJobs(4),
AdjacencyList: map[string][]string{
"job1": {"job2", "job3"},
"job2": {"job4"},
"job3": {"job4"},
},
}
c := chain.NewChain(jc)
traverser := chain.NewTraverser(c, chainRepo, rf, rmc)
expectedJobData := map[string]interface{}{"k1": "v9", "k2": "v2"}
err := traverser.Run()
if err != nil {
t.Errorf("err = %s, expected nil", err)
}
if diff := deep.Equal(jc.Jobs["job4"].Data, expectedJobData); diff != nil {
t.Error(diff)
}
}
// Error creating a runner.
func TestRunJobsRunnerError(t *testing.T) {
requestId := "abc"
chainRepo := chain.NewMemoryRepo()
rf := &mock.RunnerFactory{
RunnersToReturn: map[string]*mock.Runner{
"job1": &mock.Runner{RunReturn: proto.STATE_COMPLETE},
},
// This is what causes the error, even though the job returns STATE_COMPLETE
MakeErr: mock.ErrRunner,
}
var recvdjl proto.JobLog // record the jl that gets sent to the RM
rmc := &mock.RMClient{
CreateJLFunc: func(reqId string, jl proto.JobLog) error {
if reqId == requestId {
recvdjl = jl
return nil
}
return mock.ErrRMClient
},
}
jc := &proto.JobChain{
RequestId: requestId,
Jobs: testutil.InitJobs(1),
}
c := chain.NewChain(jc)
traverser := chain.NewTraverser(c, chainRepo, rf, rmc)
err := traverser.Run()
if err != nil {
t.Errorf("err = %s, expected nil", err)
}
if jc.State != proto.STATE_FAIL {
t.Errorf("chain state = %d, expected %d", jc.State, proto.STATE_FAIL)
}
// Make sure the JL sent to the RM matches what we expect.
if recvdjl.RequestId != requestId {
t.Errorf("jl request id = %d, expected %d", recvdjl.RequestId, requestId)
}
if recvdjl.JobId != "job1" {
t.Errorf("jl job id = %d, expected %d", recvdjl.JobId, "job1")
}
if recvdjl.State != proto.STATE_FAIL {
t.Errorf("jl state = %d, expected %d", recvdjl.State, proto.STATE_FAIL)
}
if recvdjl.Error == "" {
t.Errorf("jl error is empty, expected something")
}
if recvdjl.StartedAt != 0 {
t.Errorf("jobLog.StartedAt = %d, expected 0", recvdjl.StartedAt)
}
if recvdjl.FinishedAt != 0 {
t.Errorf("jobLog.Finished = %d, expected 0", recvdjl.FinishedAt)
}
_, err = chainRepo.Get("abc")
if err != chain.ErrNotFound {
t.Error("chain still in repo, expected it to be removed")
}
}
// Stop the traverser and all running jobs.
func TestStop(t *testing.T) {
chainRepo := chain.NewMemoryRepo()
var runWg sync.WaitGroup
runWg.Add(2)
rf := &mock.RunnerFactory{
RunnersToReturn: map[string]*mock.Runner{
"job1": &mock.Runner{RunReturn: proto.STATE_COMPLETE},
"job2": &mock.Runner{RunReturn: proto.STATE_FAIL, RunBlock: make(chan struct{}), RunWg: &runWg},
"job3": &mock.Runner{RunReturn: proto.STATE_FAIL, RunBlock: make(chan struct{}), RunWg: &runWg},
},
}
rmc := &mock.RMClient{}
jc := &proto.JobChain{
RequestId: "abc",
Jobs: testutil.InitJobs(4),
AdjacencyList: map[string][]string{
"job1": {"job2", "job3"},
"job2": {"job4"},
"job3": {"job4"},
},
}
c := chain.NewChain(jc)
traverser := chain.NewTraverser(c, chainRepo, rf, rmc)
// Start the traverser.
doneChan := make(chan struct{})
go func() {
traverser.Run()
close(doneChan)
}()
// Wait until jobs 2 and 3 are running (until they call wg.Done()). They will run
// until Stop is called (which will close their RunBlock channels).
runWg.Wait()
err := traverser.Stop()
if err != nil {
t.Errorf("err = %s, expected nil", err)
}
// Wait for the traverser to finish.
<-doneChan
if c.JobChain.State != proto.STATE_FAIL {
t.Errorf("chain state = %d, expected %d", c.JobChain.State, proto.STATE_COMPLETE)
}
if c.JobChain.Jobs["job2"].State != proto.STATE_FAIL {
t.Errorf("job2 state = %d, expected %d", c.JobChain.Jobs["job2"].State, proto.STATE_FAIL)
}
if c.JobChain.Jobs["job3"].State != proto.STATE_FAIL {
t.Errorf("job3 state = %d, expected %d", c.JobChain.Jobs["job3"].State, proto.STATE_FAIL)
}
if c.JobChain.Jobs["job4"].State != proto.STATE_PENDING {
t.Errorf("job4 state = %d, expected %d", c.JobChain.Jobs["job4"].State, proto.STATE_PENDING)
}
_, err = chainRepo.Get("abc")
if err != chain.ErrNotFound {
t.Error("chain still in repo, expected it to be removed")
}
}
// Get the status from all running jobs.
func TestStatus(t *testing.T) {
chainRepo := chain.NewMemoryRepo()
var runWg sync.WaitGroup
runWg.Add(2)
rf := &mock.RunnerFactory{
RunnersToReturn: map[string]*mock.Runner{
"job1": &mock.Runner{RunReturn: proto.STATE_COMPLETE, StatusResp: "job1 running"},
"job2": &mock.Runner{RunReturn: proto.STATE_COMPLETE, StatusResp: "job2 running", RunBlock: make(chan struct{}), RunWg: &runWg},
"job3": &mock.Runner{RunReturn: proto.STATE_COMPLETE, StatusResp: "job3 running", RunBlock: make(chan struct{}), RunWg: &runWg},
"job4": &mock.Runner{RunReturn: proto.STATE_COMPLETE, StatusResp: "job4 running"},
},
}
rmc := &mock.RMClient{}
jc := &proto.JobChain{
RequestId: "abc",
Jobs: testutil.InitJobs(4),
AdjacencyList: map[string][]string{
"job1": {"job2", "job3"},
"job2": {"job4"},
"job3": {"job4"},
},
}
c := chain.NewChain(jc)
traverser := chain.NewTraverser(c, chainRepo, rf, rmc)
// Start the traverser.
doneChan := make(chan struct{})
go func() {
traverser.Run()
close(doneChan)
}()
// Wait until jobs 2 and 3 are running (until they call wg.Done()). They will run
// until Status is called (which will close their RunBlock channels).
runWg.Wait()
expectedStatus := proto.JobChainStatus{
RequestId: "abc",
JobStatuses: proto.JobStatuses{
proto.JobStatus{
JobId: "job2",
State: proto.STATE_RUNNING,
Status: "job2 running",
N: 0,
},
proto.JobStatus{
JobId: "job3",
State: proto.STATE_RUNNING,
Status: "job3 running",
N: 0,
},
},
}
status, err := traverser.Status()
sort.Sort(status.JobStatuses)
if err != nil {
t.Errorf("err = %s, expected nil", err)
}
if diff := deep.Equal(status, expectedStatus); diff != nil {
t.Error(diff)
}
// Wait for the traverser to finish.
<-doneChan
if c.JobChain.State != proto.STATE_COMPLETE {
t.Errorf("chain state = %d, expected %d", c.JobChain.State, proto.STATE_COMPLETE)
}
}
<file_sep>CREATE TABLE IF NOT EXISTS `requests` (
`request_id` BINARY(20) NOT NULL,
`type` VARBINARY(75) NOT NULL,
`state` TINYINT UNSIGNED NOT NULL DEFAULT 0,
`user` VARCHAR(100) NULL DEFAULT NULL,
`created_at` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
`started_at` TIMESTAMP NULL DEFAULT NULL,
`finished_at` TIMESTAMP NULL DEFAULT NULL,
`total_jobs` INT UNSIGNED NOT NULL DEFAULT 0,
`finished_jobs` INT UNSIGNED NOT NULL DEFAULT 0,
PRIMARY KEY (`request_id`),
INDEX (`created_at`), -- recently created
INDEX (`finished_at`), -- recently finished
INDEX (`state`) -- currently running
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
CREATE TABLE IF NOT EXISTS `raw_requests` (
`request_id` BINARY(20) NOT NULL,
`request` BLOB NOT NULL,
`job_chain` BLOB NOT NULL,
PRIMARY KEY (`request_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
CREATE TABLE IF NOT EXISTS `job_log` (
`request_id` BINARY(20) NOT NULL,
`job_id` BINARY(4) NOT NULL,
`name` VARBINARY(100) NOT NULL,
`try` SMALLINT NOT NULL DEFAULT 0,
`type` VARBINARY(75) NOT NULL,
`state` TINYINT UNSIGNED NOT NULL DEFAULT 0,
`started_at` BIGINT UNSIGNED NOT NULL DEFAULT 0, -- Unix time (nanoseconds)
`finished_at` BIGINT UNSIGNED NOT NULL DEFAULT 0, -- Unix time (nanoseconds)
`error` TEXT NULL DEFAULT NULL,
`exit` TINYINT UNSIGNED NULL DEFAULT NULL,
`stdout` LONGBLOB NULL DEFAULT NULL,
`stderr` LONGBLOB NULL DEFAULT NULL,
PRIMARY KEY (`request_id`, `job_id`, `try`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
<file_sep>// Copyright 2017-2018, Square, Inc.
package main
import (
"log"
"github.com/square/spincycle/job-runner/app"
"github.com/square/spincycle/job-runner/server"
)
func main() {
err := server.Run(app.Defaults())
log.Fatalf("Job Runner stopped: %s", err)
}
<file_sep>// Copyright 2017-2018, Square, Inc.
package app
import (
"database/sql"
"fmt"
"io/ioutil"
"log"
"net/http"
"os"
"time"
myconn "github.com/go-mysql/conn"
"github.com/go-sql-driver/mysql"
"github.com/square/spincycle/config"
jr "github.com/square/spincycle/job-runner"
"github.com/square/spincycle/jobs"
"github.com/square/spincycle/request-manager/grapher"
"github.com/square/spincycle/request-manager/id"
"github.com/square/spincycle/util"
)
type Context struct {
Hooks Hooks
Factories Factories
Config config.RequestManager
}
type Factories struct {
MakeGrapher func(Context) (grapher.GrapherFactory, error) // @fixme
MakeJobRunnerClient func(Context) (jr.Client, error)
MakeDbConnPool func(Context) (myconn.Connector, error)
}
type Hooks struct {
LoadConfig func(Context) (config.RequestManager, error)
Auth func(*http.Request) (bool, error)
SetUsername func(*http.Request) (string, error)
}
func Defaults() Context {
return Context{
Factories: Factories{
MakeGrapher: MakeGrapher,
MakeJobRunnerClient: MakeJobRunnerClient,
MakeDbConnPool: MakeDbConnPool,
},
Hooks: Hooks{
LoadConfig: LoadConfig,
SetUsername: (func(ireq *http.Request) (string, error) {
return "admin", nil
}),
},
}
}
func LoadConfig(ctx Context) (config.RequestManager, error) {
var cfgFile string
if len(os.Args) > 1 {
cfgFile = os.Args[1]
} else {
switch os.Getenv("ENVIRONMENT") {
case "staging":
cfgFile = "config/staging.yaml"
case "production":
cfgFile = "config/staging.yaml"
default:
cfgFile = "config/development.yaml"
}
}
var cfg config.RequestManager
err := config.Load(cfgFile, &cfg)
return cfg, err
}
func MakeGrapher(ctx Context) (grapher.GrapherFactory, error) {
allGrapherCfgs := grapher.Config{
Sequences: map[string]*grapher.SequenceSpec{},
}
// For each config in the cfg.SpecFileDir directory, read the file and
// then aggregate all of the resulting configs into a single struct.
files, err := ioutil.ReadDir(ctx.Config.SpecFileDir) // add your specs to this dir
if err != nil {
return nil, fmt.Errorf("error reading spec file dir %s: %s", ctx.Config.SpecFileDir, err)
}
for _, f := range files {
grapherCfg, err := grapher.ReadConfig(ctx.Config.SpecFileDir + "/" + f.Name())
if err != nil {
return nil, fmt.Errorf("error reading grapher config file %s: %s", f.Name(), err)
}
for k, v := range grapherCfg.Sequences {
allGrapherCfgs.Sequences[k] = v
}
}
idf := id.NewGeneratorFactory(4, 100) // generate 4-character ids for jobs
grf := grapher.NewGrapherFactory(jobs.Factory, &allGrapherCfgs, idf)
return grf, nil
}
func MakeJobRunnerClient(ctx Context) (jr.Client, error) {
httpClient := &http.Client{}
jrcfg := ctx.Config.JRClient
if jrcfg.TLS.CertFile != "" && jrcfg.TLS.KeyFile != "" && jrcfg.TLS.CAFile != "" {
tlsConfig, err := util.NewTLSConfig(jrcfg.TLS.CAFile, jrcfg.TLS.CertFile, jrcfg.TLS.KeyFile)
if err != nil {
return nil, fmt.Errorf("error loading JR client TLS config: %s", err)
}
httpClient = &http.Client{
Transport: &http.Transport{TLSClientConfig: tlsConfig},
}
}
jrc := jr.NewClient(httpClient, jrcfg.ServerURL)
return jrc, nil
}
func MakeDbConnPool(ctx Context) (myconn.Connector, error) {
dbcfg := ctx.Config.Db
dsn := dbcfg.DSN + "?parseTime=true" // always needs to be set
if dbcfg.TLS.CAFile != "" && dbcfg.TLS.CertFile != "" && dbcfg.TLS.KeyFile != "" {
tlsConfig, err := util.NewTLSConfig(dbcfg.TLS.CAFile, dbcfg.TLS.CertFile, dbcfg.TLS.KeyFile)
if err != nil {
log.Fatalf("error loading database TLS config: %s", err)
}
mysql.RegisterTLSConfig("custom", tlsConfig)
dsn += "&tls=custom"
}
db, err := sql.Open("mysql", dsn)
if err != nil {
return nil, fmt.Errorf("error creating sql.DB: %s", err)
}
db.SetMaxIdleConns(10)
db.SetMaxOpenConns(100)
db.SetConnMaxLifetime(12 * time.Hour)
dbc := myconn.NewPool(db)
return dbc, nil
}
<file_sep>// Copyright 2017, Square, Inc.
package status_test
import (
"sort"
"testing"
"time"
"github.com/go-test/deep"
"github.com/square/spincycle/job-runner/chain"
"github.com/square/spincycle/job-runner/status"
"github.com/square/spincycle/proto"
"github.com/square/spincycle/test"
)
func TestRunning(t *testing.T) {
jc1 := &proto.JobChain{
RequestId: "chain1",
AdjacencyList: map[string][]string{
"job1": []string{"job2", "job3"},
},
Jobs: map[string]proto.Job{
"job1": proto.Job{
Id: "job1",
Type: "type1",
},
},
}
c1 := chain.NewChain(jc1)
c1.SetJobState("job1", proto.STATE_RUNNING) // sets runtime start ts
// Any short delay to make Runtime differ. chain1/job1 will have a runtime
// roughly equal to this delay, and chain2/job2 will have a runtime closer
// to zero.
time.Sleep(250 * time.Millisecond)
jc2 := &proto.JobChain{
RequestId: "chain2",
AdjacencyList: map[string][]string{
"job2": []string{"job2"},
},
Jobs: map[string]proto.Job{
"job2": proto.Job{
Id: "job2",
Type: "type2",
},
},
}
c2 := chain.NewChain(jc2)
c2.SetJobState("job2", proto.STATE_RUNNING) // sets runtime start ts
repo := chain.NewMemoryRepo()
if err := repo.Add(c1); err != nil {
t.Fatalf("error in Add: %v", err)
}
if err := repo.Add(c2); err != nil {
t.Fatalf("error in Add: %v", err)
}
m := status.NewManager(repo)
got, err := m.Running()
if err != nil {
t.Fatal(err)
}
if len(got) != 2 {
t.Fatalf("got %d proto.JobStatus, expected 2", len(got))
}
sort.Sort(proto.JobStatuses(got))
expect := []proto.JobStatus{
{
RequestId: "chain1", // longer runtime because of delay ^
JobId: "job1",
State: proto.STATE_RUNNING,
N: 1,
},
{
RequestId: "chain2",
JobId: "job2",
State: proto.STATE_RUNNING,
N: 1,
},
}
if got[0].Runtime <= 0 {
t.Errorf("job1 runtime %f, expected > 0", got[0].Runtime)
}
if got[1].Runtime <= 0 {
t.Errorf("job2 runtime %f, expected > 0", got[1].Runtime)
}
// Runtime is nondeterministic
expect[0].Runtime = got[0].Runtime
expect[1].Runtime = got[1].Runtime
if diff := deep.Equal(got, expect); diff != nil {
test.Dump(got)
t.Error(diff)
}
// chain1 runtime should be > chain2
if got[0].Runtime <= got[1].Runtime {
t.Errorf("runtime chain1 %f <= chain2 %f", got[0].Runtime, got[1].Runtime)
}
}
<file_sep>package cmd
import (
"fmt"
"github.com/square/spincycle/proto"
"github.com/square/spincycle/spinc/app"
)
type Status struct {
ctx app.Context
reqId string
}
func NewStatus(ctx app.Context) *Status {
return &Status{
ctx: ctx,
}
}
func (c *Status) Prepare() error {
if len(c.ctx.Command.Args) == 0 {
return fmt.Errorf("Usage: spinc status <id>\n")
}
c.reqId = c.ctx.Command.Args[0]
return nil
}
func (c *Status) Run() error {
status, err := c.ctx.RMClient.RequestStatus(c.reqId)
if err != nil {
return err
}
if c.ctx.Options.Debug {
app.Debug("status: %#v", status)
}
if c.ctx.Hooks.CommandRunResult != nil {
c.ctx.Hooks.CommandRunResult(status, err)
return nil
}
fmt.Printf("state: %s\n", proto.StateName[status.State])
fmt.Printf("jobs done: %d\n", status.FinishedJobs)
fmt.Printf("jobs total: %d\n", status.TotalJobs)
fmt.Printf("created: %s\n", status.CreatedAt)
fmt.Printf("started: %s\n", status.StartedAt)
fmt.Printf("finished: %s\n", status.FinishedAt)
fmt.Printf("user: %s\n", status.User)
fmt.Printf("type: %s\n", status.Type)
fmt.Printf("id: %s\n", status.Id)
return nil
}
<file_sep>// Copyright 2017-2018, Square, Inc.
package grapher
import (
"io/ioutil"
"gopkg.in/yaml.v2"
)
// NodeSpec defines the structure expected from the yaml file to define each nodes.
type NodeSpec struct {
Name string `yaml:"name"` // unique name assigned to this node
Category string `yaml:"category"` // "job" or "sequence"
NodeType string `yaml:"type"` // the type of job or sequence to create
Each []string `yaml:"each"` // arguments to repeat over
Args []*NodeArg `yaml:"args"` // expected arguments
Sets []string `yaml:"sets"` // expected job args to be set
Dependencies []string `yaml:"deps"` // nodes with out-edges leading to this node
Retry uint `yaml:"retry"` // the number of times to retry a "job" that fails
RetryWait uint `yaml:"retryWait"` // the time, in seconds, to sleep between "job" retries
}
// NodeArg defines the structure expected from the yaml file to define a job's args.
type NodeArg struct {
Expected string `yaml:"expected"` // the name of the argument that this job expects
Given string `yaml:"given"` // the name of the argument that will be given to this job
}
// SequenceSpec defines the structure expected from the config yaml file to
// define each sequence
type SequenceSpec struct {
Name string `yaml:"name"` // name of the sequence
Args SequenceArgs `yaml:"args"` // arguments to the sequence
Nodes map[string]*NodeSpec `yaml:"nodes"` // list of nodes that are a part of the sequence
}
// SequenceArgs defines the structure expected from the config file to define
// a sequence's arguments. A sequence can have required arguments; any arguments
// on this list that are missing will result in an error from Grapher.
// A sequence can also have optional arguments; arguments on this list that are
// missing will not result in an error. Additionally, optional arguments can
// have default values that will be used if not explicitly given.
type SequenceArgs struct {
Required []*ArgSpec `yaml:"required"`
Optional []*ArgSpec `yaml:"optional"`
}
// ArgSpec defines the structure expected from the config to define sequence args.
type ArgSpec struct {
Name string `yaml:"name"`
Desc string `yaml:"desc"`
Default string `yaml:"default"`
}
// All Sequences in the yaml. Also contains the user defined no-op job.
type Config struct {
Sequences map[string]*SequenceSpec `yaml:"sequences"`
}
// ReadConfig will read from configFile and return a Config that the user
// can then use for NewGrapher(). configFile is expected to be in the yaml
// format specified.
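//
// A hedged sketch of that yaml layout (all names below are illustrative
// placeholders, not taken from any real spec file):
//
//   sequences:
//     my-sequence:
//       args:
//         required:
//           - name: hostname
//             desc: host to operate on
//       nodes:
//         do-work:
//           category: job
//           type: my-job-type
//           args:
//             - expected: hostname
//               given: hostname
//           deps: []
//           retry: 3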
func ReadConfig(configFile string) (*Config, error) {
sequenceData, err := ioutil.ReadFile(configFile)
if err != nil {
return nil, err
}
cfg := &Config{}
err = yaml.Unmarshal(sequenceData, cfg)
if err != nil {
return nil, err
}
for sequenceName, sequence := range cfg.Sequences {
sequence.Name = sequenceName
for nodeName, node := range sequence.Nodes {
node.Name = nodeName
}
}
return cfg, nil
}
// isSequence will return true if j is a Sequence, and false otherwise.
func (j *NodeSpec) isSequence() bool {
return j.Category == "sequence"
}
<file_sep>package cmd
import (
"fmt"
"github.com/square/spincycle/spinc/app"
)
const (
JOB_COL_LEN = 100
)
type Ps struct {
ctx app.Context
}
func NewPs(ctx app.Context) *Ps {
return &Ps{
ctx: ctx,
}
}
func (c *Ps) Prepare() error {
return nil
}
func (c *Ps) Run() error {
status, err := c.ctx.RMClient.SysStatRunning()
if err != nil {
return err
}
if c.ctx.Options.Debug {
app.Debug("status: %#v", status)
}
if c.ctx.Hooks.CommandRunResult != nil {
c.ctx.Hooks.CommandRunResult(status, err)
return nil
}
if len(status.Jobs) == 0 {
return nil
}
hdr := "%-32s %4s %5s %6s %s\n"
line := "%-32s %4d %5d %6s %s\n"
fmt.Fprintf(c.ctx.Out, hdr, "ID", "N", "NJOBS", "TIME", "JOB")
for _, r := range status.Jobs {
runtime := fmt.Sprintf("%.1f", r.Runtime)
job := r.JobId
m := 0
if status.Requests != nil {
if r, ok := status.Requests[r.RequestId]; ok {
m = r.TotalJobs
}
}
if len(job) > JOB_COL_LEN {
// "long_job_id@5" -> "..._job_id@5"
job = "..." + job[len(job)-(JOB_COL_LEN-3):len(job)] // +3 for "..."
}
fmt.Fprintf(c.ctx.Out, line, r.RequestId, r.N, m, runtime, job)
}
return nil
}
<file_sep>// Copyright 2017, Square, Inc.
package api_test
import (
"fmt"
"net/http"
"net/http/httptest"
"testing"
"github.com/go-test/deep"
"github.com/square/spincycle/proto"
"github.com/square/spincycle/request-manager/api"
"github.com/square/spincycle/request-manager/app"
testutil "github.com/square/spincycle/test"
"github.com/square/spincycle/test/mock"
)
var server *httptest.Server
func setup(rm *mock.RequestManager, jls *mock.JLStore) {
a := api.NewAPI(app.Defaults(), rm, jls, &mock.RMStatus{})
server = httptest.NewServer(a)
}
func cleanup() {
server.CloseClientConnections()
server.Close()
}
func baseURL() string {
if server != nil {
return server.URL + api.API_ROOT
}
return api.API_ROOT
}
// //////////////////////////////////////////////////////////////////////////
// Tests
// //////////////////////////////////////////////////////////////////////////
func TestNewRequestHandlerInvalidPayload(t *testing.T) {
payload := `"bad":"json"}` // Bad payload.
setup(&mock.RequestManager{}, &mock.JLStore{})
defer cleanup()
// Make the HTTP request.
statusCode, _, err := testutil.MakeHTTPRequest("POST", baseURL()+"requests", []byte(payload), nil)
if err != nil {
t.Fatal(err)
}
// Check that the status code is what we expect.
if statusCode != http.StatusBadRequest {
t.Errorf("response status = %d, expected %d", statusCode, http.StatusBadRequest)
}
}
func TestNewRequestHandlerRMError(t *testing.T) {
payload := `{"type":"something","args":{"first":"arg1"},"user":"mike"}`
// Create a mock request manager that will return an error and record the
// request params it receives.
var rmReqParams proto.CreateRequestParams
rm := &mock.RequestManager{
CreateFunc: func(reqParams proto.CreateRequestParams) (proto.Request, error) {
rmReqParams = reqParams
return proto.Request{}, mock.ErrRequestManager
},
}
setup(rm, &mock.JLStore{})
defer cleanup()
// Make the HTTP request.
statusCode, _, err := testutil.MakeHTTPRequest("POST", baseURL()+"requests", []byte(payload), nil)
if err != nil {
t.Fatal(err)
}
// Check that the status code is what we expect.
if statusCode != http.StatusInternalServerError {
t.Errorf("response status = %d, expected %d", statusCode, http.StatusInternalServerError)
}
// Check the request params sent to the request manager.
expectedReqParams := proto.CreateRequestParams{
Type: "something",
Args: map[string]interface{}{
"first": "arg1",
},
User: "admin", // the value from the payload is overwritten
}
if diff := deep.Equal(rmReqParams, expectedReqParams); diff != nil {
t.Error(diff)
}
}
func TestNewRequestHandlerSuccess(t *testing.T) {
payload := `{"type":"something","args":{"first":"arg1","second":"arg2"}}`
reqId := "abcd1234"
req := proto.Request{
Id: reqId,
State: proto.STATE_PENDING,
}
// Create a mock request manager that will return a request and record the
// request params it receives.
var rmReqParams proto.CreateRequestParams
rm := &mock.RequestManager{
CreateFunc: func(reqParams proto.CreateRequestParams) (proto.Request, error) {
rmReqParams = reqParams
return req, nil
},
}
setup(rm, &mock.JLStore{})
defer cleanup()
// Make the HTTP request.
var actualReq proto.Request
statusCode, headers, err := testutil.MakeHTTPRequest("POST", baseURL()+"requests", []byte(payload), &actualReq)
if err != nil {
t.Fatal(err)
}
// Check that the status code is what we expect.
if statusCode != http.StatusCreated {
t.Errorf("response status = %d, expected %d", statusCode, http.StatusCreated)
}
// Check that the response body is what we expect.
if diff := deep.Equal(actualReq, req); diff != nil {
t.Error(diff)
}
// Check the response location header.
expectedLocation := api.API_ROOT + "requests/" + req.Id
if len(headers["Location"]) < 1 {
t.Errorf("location header not set at all")
} else {
if headers["Location"][0] != expectedLocation {
t.Errorf("location header = %s, expected %s", headers["Location"][0], expectedLocation)
}
}
// Check the request params sent to the request manager.
expectedReqParams := proto.CreateRequestParams{
Type: "something",
Args: map[string]interface{}{
"first": "arg1",
"second": "arg2",
},
User: "admin",
}
if diff := deep.Equal(rmReqParams, expectedReqParams); diff != nil {
t.Error(diff)
}
}
func TestGetRequestHandlerSuccess(t *testing.T) {
reqId := "abcd1234"
req := proto.Request{
Id: reqId,
State: proto.STATE_PENDING,
}
// Create a mock request manager that will return a request.
rm := &mock.RequestManager{
GetFunc: func(r string) (proto.Request, error) {
return req, nil
},
}
setup(rm, &mock.JLStore{})
defer cleanup()
// Make the HTTP request.
var actualReq proto.Request
statusCode, _, err := testutil.MakeHTTPRequest("GET", baseURL()+"requests/"+reqId, []byte{}, &actualReq)
if err != nil {
t.Fatal(err)
}
// Check that the status code is what we expect.
if statusCode != http.StatusOK {
t.Errorf("response status = %d, expected %d", statusCode, http.StatusOK)
}
// Check that the response body is what we expect.
if diff := deep.Equal(actualReq, req); diff != nil {
t.Error(diff)
}
}
func TestStartRequestHandlerSuccess(t *testing.T) {
reqId := "abcd1234"
setup(&mock.RequestManager{}, &mock.JLStore{})
defer cleanup()
// Make the HTTP request.
statusCode, _, err := testutil.MakeHTTPRequest("PUT", baseURL()+"requests/"+reqId+"/start", []byte{}, nil)
if err != nil {
t.Fatal(err)
}
// Check that the status code is what we expect.
if statusCode != http.StatusOK {
t.Errorf("response status = %d, expected %d", statusCode, http.StatusOK)
}
}
func TestFinishRequestHandlerSuccess(t *testing.T) {
reqId := "abcd1234"
payload := []byte(fmt.Sprintf("{\"state\":%d}", proto.STATE_COMPLETE))
// Create a mock request manager that will record the finish params it receives.
var rmFinishParams proto.FinishRequestParams
rm := &mock.RequestManager{
FinishFunc: func(r string, f proto.FinishRequestParams) error {
rmFinishParams = f
return nil
},
}
setup(rm, &mock.JLStore{})
defer cleanup()
// Make the HTTP request.
statusCode, _, err := testutil.MakeHTTPRequest("PUT", baseURL()+"requests/"+reqId+"/finish", payload, nil)
if err != nil {
t.Fatal(err)
}
// Check that the status code is what we expect.
if statusCode != http.StatusOK {
t.Errorf("response status = %d, expected %d", statusCode, http.StatusOK)
}
// Check that the finish params sent to the request manager are what we expect.
expectedFinishParams := proto.FinishRequestParams{
State: proto.STATE_COMPLETE,
}
if diff := deep.Equal(rmFinishParams, expectedFinishParams); diff != nil {
t.Error(diff)
}
}
func TestStopRequestHandlerSuccess(t *testing.T) {
reqId := "abcd1234"
setup(&mock.RequestManager{}, &mock.JLStore{})
defer cleanup()
// Make the HTTP request.
statusCode, _, err := testutil.MakeHTTPRequest("PUT", baseURL()+"requests/"+reqId+"/stop", []byte{}, nil)
if err != nil {
t.Fatal(err)
}
// Check that the status code is what we expect.
if statusCode != http.StatusOK {
t.Errorf("response status = %d, expected %d", statusCode, http.StatusOK)
}
}
func TestStatusRequestHandlerSuccess(t *testing.T) {
reqId := "abcd1234"
reqStatus := proto.RequestStatus{
Request: proto.Request{
Id: reqId,
},
JobChainStatus: proto.JobChainStatus{
JobStatuses: proto.JobStatuses{
proto.JobStatus{JobId: "j1", Status: "status1", State: proto.STATE_RUNNING},
proto.JobStatus{JobId: "j2", Status: "status2", State: proto.STATE_FAIL},
},
},
}
// Create a mock request manager that will return a request status.
rm := &mock.RequestManager{
StatusFunc: func(r string) (proto.RequestStatus, error) {
return reqStatus, nil
},
}
setup(rm, &mock.JLStore{})
defer cleanup()
// Make the HTTP request.
var actualReqStatus proto.RequestStatus
statusCode, _, err := testutil.MakeHTTPRequest("GET", baseURL()+"requests/"+reqId+"/status", []byte{}, &actualReqStatus)
if err != nil {
t.Fatal(err)
}
// Check that the status code is what we expect.
if statusCode != http.StatusOK {
t.Errorf("response status = %d, expected %d", statusCode, http.StatusOK)
}
// Check that the request status is what we expect.
if diff := deep.Equal(actualReqStatus, reqStatus); diff != nil {
t.Error(diff)
}
}
func TestGetJobChainRequestHandlerSuccess(t *testing.T) {
reqId := "abcd1234"
jc := proto.JobChain{
RequestId: reqId,
State: proto.STATE_RUNNING,
}
// Create a mock jobchain store that will return a job chain.
rm := &mock.RequestManager{
JobChainFunc: func(r string) (proto.JobChain, error) {
return jc, nil
},
}
setup(rm, &mock.JLStore{})
defer cleanup()
// Make the HTTP request.
var actualJc proto.JobChain
statusCode, _, err := testutil.MakeHTTPRequest("GET", baseURL()+"requests/"+reqId+"/job-chain", []byte{}, &actualJc)
if err != nil {
t.Fatal(err)
}
// Check that the status code is what we expect.
if statusCode != http.StatusOK {
t.Errorf("response status = %d, expected %d", statusCode, http.StatusOK)
}
// Check that the job chain is what we expect.
if diff := deep.Equal(actualJc, jc); diff != nil {
t.Error(diff)
}
}
func TestGetJLHandlerSuccess(t *testing.T) {
reqId := "abcd1234"
jobId := "job1"
jl := proto.JobLog{
RequestId: reqId,
State: proto.STATE_COMPLETE,
}
// Create a mock joblog store that will return a jl.
jls := &mock.JLStore{
GetFunc: func(r string, j string) (proto.JobLog, error) {
return jl, nil
},
}
setup(&mock.RequestManager{}, jls)
defer cleanup()
// Make the HTTP request.
var actualjl proto.JobLog
statusCode, _, err := testutil.MakeHTTPRequest("GET",
baseURL()+"requests/"+reqId+"/log/"+jobId, []byte{}, &actualjl)
if err != nil {
t.Fatal(err)
}
// Check that the status code is what we expect.
if statusCode != http.StatusOK {
t.Errorf("response status = %d, expected %d", statusCode, http.StatusOK)
}
// Check that the job chain is what we expect.
if diff := deep.Equal(actualjl, jl); diff != nil {
t.Error(diff)
}
}
func TestCreateJLHandlerSuccess(t *testing.T) {
reqId := "abcd1234"
payload := []byte(fmt.Sprintf("{\"requestId\":\"%s\",\"state\":%d}", reqId, proto.STATE_COMPLETE))
jl := proto.JobLog{
RequestId: reqId,
State: proto.STATE_COMPLETE,
}
// Create a mock joblog store that will return a jl and record the jl it receives.
var rmjl proto.JobLog
jls := &mock.JLStore{
CreateFunc: func(r string, j proto.JobLog) (proto.JobLog, error) {
rmjl = j
return jl, nil
},
}
// Create a mock request manager that will record if it is called.
var rmCalled bool
rm := &mock.RequestManager{
IncrementFinishedJobsFunc: func(r string) error {
rmCalled = true
return nil
},
}
setup(rm, jls)
defer cleanup()
// Make the HTTP request.
var actualjl proto.JobLog
statusCode, _, err := testutil.MakeHTTPRequest("POST",
baseURL()+"requests/"+reqId+"/log", payload, &actualjl)
if err != nil {
t.Fatal(err)
}
// Check that the status code is what we expect.
if statusCode != http.StatusCreated {
t.Errorf("response status = %d, expected %d", statusCode, http.StatusCreated)
}
// Check that the response body is what we expect.
if diff := deep.Equal(actualjl, jl); diff != nil {
t.Error(diff)
}
// Check the jl sent to the request manager is what we expect.
if diff := deep.Equal(rmjl, jl); diff != nil {
t.Error(diff)
}
// Check that the IncrementFinishedJobs method on the request manager was called.
if rmCalled != true {
t.Errorf("IncrementFinishedJob on the request manager was not called, expected it to be")
}
}
func TestCreateJLHandlerJobFailed(t *testing.T) {
reqId := "abcd1234"
payload := []byte(fmt.Sprintf("{\"requestId\":\"%s\",\"state\":%d}", reqId, proto.STATE_FAIL))
jl := proto.JobLog{
RequestId: reqId,
State: proto.STATE_FAIL,
}
// Create a mock joblog store that will return a jl.
jls := &mock.JLStore{
CreateFunc: func(r string, j proto.JobLog) (proto.JobLog, error) {
return jl, nil
},
}
// Create a mock request manager that will record if it is called.
var rmCalled bool
rm := &mock.RequestManager{
IncrementFinishedJobsFunc: func(r string) error {
rmCalled = true
return nil
},
}
setup(rm, jls)
defer cleanup()
// Make the HTTP request.
var actualjl proto.JobLog
statusCode, _, err := testutil.MakeHTTPRequest("POST",
baseURL()+"requests/"+reqId+"/log", payload, &actualjl)
if err != nil {
t.Fatal(err)
}
// Check that the status code is what we expect.
if statusCode != http.StatusCreated {
t.Errorf("response status = %d, expected %d", statusCode, http.StatusCreated)
}
// Check that the IncrementFinishedJobs method on the request manager was NOT called.
if rmCalled != false {
t.Errorf("IncrementFinishedJob on the request manager was called, expected it not to be")
}
}
<file_sep>// Copyright 2017, Square, Inc.
package chain
import (
"fmt"
log "github.com/Sirupsen/logrus"
"github.com/square/spincycle/job-runner/runner"
"github.com/square/spincycle/proto"
rm "github.com/square/spincycle/request-manager"
)
// A Traverser provides the ability to run a job chain while respecting the
// dependencies between the jobs.
type Traverser interface {
// Run traverses a job chain and runs all of the jobs in it. It starts by
// running the first job in the chain and then, if that job completed
// successfully, running its adjacent jobs. This process continues until there
// are no more jobs to run, or until the Stop method is called on the traverser.
//
// It returns an error if it fails to start.
Run() error
// Stop makes a traverser stop traversing its job chain. It also sends a stop
// signal to all of the jobs that a traverser is running.
//
// It returns an error if it fails to stop all running jobs.
Stop() error
// Status gets the status of all running and failed jobs. Since a job can only
// run when all of its ancestors have completed, the state of the entire chain
// can be inferred from this information - every job in the chain before a
// running or failed job must be complete, and every job in the chain after a
// running or failed job must be pending.
//
// It returns an error if it fails to get the status of all running jobs.
Status() (proto.JobChainStatus, error)
}
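// Illustrative usage (a sketch, not an exported example from this package):
// callers obtain a Traverser from a TraverserFactory and then drive it. The
// variable names below (chainRepo, runnerFactory, rmClient, jobChain) are
// placeholders for values the caller is assumed to already have.
//
// tf := NewTraverserFactory(chainRepo, runnerFactory, rmClient)
// tr, err := tf.Make(jobChain)
// if err != nil {
//     // the chain failed validation or could not be saved to the repo
// }
// // Run blocks until the chain is done; Status and Stop may be called
// // concurrently from other goroutines while it runs.
// err = tr.Run()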
// A TraverserFactory makes new Traversers.
type TraverserFactory interface {
Make(proto.JobChain) (Traverser, error)
}
type traverserFactory struct {
chainRepo Repo
rf runner.Factory
rmc rm.Client
}
func NewTraverserFactory(cr Repo, rf runner.Factory, rmc rm.Client) TraverserFactory {
return &traverserFactory{
chainRepo: cr,
rf: rf,
rmc: rmc,
}
}
// Make makes a Traverser for the given job chain. The chain is first validated
// and saved to the chain repo.
func (f *traverserFactory) Make(jobChain proto.JobChain) (Traverser, error) {
// Convert/wrap chain from proto to Go object.
chain := NewChain(&jobChain)
// Validate the chain
err := chain.Validate()
if err != nil {
return nil, err
}
// Save the chain. If this JR instance dies, another can recover the chain
// from the repo.
err = f.chainRepo.Set(chain)
if err != nil {
return nil, fmt.Errorf("cannot save chain to repo: %s", err)
}
// Create and return a traverser for the chain. The traverser is responsible
// for the chain: running, cleaning up, removing from repo when done, etc.
// And traverser and chain have the same lifespan: traverser is done when
// chain is done.
tr := NewTraverser(chain, f.chainRepo, f.rf, f.rmc)
return tr, nil
}
// A traverser represents a job chain and everything needed to traverse it.
type traverser struct {
// The chain that will be traversed.
chain *chain
// Repo for accessing/updating chains.
chainRepo Repo
// Factory for creating job runners.
rf runner.Factory
// Repo for keeping track of active jobs.
runnerRepo runner.Repo
// Used to stop a running traverser.
stopChan chan struct{}
// Queue for processing jobs that need to run.
runJobChan chan proto.Job
// Queue for processing jobs that are done running.
doneJobChan chan proto.Job
// Client for communicating with the Request Manager.
rmc rm.Client
// Used for logging.
logger *log.Entry
}
// NewTraverser creates a new traverser for a job chain.
func NewTraverser(chain *chain, cr Repo, rf runner.Factory, rmc rm.Client) *traverser {
return &traverser{
chain: chain,
chainRepo: cr,
rf: rf,
runnerRepo: runner.NewRepo(),
stopChan: make(chan struct{}),
runJobChan: make(chan proto.Job),
doneJobChan: make(chan proto.Job),
rmc: rmc,
// Include the request id in all logging.
logger: log.WithFields(log.Fields{"requestId": chain.RequestId()}),
}
}
// Run runs all jobs in the chain and blocks until all jobs complete or a job fails.
func (t *traverser) Run() error {
t.logger.Infof("chain traverser start")
defer t.logger.Infof("chain traverser done")
var finalState byte
defer func() {
// Set final state of chain in repo. This will be very short-lived because
// next we'll finalize the request in the RM. Although short-lived, we set
// it in case there are problems finalizing with the RM.
t.chain.SetState(finalState)
t.chainRepo.Set(t.chain)
// Mark the request as finished in the Request Manager.
if err := t.rmc.FinishRequest(t.chain.RequestId(), finalState); err != nil {
t.logger.Errorf("problem reporting status of the finished chain: %s", err)
} else {
t.chainRepo.Remove(t.chain.RequestId())
}
}()
firstJob, err := t.chain.FirstJob()
if err != nil {
finalState = proto.STATE_FAIL
return err
}
// Set the starting state of the chain.
t.chain.SetState(proto.STATE_RUNNING)
t.chainRepo.Set(t.chain)
// Start a goroutine to run jobs. This consumes from the runJobChan. When
// jobs are done, they will be sent to the doneJobChan, which gets consumed
// from right below this.
go t.runJobs()
// Set the state of the first job in the chain to RUNNING.
t.chain.SetJobState(firstJob.Id, proto.STATE_RUNNING)
t.chainRepo.Set(t.chain)
// Add the first job in the chain to the runJobChan.
t.logger.Infof("sending the first job (%s) to runJobChan", firstJob.Id)
t.runJobChan <- firstJob
// When a job finishes, update the state of the chain and figure out what
// to do next (check to see if the entire chain is done running, and
// enqueue the next jobs if there are any).
JOB_REAPER:
for doneJ := range t.doneJobChan {
jLogger := t.logger.WithFields(log.Fields{"job_id": doneJ.Id})
// Set the final state of the job in the chain.
t.chain.SetJobState(doneJ.Id, doneJ.State)
t.chainRepo.Set(t.chain)
// Check to see if the entire chain is done. If it is, break out of
// the loop on doneJobChan because there is no more work for us to do.
//
// A chain is done if no more jobs in it can run. A chain is
// complete if every job in it completed successfully.
done, complete := t.chain.IsDone()
if done {
close(t.runJobChan)
if complete {
t.logger.Infof("chain is done, all jobs finished successfully")
finalState = proto.STATE_COMPLETE
} else {
t.logger.Warn("chain is done, some jobs failed")
finalState = proto.STATE_FAIL
}
break
}
// If the job did not complete successfully, then ignore subsequent jobs.
// For example, if job B of A -> B -> C fails, then C is not run.
if doneJ.State != proto.STATE_COMPLETE {
jLogger.Warn("job did not complete successfully")
continue JOB_REAPER
}
// Job completed successfully, so enqueue/run the next jobs in the chain.
// This will yield multiple next jobs when the current job is the start of
// a sequence (a fanout node).
jLogger.Infof("job completed successfully")
NEXT_JOB:
for _, nextJ := range t.chain.NextJobs(doneJ.Id) {
nextJLogger := jLogger.WithFields(log.Fields{"next_job_id": nextJ.Id})
// Check to make sure the job is ready to run. It might not be if it has
// upstream dependencies that are still running.
if !t.chain.JobIsReady(nextJ.Id) {
nextJLogger.Infof("next job is not ready to run - not enqueuing it")
continue NEXT_JOB
}
nextJLogger.Infof("next job is ready to run - enqueuing it")
// Copy the jobData from the job that just finished to the next job.
// It is important to note that when a job has multiple parent
// jobs, it will get its jobData from whichever parent finishes
// last. Therefore, a job should never rely on jobData that was
// created during an unrelated sequence at any time earlier in
// the chain.
for k, v := range doneJ.Data {
nextJ.Data[k] = v
}
// Set the state of the job in the chain to "Running".
// @todo: this should be in the goroutine in runJobs, because it's not
// truly running until that point, but then this causes race conditions
// which indicates we need to more closely examine concurrent
// access to internal chain data.
t.chain.SetJobState(nextJ.Id, proto.STATE_RUNNING)
t.runJobChan <- nextJ // add the job to the run queue
}
// Update the chain repo now that the next jobs have been started above.
t.chainRepo.Set(t.chain)
}
return nil
}
// Stop stops the traverser if it's running.
func (t *traverser) Stop() error {
t.logger.Infof("stopping traverser and all jobs")
// Stop the traverser (i.e., stop running new jobs).
close(t.stopChan)
// Get all of the active runners for this traverser from the repo. Only runners
// that are in the repo will be stopped.
activeRunners, err := t.runnerRepo.Items()
if err != nil {
return err
}
// Call Stop on each runner, and then remove it from the repo.
for jobId, runner := range activeRunners {
err := runner.Stop() // this should return quickly
if err != nil {
return err
}
t.runnerRepo.Remove(jobId)
}
return nil
}
// Status returns the status of currently running jobs in the chain.
func (t *traverser) Status() (proto.JobChainStatus, error) {
t.logger.Infof("getting the status of all running jobs")
var jcStatus proto.JobChainStatus
var jobStatuses []proto.JobStatus
// Get all of the active runners for this traverser from the repo. Only runners
// that are in the repo will have their statuses queried.
activeRunners, err := t.runnerRepo.Items()
if err != nil {
return jcStatus, err
}
// Get the status and state of each job runner.
for jobId, runner := range activeRunners {
jobStatus := proto.JobStatus{
JobId: jobId,
Name: t.chain.JobChain.Jobs[jobId].Name, // the name of the job
State: t.chain.JobState(jobId), // get the state of the job
Status: runner.Status(), // get the job status. this should return quickly
}
jobStatuses = append(jobStatuses, jobStatus)
}
jcStatus = proto.JobChainStatus{
RequestId: t.chain.RequestId(),
JobStatuses: jobStatuses,
}
return jcStatus, nil
}
// -------------------------------------------------------------------------- //
// runJobs loops on the runJobChannel, and for each job that comes through the
// channel, it creates a job.Job interface and runs it in a goroutine. If there
// are any errors creating the job.Job it creates a JL that contains the error
// and attaches it to the job. When it's done, it sends the job out through the
// doneJobChan.
func (t *traverser) runJobs() {
for runnableJob := range t.runJobChan {
go func(pJob proto.Job) {
// Always return the job when done, else the traverser will block.
defer func() { t.doneJobChan <- pJob }()
// Make a job runner. If an error is encountered, set the
// state of the job to FAIL and create a JL with the error.
runner, err := t.rf.Make(pJob, t.chain.RequestId())
if err != nil {
pJob.State = proto.STATE_FAIL
t.sendJL(pJob, err) // need to send a JL to the RM so that it knows this job failed
return
}
// Add the runner to the repo. Runners in the repo are used
// by the Status and Stop methods on the traverser.
t.runnerRepo.Set(pJob.Id, runner)
defer t.runnerRepo.Remove(pJob.Id)
// Bail out if Stop was called. It is important that this check happens AFTER
// the runner is added to the repo, because if Stop gets called between the
// time that a job runner is created and it is added to the repo, there will
// be nothing to stop that job from running.
select {
case <-t.stopChan:
pJob.State = proto.STATE_STOPPED
err = fmt.Errorf("not starting job because traverser has already been stopped")
t.sendJL(pJob, err) // need to send a JL to the RM so that it knows this job failed
return
default:
}
// Run the job. This is a blocking operation that could take a long time.
finalState := runner.Run(pJob.Data)
// The traverser only cares about if a job completes or fails. Therefore,
// we set the state of every job that isn't COMPLETE to be FAIL.
if finalState != proto.STATE_COMPLETE {
finalState = proto.STATE_FAIL
}
pJob.State = finalState
}(runnableJob)
}
}
func (t *traverser) sendJL(pJob proto.Job, err error) {
jLogger := t.logger.WithFields(log.Fields{"job_id": pJob.Id})
jl := proto.JobLog{
RequestId: t.chain.RequestId(),
JobId: pJob.Id,
Name: pJob.Name,
Type: pJob.Type,
StartedAt: 0, // zero because the job never ran
FinishedAt: 0,
State: pJob.State,
Exit: 1,
Error: err.Error(),
}
// Send the JL to the RM.
err = t.rmc.CreateJL(t.chain.RequestId(), jl)
if err != nil {
jLogger.Errorf("problem sending job log (%#v) to the RM: %s", jl, err)
}
}
<file_sep>// Copyright 2017, Square, Inc.
package runner
import (
"fmt"
"github.com/orcaman/concurrent-map"
)
// Repo is a small wrapper around a concurrent map that provides the ability to
// store and retrieve Runners in a thread-safe way.
type Repo interface {
Set(key string, value Runner)
Remove(key string)
Items() (map[string]Runner, error)
}
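// Illustrative usage (a sketch): the traverser stores each job's Runner here
// while the job runs so that Stop and Status can reach it. "r" stands for any
// value implementing Runner.
//
// repo := NewRepo()
// repo.Set("job1", r)
// runners, _ := repo.Items() // snapshot map of job id => Runner
// repo.Remove("job1")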
type repo struct {
c cmap.ConcurrentMap
}
func NewRepo() Repo {
return &repo{
c: cmap.New(),
}
}
// Set sets a Runner in the repo.
func (r *repo) Set(key string, value Runner) {
r.c.Set(key, value)
}
// Remove removes a runner from the repo.
func (r *repo) Remove(key string) {
r.c.Remove(key)
}
// Items returns a map of key => Runner with all the Runners in the repo.
func (r *repo) Items() (map[string]Runner, error) {
runners := map[string]Runner{} // key => runner
vals := r.c.Items()
for key, val := range vals {
runner, ok := val.(Runner)
if !ok {
return runners, fmt.Errorf("invalid runner in repo for key=%s", key) // should be impossible
}
runners[key] = runner
}
return runners, nil
}
<file_sep>// Copyright 2017, Square, Inc.
package chain
import (
"reflect"
"sort"
"testing"
"github.com/square/spincycle/proto"
testutil "github.com/square/spincycle/test"
)
func TestFirstJobMultiple(t *testing.T) {
jc := &proto.JobChain{
Jobs: testutil.InitJobs(4),
AdjacencyList: map[string][]string{
"job1": {"job3"},
"job2": {"job3"},
"job3": {"job4"},
},
}
c := NewChain(jc)
_, err := c.FirstJob()
if err == nil {
t.Errorf("expected an error, but did not get one")
}
}
func TestFirstJobOne(t *testing.T) {
jc := &proto.JobChain{
Jobs: testutil.InitJobs(4),
AdjacencyList: map[string][]string{
"job1": {"job2", "job3"},
"job2": {"job4"},
"job3": {"job4"},
},
}
c := NewChain(jc)
expectedFirstJob := c.JobChain.Jobs["job1"]
firstJob, err := c.FirstJob()
if !reflect.DeepEqual(firstJob, expectedFirstJob) {
t.Errorf("firstJob = %v, expected %v", firstJob, expectedFirstJob)
}
if err != nil {
t.Errorf("err = %s, expected nil", err)
}
}
func TestLastJobMultiple(t *testing.T) {
jc := &proto.JobChain{
Jobs: testutil.InitJobs(3),
AdjacencyList: map[string][]string{
"job1": {"job2", "job3"},
},
}
c := NewChain(jc)
_, err := c.LastJob()
if err == nil {
t.Errorf("expected an error, but did not get one")
}
}
func TestLastJobOne(t *testing.T) {
jc := &proto.JobChain{
Jobs: testutil.InitJobs(4),
AdjacencyList: map[string][]string{
"job1": {"job2", "job3"},
"job2": {"job4"},
"job3": {"job4"},
},
}
c := NewChain(jc)
expectedLastJob := c.JobChain.Jobs["job4"]
lastJob, err := c.LastJob()
if !reflect.DeepEqual(lastJob, expectedLastJob) {
t.Errorf("lastJob = %v, expected %v", lastJob, expectedLastJob)
}
if err != nil {
t.Errorf("err = %s, expected nil", err)
}
}
func TestNextJobs(t *testing.T) {
jc := &proto.JobChain{
Jobs: testutil.InitJobs(4),
AdjacencyList: map[string][]string{
"job1": {"job2", "job3"},
"job2": {"job4"},
"job3": {"job4"},
},
}
c := NewChain(jc)
expectedNextJobs := proto.Jobs{c.JobChain.Jobs["job2"], c.JobChain.Jobs["job3"]}
sort.Sort(expectedNextJobs)
nextJobs := c.NextJobs("job1")
sort.Sort(nextJobs)
if !reflect.DeepEqual(nextJobs, expectedNextJobs) {
t.Errorf("nextJobs = %v, want %v", nextJobs, expectedNextJobs)
}
nextJobs = c.NextJobs("job4")
if len(nextJobs) != 0 {
t.Errorf("nextJobs count = %d, want 0", len(nextJobs))
}
}
func TestPreviousJobs(t *testing.T) {
jc := &proto.JobChain{
Jobs: testutil.InitJobs(4),
AdjacencyList: map[string][]string{
"job1": {"job2", "job3"},
"job2": {"job4"},
"job3": {"job4"},
},
}
c := NewChain(jc)
expectedPreviousJobs := proto.Jobs{c.JobChain.Jobs["job2"], c.JobChain.Jobs["job3"]}
sort.Sort(expectedPreviousJobs)
previousJobs := c.PreviousJobs("job4")
sort.Sort(previousJobs)
if !reflect.DeepEqual(previousJobs, expectedPreviousJobs) {
t.Errorf("previousJobs = %v, want %v", previousJobs, expectedPreviousJobs)
}
previousJobs = c.PreviousJobs("job1")
if len(previousJobs) != 0 {
t.Errorf("previousJobs count = %d, want 0", len(previousJobs))
}
}
func TestJobIsReady(t *testing.T) {
jc := &proto.JobChain{
Jobs: testutil.InitJobs(4),
AdjacencyList: map[string][]string{
"job1": {"job2", "job3"},
"job2": {"job4"},
"job3": {"job4"},
},
}
c := NewChain(jc)
c.SetJobState("job2", proto.STATE_COMPLETE)
c.SetJobState("job3", proto.STATE_PENDING)
expectedReady := false
ready := c.JobIsReady("job4")
if ready != expectedReady {
t.Errorf("ready = %t, want %t", ready, expectedReady)
}
expectedReady = true
ready = c.JobIsReady("job5")
if ready != expectedReady {
t.Errorf("ready = %t, want %t", ready, expectedReady)
}
}
// When the chain is not done or complete.
func TestIsDoneJobRunning(t *testing.T) {
jc := &proto.JobChain{
Jobs: testutil.InitJobs(4),
AdjacencyList: map[string][]string{
"job1": {"job2", "job3"},
"job2": {"job4"},
},
}
c := NewChain(jc)
c.SetJobState("job1", proto.STATE_RUNNING)
expectedDone := false
expectedComplete := false
done, complete := c.IsDone()
if done != expectedDone || complete != expectedComplete {
t.Errorf("done = %t, complete = %t, want %t and %t", done, complete, expectedDone, expectedComplete)
}
}
// When the chain is done but not complete.
func TestIsDoneNotComplete(t *testing.T) {
jc := &proto.JobChain{
Jobs: testutil.InitJobs(4),
AdjacencyList: map[string][]string{
"job1": {"job2", "job3"},
"job2": {"job4"},
},
}
c := NewChain(jc)
c.SetJobState("job1", proto.STATE_COMPLETE)
c.SetJobState("job2", proto.STATE_FAIL)
c.SetJobState("job3", proto.STATE_COMPLETE)
c.SetJobState("job4", proto.STATE_PENDING)
expectedDone := true
expectedComplete := false
done, complete := c.IsDone()
if done != expectedDone || complete != expectedComplete {
t.Errorf("done = %t, complete = %t, want %t and %t", done, complete, expectedDone, expectedComplete)
}
// Make sure we can handle unknown states
c.SetJobState("job4", proto.STATE_UNKNOWN)
done, complete = c.IsDone()
if done != expectedDone || complete != expectedComplete {
t.Errorf("done = %t, complete = %t, want %t and %t", done, complete, expectedDone, expectedComplete)
}
}
// When the chain is done and complete.
func TestIsDoneComplete(t *testing.T) {
jc := &proto.JobChain{
Jobs: testutil.InitJobs(4),
AdjacencyList: map[string][]string{
"job1": {"job2", "job3"},
"job2": {"job4"},
},
}
c := NewChain(jc)
c.SetJobState("job1", proto.STATE_COMPLETE)
c.SetJobState("job2", proto.STATE_COMPLETE)
c.SetJobState("job3", proto.STATE_COMPLETE)
c.SetJobState("job4", proto.STATE_COMPLETE)
expectedDone := true
expectedComplete := true
done, complete := c.IsDone()
if done != expectedDone || complete != expectedComplete {
t.Errorf("done = %t, complete = %t, want %t and %t", done, complete, expectedDone, expectedComplete)
}
}
func TestSetJobState(t *testing.T) {
jc := &proto.JobChain{
Jobs: testutil.InitJobs(1),
}
c := NewChain(jc)
c.SetJobState("job1", proto.STATE_COMPLETE)
if c.JobChain.Jobs["job1"].State != proto.STATE_COMPLETE {
t.Errorf("State = %d, want %d", c.JobChain.Jobs["job1"].State, proto.STATE_COMPLETE)
}
}
func TestSetState(t *testing.T) {
jc := &proto.JobChain{}
c := NewChain(jc)
c.SetState(proto.STATE_RUNNING)
if c.JobChain.State != proto.STATE_RUNNING {
t.Errorf("State = %d, want %d", c.JobChain.State, proto.STATE_RUNNING)
}
}
func TestIndegreeCounts(t *testing.T) {
jc := &proto.JobChain{
Jobs: testutil.InitJobs(9),
AdjacencyList: map[string][]string{
"job1": {"job2", "job3"},
"job2": {"job4", "job5"},
"job3": {"job5", "job6"},
"job4": {"job6", "job7"},
"job5": {"job6"},
"job6": {"job8"},
"job7": {"job8"},
},
}
c := NewChain(jc)
expectedCounts := map[string]int{
"job1": 0,
"job2": 1,
"job3": 1,
"job4": 1,
"job5": 2,
"job6": 3,
"job7": 1,
"job8": 2,
"job9": 0,
}
counts := c.indegreeCounts()
if !reflect.DeepEqual(counts, expectedCounts) {
t.Errorf("counts = %v, want %v", counts, expectedCounts)
}
}
func TestOutdegreeCounts(t *testing.T) {
jc := &proto.JobChain{
Jobs: testutil.InitJobs(7),
AdjacencyList: map[string][]string{
"job1": {"job2", "job3"},
"job2": {"job4", "job5", "job6"},
"job3": {"job5", "job6"},
"job4": {"job5", "job6"},
"job5": {"job6"},
"job6": {"job7"},
},
}
c := NewChain(jc)
expectedCounts := map[string]int{
"job1": 2,
"job2": 3,
"job3": 2,
"job4": 2,
"job5": 1,
"job6": 1,
"job7": 0,
}
counts := c.outdegreeCounts()
if !reflect.DeepEqual(counts, expectedCounts) {
t.Errorf("counts = %v, want %v", counts, expectedCounts)
}
}
func TestIsAcyclic(t *testing.T) {
// No cycle in the chain.
jc := &proto.JobChain{
Jobs: testutil.InitJobs(4),
AdjacencyList: map[string][]string{
"job1": {"job2", "job3"},
"job2": {"job4"},
"job3": {"job4"},
},
}
c := NewChain(jc)
expectedIsAcyclic := true
isAcyclic := c.isAcyclic()
if isAcyclic != expectedIsAcyclic {
t.Errorf("isAcyclic = %t, want %t", isAcyclic, expectedIsAcyclic)
}
// Cycle from end to beginning of the chain (i.e., there is no first job).
jc = &proto.JobChain{
Jobs: testutil.InitJobs(4),
AdjacencyList: map[string][]string{
"job1": {"job2", "job3"},
"job2": {"job4"},
"job3": {"job4"},
"job4": {"job1"},
},
}
c = NewChain(jc)
expectedIsAcyclic = false
isAcyclic = c.isAcyclic()
if isAcyclic != expectedIsAcyclic {
t.Errorf("isAcyclic = %t, want %t", isAcyclic, expectedIsAcyclic)
}
// Cycle in the middle of the chain.
jc = &proto.JobChain{
Jobs: testutil.InitJobs(4),
AdjacencyList: map[string][]string{
"job1": {"job2", "job3"},
"job2": {"job4"},
"job3": {"job5"},
"job4": {"job5"},
"job5": {"job2", "job6"},
},
}
c = NewChain(jc)
expectedIsAcyclic = false
isAcyclic = c.isAcyclic()
if isAcyclic != expectedIsAcyclic {
t.Errorf("isAcyclic = %t, want %t", isAcyclic, expectedIsAcyclic)
}
// No cycle, but multiple first jobs and last jobs.
jc = &proto.JobChain{
Jobs: testutil.InitJobs(5),
AdjacencyList: map[string][]string{
"job1": {"job3"},
"job2": {"job3"},
"job3": {"job4", "job5"},
},
}
c = NewChain(jc)
expectedIsAcyclic = true
isAcyclic = c.isAcyclic()
if isAcyclic != expectedIsAcyclic {
t.Errorf("isAcyclic = %t, want %t", isAcyclic, expectedIsAcyclic)
}
}
func TestValidateAdjacencyList(t *testing.T) {
// Invalid 1.
jc := &proto.JobChain{
Jobs: testutil.InitJobs(2),
AdjacencyList: map[string][]string{
"job1": {"job2", "job3"},
},
}
c := NewChain(jc)
expectedValid := false
valid := c.adjacencyListIsValid()
if valid != expectedValid {
t.Errorf("valid = %t, expected %t", valid, expectedValid)
}
// Invalid 2.
jc = &proto.JobChain{
Jobs: testutil.InitJobs(2),
AdjacencyList: map[string][]string{
"job1": {"job2"},
"job7": {},
},
}
c = NewChain(jc)
expectedValid = false
valid = c.adjacencyListIsValid()
if valid != expectedValid {
t.Errorf("valid = %t, expected %t", valid, expectedValid)
}
// Valid.
jc = &proto.JobChain{
Jobs: testutil.InitJobs(3),
AdjacencyList: map[string][]string{
"job1": {"job2"},
"job2": {"job3"},
},
}
c = NewChain(jc)
expectedValid = true
valid = c.adjacencyListIsValid()
if valid != expectedValid {
t.Errorf("valid = %t, expected %t", valid, expectedValid)
}
}
<file_sep>// Copyright 2017-2018, Square, Inc.
// Package server bootstraps the Job Runner.
package server
import (
"fmt"
"github.com/orcaman/concurrent-map"
"github.com/square/spincycle/config"
"github.com/square/spincycle/job-runner/api"
"github.com/square/spincycle/job-runner/app"
"github.com/square/spincycle/job-runner/chain"
"github.com/square/spincycle/job-runner/runner"
"github.com/square/spincycle/job-runner/status"
"github.com/square/spincycle/jobs"
)
// Run runs the Job Runner API in the foreground. It returns when the API stops.
func Run(appCtx app.Context) error {
if err := loadConfig(&appCtx); err != nil {
return err
}
api, err := makeAPI(appCtx)
if err != nil {
return err
}
return api.Run()
}
type Server struct {
appCtx app.Context
api *api.API
}
func NewServer(appCtx app.Context) *Server {
return &Server{
appCtx: appCtx,
}
}
func (s *Server) Boot() error {
if s.api != nil {
return nil
}
if err := loadConfig(&s.appCtx); err != nil {
return err
}
api, err := makeAPI(s.appCtx)
if err != nil {
return err
}
s.api = api
return nil
}
func (s *Server) API() *api.API {
return s.api
}
// --------------------------------------------------------------------------
func loadConfig(appCtx *app.Context) error {
var err error
var cfg config.JobRunner
if appCtx.Hooks.LoadConfig != nil {
cfg, err = appCtx.Hooks.LoadConfig(*appCtx)
} else {
cfg, err = appCtx.Hooks.LoadConfig(*appCtx)
}
if err != nil {
return fmt.Errorf("error loading config at %s", err)
}
appCtx.Config = cfg
return nil
}
func makeAPI(appCtx app.Context) (*api.API, error) {
var err error
// //////////////////////////////////////////////////////////////////////
// Request Manager Client
// //////////////////////////////////////////////////////////////////////
rmc, err := appCtx.Factories.MakeRequestManagerClient(appCtx)
if err != nil {
return nil, fmt.Errorf("error loading config at %s", err)
}
// //////////////////////////////////////////////////////////////////////
// Chain repo
// //////////////////////////////////////////////////////////////////////
chainRepo, err := appCtx.Factories.MakeChainRepo(appCtx)
if err != nil {
return nil, fmt.Errorf("error loading config at %s", err)
}
// //////////////////////////////////////////////////////////////////////
// API
// //////////////////////////////////////////////////////////////////////
stat := status.NewManager(chainRepo)
rf := runner.NewFactory(jobs.Factory, rmc)
trFactory := chain.NewTraverserFactory(chainRepo, rf, rmc)
trRepo := cmap.New()
return api.NewAPI(appCtx, trFactory, trRepo, stat), nil
}
<file_sep>// Copyright 2017, Square, Inc.
// Package chain implements a job chain. It provides the ability to traverse a chain
// and run all of the jobs in it.
package chain
import (
"fmt"
"sync"
"time"
"github.com/square/spincycle/proto"
)
// chain represents a job chain and some meta information about it.
type chain struct {
JobChain *proto.JobChain `json:"jobChain"`
Running map[string]RunningJob `json:"running"` // keyed on job ID; each value records the job's run order and start time
N uint
*sync.RWMutex
}
type RunningJob struct {
N uint `json:"n"`
StartTs int64 `json:"startTs"`
}
// NewChain takes a JobChain proto (from the RM) and turns it into a Chain that
// the JR can use.
func NewChain(jc *proto.JobChain) *chain {
// Set the state of all jobs in the chain to "Pending".
for jobName, job := range jc.Jobs {
job.State = proto.STATE_PENDING
job.Data = map[string]interface{}{}
jc.Jobs[jobName] = job
}
return &chain{
JobChain: jc,
Running: map[string]RunningJob{},
N: 0,
RWMutex: &sync.RWMutex{},
}
}
// ErrInvalidChain is the error returned when a chain is not valid.
type ErrInvalidChain struct {
Message string
}
func (e ErrInvalidChain) Error() string {
return e.Message
}
// FirstJob finds the job in the chain with indegree 0. If there is not
// exactly one of these jobs, it returns an error.
func (c *chain) FirstJob() (proto.Job, error) {
var jobIds []string
for jobId, count := range c.indegreeCounts() {
if count == 0 {
jobIds = append(jobIds, jobId)
}
}
if len(jobIds) != 1 {
return proto.Job{}, ErrInvalidChain{
Message: fmt.Sprintf("chain has %d first job(s), should "+
"have one (first job(s) = %v)", len(jobIds), jobIds),
}
}
return c.JobChain.Jobs[jobIds[0]], nil
}
// LastJob finds the job in the chain with outdegree 0. If there is not
// exactly one of these jobs, it returns an error.
func (c *chain) LastJob() (proto.Job, error) {
var jobIds []string
for jobId, count := range c.outdegreeCounts() {
if count == 0 {
jobIds = append(jobIds, jobId)
}
}
if len(jobIds) != 1 {
return proto.Job{}, ErrInvalidChain{
Message: fmt.Sprintf("chain has %d last job(s), should "+
"have one (last job(s) = %v)", len(jobIds), jobIds),
}
}
return c.JobChain.Jobs[jobIds[0]], nil
}
// NextJobs finds all of the jobs adjacent to the given job.
func (c *chain) NextJobs(jobId string) proto.Jobs {
var nextJobs proto.Jobs
if nextJobIds, ok := c.JobChain.AdjacencyList[jobId]; ok {
for _, id := range nextJobIds {
if val, ok := c.JobChain.Jobs[id]; ok {
nextJobs = append(nextJobs, val)
}
}
}
return nextJobs
}
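// For example (mirroring the adjacency lists used in the tests): with
// job1 -> {job2, job3}, job2 -> {job4}, and job3 -> {job4}, NextJobs("job1")
// returns job2 and job3, and NextJobs("job4") returns an empty list.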
// PreviousJobs finds all of the immediately previous jobs to a given job.
func (c *chain) PreviousJobs(jobId string) proto.Jobs {
var prevJobs proto.Jobs
for curJob, nextJobs := range c.JobChain.AdjacencyList {
if contains(nextJobs, jobId) {
if val, ok := c.JobChain.Jobs[curJob]; ok {
prevJobs = append(prevJobs, val)
}
}
}
return prevJobs
}
// JobIsReady returns whether or not a job is ready to run. A job is considered
// ready to run if all of its previous jobs are complete. If any previous jobs
// are not complete, the job is not ready to run.
func (c *chain) JobIsReady(jobId string) bool {
isReady := true
for _, job := range c.PreviousJobs(jobId) {
if job.State != proto.STATE_COMPLETE {
isReady = false
}
}
return isReady
}
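// For example: if job4's previous jobs are job2 (COMPLETE) and job3 (PENDING),
// JobIsReady("job4") is false; a job with no previous jobs is always ready.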
// IsDone returns two booleans - the first one indicates whether or not the
// chain is done, and the second one indicates whether or not the chain is
// complete.
//
// A chain is done running if there are no more jobs in it that can run. This
// can happen if all of the jobs in the chain are complete, or if some or all
// of the jobs in the chain failed.
//
// A chain is complete if every job in it completed successfully.
func (c *chain) IsDone() (done bool, complete bool) {
done = true
complete = true
pendingJobs := proto.Jobs{}
// Loop through every job in the chain and act on its state. Keep
// track of the jobs that aren't running or in a finished state so
// that we can later check to see if they are capable of running.
LOOP:
for _, job := range c.JobChain.Jobs {
switch job.State {
case proto.STATE_RUNNING:
// If any jobs are running, the chain can't be done
// or complete, so return false for both now.
return false, false
case proto.STATE_COMPLETE:
// Move on to the next job.
continue LOOP
case proto.STATE_FAIL:
// do nothing
default:
// Any job that's not running, complete, or failed.
pendingJobs = append(pendingJobs, job)
}
// We can only arrive here if a job is not complete. If there
// is at least one job that is not complete, the whole chain is
// not complete. The chain could still be done, though, so we
// aren't ready to return yet.
complete = false
}
// For each pending job, check to see if all of its previous jobs
// completed. If they did, there's no reason the pending job can't run.
for _, job := range pendingJobs {
complete = false
allPrevComplete := true
for _, prevJob := range c.PreviousJobs(job.Id) {
if prevJob.State != proto.STATE_COMPLETE {
allPrevComplete = false
// We can break out of this loop if a single
// one of the previous jobs is not complete.
break
}
}
// If all of the previous jobs of a pending job are complete, the
// chain can't be complete because the pending job can still run.
if allPrevComplete == true {
return false, complete
}
}
return
}
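// Worked example (mirroring TestIsDoneNotComplete): for job1 -> {job2, job3}
// and job2 -> {job4}, with job1 and job3 COMPLETE, job2 FAIL, and job4 PENDING,
// IsDone returns done=true, complete=false: job4's only previous job failed, so
// nothing else can run, but not every job completed.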
// Validate checks if a job chain is valid. It returns an error if it's not.
func (c *chain) Validate() error {
// Make sure the adjacency list is valid.
if !c.adjacencyListIsValid() {
return ErrInvalidChain{
Message: "invalid adjacency list: some jobs exist in " +
"chain.AdjacencyList but not chain.Jobs",
}
}
// Make sure there is one first job.
_, err := c.FirstJob()
if err != nil {
return err
}
// Make sure there is one last job.
_, err = c.LastJob()
if err != nil {
return err
}
// Make sure there are no cycles.
if !c.isAcyclic() {
return ErrInvalidChain{Message: "chain is cyclic"}
}
return nil
}
// RequestId returns the request id of the job chain.
func (c *chain) RequestId() string {
return c.JobChain.RequestId
}
// JobState returns the state of a given job.
func (c *chain) JobState(jobId string) byte {
c.RLock() // -- lock
defer c.RUnlock() // -- unlock
return c.JobChain.Jobs[jobId].State
}
// Set the state of a job in the chain.
func (c *chain) SetJobState(jobId string, state byte) {
c.Lock() // -- lock
j := c.JobChain.Jobs[jobId]
j.State = state
c.JobChain.Jobs[jobId] = j
// Keep chain.Running up to date
if state == proto.STATE_RUNNING {
c.N += 1 // Nth job to run
// @todo: on sequence retry, we need to N-- for all jobs in the sequence
c.Running[jobId] = RunningJob{
N: c.N,
StartTs: time.Now().UnixNano(),
}
} else {
// STATE_RUNNING is the only running state, and it's not that, so the
// job must not be running.
delete(c.Running, jobId)
}
c.Unlock() // -- unlock
}
// SetState sets the chain's state.
func (c *chain) SetState(state byte) {
c.Lock() // -- lock
c.JobChain.State = state
c.Unlock() // -- unlock
}
// -------------------------------------------------------------------------- //
// indegreeCounts finds the indegree for each job in the chain.
func (c *chain) indegreeCounts() map[string]int {
indegreeCounts := make(map[string]int)
for job := range c.JobChain.Jobs {
indegreeCounts[job] = 0
}
for _, nextJobs := range c.JobChain.AdjacencyList {
for _, nextJob := range nextJobs {
if _, ok := indegreeCounts[nextJob]; ok {
indegreeCounts[nextJob] += 1
}
}
}
return indegreeCounts
}
// outdegreeCounts finds the outdegree for each job in the chain.
func (c *chain) outdegreeCounts() map[string]int {
outdegreeCounts := make(map[string]int)
for job := range c.JobChain.Jobs {
outdegreeCounts[job] = len(c.JobChain.AdjacencyList[job])
}
return outdegreeCounts
}
// isAcyclic returns whether or not a job chain is acyclic. It essentially
// works by moving through the job chain from the top (the first job)
// down to the bottom (the last job), and if there are any cycles in the
// chain (dependencies that go in the opposite direction...i.e., bottom to
// top), it returns false.
func (c *chain) isAcyclic() bool {
indegreeCounts := c.indegreeCounts()
queue := make(map[string]struct{})
// Add all of the first jobs to the queue (in reality there should
// only be 1).
for job, indegreeCount := range indegreeCounts {
if indegreeCount == 0 {
queue[job] = struct{}{}
}
}
jobsVisited := 0
for {
// Break when there are no more jobs in the queue. This happens
// when either there are no first jobs, or when a cycle
// prevents us from enqueuing a job below.
if len(queue) == 0 {
break
}
// Get a job from the queue.
var curJob string
for k := range queue {
curJob = k
}
delete(queue, curJob)
// Visit each job adjacent to the current job and decrement
// their indegree count by 1. When a job's indegree count
// becomes 0, add it to the queue.
//
// If there is a cycle somewhere, at least one job's indegree
// count will never reach 0, and therefore it will never be
// enqueued and visited.
for _, adjJob := range c.JobChain.AdjacencyList[curJob] {
indegreeCounts[adjJob] -= 1
if indegreeCounts[adjJob] == 0 {
queue[adjJob] = struct{}{}
}
}
// Keep track of the number of jobs we've visited. If there is
// a cycle in the chain, we won't end up visiting some jobs.
jobsVisited += 1
}
if jobsVisited != len(c.JobChain.Jobs) {
return false
}
return true
}
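// Worked example: for job1 -> {job2, job3}, job2 -> {job4}, job3 -> {job4},
// the indegree counts are job1:0, job2:1, job3:1, job4:2. Only job1 starts in
// the queue; visiting it drops job2 and job3 to 0, and visiting those drops
// job4 to 0, so all four jobs are visited and the chain is acyclic. Adding
// job4 -> {job1} gives every job an indegree of at least 1, so the queue starts
// empty, no job is visited, and the cycle is detected.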
// adjacencyListIsValid returns whether or not the chain's adjacency list is
// valid. An adjacency list is not valid if any of the jobs in it do not
// exist in chain.Jobs.
func (c *chain) adjacencyListIsValid() bool {
for job, adjJobs := range c.JobChain.AdjacencyList {
if _, ok := c.JobChain.Jobs[job]; !ok {
return false
}
for _, adjJob := range adjJobs {
if _, ok := c.JobChain.Jobs[adjJob]; !ok {
return false
}
}
}
return true
}
// contains returns whether or not a slice of strings contains a specific string.
func contains(s []string, t string) bool {
for _, i := range s {
if i == t {
return true
}
}
return false
}
<file_sep>// Copyright 2017-2018, Square, Inc.
package main
import (
"log"
"github.com/square/spincycle/request-manager/app"
"github.com/square/spincycle/request-manager/server"
)
func main() {
err := server.Run(app.Defaults())
log.Fatal("Request Manager stopped: %s", err)
}
<file_sep>// Copyright 2017-2018, Square, Inc.
// Package server bootstraps the Request Manager.
package server
import (
"fmt"
"github.com/square/spincycle/config"
"github.com/square/spincycle/request-manager/api"
"github.com/square/spincycle/request-manager/app"
"github.com/square/spincycle/request-manager/joblog"
"github.com/square/spincycle/request-manager/request"
"github.com/square/spincycle/request-manager/status"
)
// Run runs the Request Manager API in the foreground. It returns when the API stops.
func Run(appCtx app.Context) error {
if err := loadConfig(&appCtx); err != nil {
return err
}
api, err := makeAPI(appCtx)
if err != nil {
return err
}
return api.Run()
}
type Server struct {
appCtx app.Context
api *api.API
}
func NewServer(appCtx app.Context) *Server {
return &Server{
appCtx: appCtx,
}
}
func (s *Server) Boot() error {
if s.api != nil {
return nil
}
if err := loadConfig(&s.appCtx); err != nil {
return err
}
api, err := makeAPI(s.appCtx)
if err != nil {
return err
}
s.api = api
return nil
}
func (s *Server) API() *api.API {
return s.api
}
// --------------------------------------------------------------------------
func loadConfig(appCtx *app.Context) error {
var err error
var cfg config.RequestManager
if appCtx.Hooks.LoadConfig != nil {
cfg, err = appCtx.Hooks.LoadConfig(*appCtx)
} else {
cfg, err = appCtx.Hooks.LoadConfig(*appCtx)
}
if err != nil {
return fmt.Errorf("error loading config at %s", err)
}
appCtx.Config = cfg
return nil
}
func makeAPI(appCtx app.Context) (*api.API, error) {
var err error
// //////////////////////////////////////////////////////////////////////
// Grapher Factory
// //////////////////////////////////////////////////////////////////////
grf, err := appCtx.Factories.MakeGrapher(appCtx)
if err != nil {
return nil, fmt.Errorf("error loading config at %s", err)
}
// //////////////////////////////////////////////////////////////////////
// Job Runner Client
// //////////////////////////////////////////////////////////////////////
jrc, err := appCtx.Factories.MakeJobRunnerClient(appCtx)
if err != nil {
return nil, fmt.Errorf("error loading config at %s", err)
}
// //////////////////////////////////////////////////////////////////////
// DB Connection Pool
// //////////////////////////////////////////////////////////////////////
dbc, err := appCtx.Factories.MakeDbConnPool(appCtx)
if err != nil {
return nil, fmt.Errorf("error loading config at %s", err)
}
// //////////////////////////////////////////////////////////////////////
// Request Manager, Job Log Store, and Job Chain Store
// //////////////////////////////////////////////////////////////////////
rm := request.NewManager(grf, dbc, jrc)
// //////////////////////////////////////////////////////////////////////
// API
// //////////////////////////////////////////////////////////////////////
stat := status.NewManager(dbc, jrc)
jls := joblog.NewStore(dbc)
return api.NewAPI(appCtx, rm, jls, stat), nil
}
<repo_name>samblake/meerkat<file_sep>/src/main/kotlin/com/github/samblake/meerkat/services/BrowserService.kt
package com.github.samblake.meerkat.services
import com.github.samblake.meerkat.edge.Database.query
import com.github.samblake.meerkat.model.Browser
import com.github.samblake.meerkat.model.ViewBrowser
object BrowserService {
suspend fun all(baseUrl: String): List<ViewBrowser> = query {
Browser.all().toList().map { it.asViewModel(baseUrl) }
}
}<file_sep>/src/main/kotlin/com/github/samblake/meerkat/model/ViewModel.kt
package com.github.samblake.meerkat.model
import kotlin.reflect.KClass
import kotlin.reflect.full.memberProperties
abstract class ViewModel(val id: Int, val name: String, val description: String) {
abstract val baseUrl: String
abstract val icon: String
fun getInstanceUrl() = "${baseUrl}/${id}"
override fun equals(other: Any?): Boolean {
if (this === other) return true
if (javaClass != other?.javaClass) return false
other as ViewModel
return id == other.id
}
override fun hashCode(): Int {
return id
}
fun getListings(): Map<String, String> {
val fields = (this::class as KClass<ViewModel>).memberProperties.filter { it.annotations.any { it is Listing } }
return fields.map {
Pair((it.annotations.first { it is Listing } as Listing).name, it.get(this).toString())
}.toMap()
}
}
abstract class ViewType<T: ViewModel>(val name: String, val urlSegment: String, val icon: String)
@Target(AnnotationTarget.PROPERTY)
@Retention(AnnotationRetention.RUNTIME)
@MustBeDocumented
annotation class Listing(val name: String)<file_sep>/sql/init-mysql.sql
CREATE DATABASE IF NOT EXISTS meerkat;
USE meerkat;<file_sep>/src/main/kotlin/com/github/samblake/meerkat/model/Result.kt
package com.github.samblake.meerkat.model
import org.jetbrains.exposed.dao.EntityID
object Results : NamedTable("results") {
val name = varchar("name", length = 100)
val description = text("description")
val run = reference("run", Runs)
val case = reference("case", Cases)
}
class Result(id: EntityID<Int>) : NamedEntity<ViewResult>(id) {
companion object : NamedEntityClass<Result>(Results)
override var name by Results.name
var description by Results.description
var run by Results.run
var case by Results.case
override fun asViewModel(baseUrl: String) = ViewResult.from(this, baseUrl)
}
class ViewResult(id: Int, name: String, description: String,
override val baseUrl: String) : ViewModel(id, name, description) {
companion object : ViewType<ViewResult>("Results","results", "mdi-thumbs-up-down") {
fun from(result: Result, baseUrl: String): ViewResult = ViewResult(
result.id.value,
result.name,
result.description,
baseUrl
)
}
override val icon = ViewResult.icon
}<file_sep>/Dockerfile
FROM openjdk:8-jre-alpine
EXPOSE 7000
ENTRYPOINT ["/usr/bin/java", "-jar", "/opt/meerkat/meerkat.jar"]
ARG JAR_FILE
ADD target/meerkat-app.jar /opt/meerkat/meerkat.jar
ADD ./static /static
ADD ./sql /sql<file_sep>/src/main/kotlin/com/github/samblake/meerkat/services/ProjectService.kt
package com.github.samblake.meerkat.services
import com.github.samblake.meerkat.edge.Database
import com.github.samblake.meerkat.model.Project
import com.github.samblake.meerkat.model.ViewProject
object ProjectService {
suspend fun all(baseUrl: String): List<ViewProject> = Database.query {
Project.all().toList().map { it.asViewModel(baseUrl) }
}
}<file_sep>/pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.github.samblake.meerkat</groupId>
<artifactId>meerkat</artifactId>
<version>1.0-SNAPSHOT</version>
<packaging>jar</packaging>
<name>com.github.samblake.meerkat meerkat</name>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<jvm.version>1.8</jvm.version>
<kotlin.version>1.6.0</kotlin.version>
<kotlin.code.style>official</kotlin.code.style>
<junit.version>4.13.1</junit.version>
<ktor.version>1.3.0</ktor.version>
</properties>
<dependencies>
<!-- Kotlin -->
<dependency>
<groupId>org.jetbrains.kotlin</groupId>
<artifactId>kotlin-stdlib</artifactId>
<version>${kotlin.version}</version>
</dependency>
<dependency>
<groupId>org.jetbrains.kotlin</groupId>
<artifactId>kotlin-test-junit</artifactId>
<version>${kotlin.version}</version>
<scope>test</scope>
</dependency>
<!-- Ktor -->
<dependency>
<groupId>io.ktor</groupId>
<artifactId>ktor-server-netty</artifactId>
<version>${ktor.version}</version>
</dependency>
<dependency>
<groupId>io.ktor</groupId>
<artifactId>ktor-gson</artifactId>
<version>${ktor.version}</version>
</dependency>
<dependency>
<groupId>io.ktor</groupId>
<artifactId>ktor-thymeleaf</artifactId>
<version>${ktor.version}</version>
</dependency>
<dependency>
<groupId>nz.net.ultraq.thymeleaf</groupId>
<artifactId>thymeleaf-layout-dialect</artifactId>
<version>2.0.5</version>
</dependency>
<!-- Logging -->
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>1.2.3</version>
</dependency>
<!-- Configuration -->
<dependency>
<groupId>com.uchuhimo</groupId>
<artifactId>konf</artifactId>
<version>0.22.1</version>
</dependency>
<!-- Database -->
<dependency>
<groupId>org.jetbrains.exposed</groupId>
<artifactId>exposed</artifactId>
<version>0.17.7</version>
</dependency>
<dependency>
<groupId>com.zaxxer</groupId>
<artifactId>HikariCP</artifactId>
<version>3.4.1</version>
</dependency>
<dependency>
<groupId>mysql</groupId>
<artifactId>mysql-connector-java</artifactId>
<version>8.0.28</version>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<version>2.1.210</version>
<scope>runtime</scope>
</dependency>
<!-- Selenium -->
<dependency>
<groupId>ru.yandex.qatools.ashot</groupId>
<artifactId>ashot</artifactId>
<version>1.5.2</version>
</dependency>
<dependency>
<groupId>io.github.bonigarcia</groupId>
<artifactId>webdrivermanager</artifactId>
<version>3.8.1</version>
</dependency>
<!-- Testing -->
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>${junit.version}</version>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<sourceDirectory>src/main/kotlin</sourceDirectory>
<testSourceDirectory>src/test/kotlin</testSourceDirectory>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.8.1</version>
<configuration>
<source>${jvm.version}</source>
<target>${jvm.version}</target>
</configuration>
</plugin>
<plugin>
<groupId>org.jetbrains.kotlin</groupId>
<artifactId>kotlin-maven-plugin</artifactId>
<version>${kotlin.version}</version>
<executions>
<execution>
<id>compile</id>
<phase>compile</phase>
<goals>
<goal>compile</goal>
</goals>
</execution>
<execution>
<id>test-compile</id>
<phase>test-compile</phase>
<goals>
<goal>test-compile</goal>
</goals>
</execution>
</executions>
<configuration>
<jvmTarget>${jvm.version}</jvmTarget>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>3.2.1</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<shadedArtifactAttached>true</shadedArtifactAttached>
<transformers>
<transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
<mainClass>com.github.samblake.meerkat.MeerkatKt</mainClass>
</transformer>
</transformers>
<filters>
<filter>
<!-- filter out signature files from signed dependencies and module info -->
<artifact>*:*</artifact>
<excludes>
<exclude>META-INF/*.SF</exclude>
<exclude>META-INF/*.DSA</exclude>
<exclude>META-INF/*.RSA</exclude>
<exclude>META-INF/versions/9/module-info.class</exclude>
</excludes>
</filter>
</filters>
</configuration>
</execution>
</executions>
<configuration>
<finalName>${project.build.finalName}-app</finalName>
</configuration>
</plugin>
<plugin>
<groupId>com.gitlab.haynes</groupId>
<artifactId>libsass-maven-plugin</artifactId>
<version>0.2.22</version>
<executions>
<execution>
<phase>generate-resources</phase>
<goals>
<goal>compile</goal>
</goals>
</execution>
</executions>
<configuration>
<inputPath>${basedir}/src/main/sass</inputPath>
<outputPath>${basedir}/static/css</outputPath>
</configuration>
</plugin>
</plugins>
<finalName>meerkat</finalName>
</build>
<repositories>
<repository>
<id>exposed</id>
<name>exposed</name>
<url>https://dl.bintray.com/kotlin/exposed</url>
</repository>
</repositories>
</project>
<file_sep>/README.md
# Meerkat

Automated cross-browser image comparison.
Logo made by [Freepik](https://www.flaticon.com/authors/freepik) from [www.flaticon.com](https://www.flaticon.com/)
<file_sep>/src/main/kotlin/com/github/samblake/meerkat/model/Case.kt
package com.github.samblake.meerkat.model
import org.jetbrains.exposed.dao.EntityID
object Cases : NamedTable("cases") {
val name = varchar("name", length = 100)
val description = text("description")
val project = reference("project", Projects)
val path = text("path")
}
class Case(id: EntityID<Int>) : NamedEntity<ViewCase>(id) {
companion object : NamedEntityClass<Case>(Cases)
override var name by Cases.name
var description by Cases.description
var project by Project referencedOn Cases.project
var path by Cases.path
override fun asViewModel(baseUrl: String) = ViewCase.from(this, baseUrl)
}
class ViewCase(id: Int, name: String, description: String,
@Listing("Path") val path: String, val project: Project,
override val baseUrl: String) : ViewModel(id, name, description) {
companion object : ViewType<ViewCase>("Cases","cases", "mdi-bookmark-check") {
fun from(case: Case, baseUrl: String): ViewCase {
return ViewCase(
case.id.value,
case.name,
case.description,
case.path,
case.project,
baseUrl
)
}
}
override val icon = ViewCase.icon
}<file_sep>/sql/init.sql
CREATE TABLE `projects` (
`id` INT NOT NULL AUTO_INCREMENT,
`name` VARCHAR(100) NOT NULL,
`description` TEXT NOT NULL,
`base` VARCHAR(255) NOT NULL,
PRIMARY KEY (`id`)
);
CREATE TABLE `browsers` (
`id` INT NOT NULL AUTO_INCREMENT,
`name` VARCHAR(100) NOT NULL,
`description` TEXT NOT NULL,
`client` VARCHAR(10) NOT NULL,
`width` INT NOT NULL,
`height` INT NOT NULL,
`additional_config` TEXT NULL,
PRIMARY KEY (`id`)
);
CREATE TABLE `cases` (
`id` INT NOT NULL AUTO_INCREMENT,
`name` VARCHAR(100) NOT NULL,
`description` TEXT NOT NULL,
`project` INT NOT NULL,
`path` TEXT NOT NULL,
PRIMARY KEY (`id`),
CONSTRAINT `fk_cases_project_id` FOREIGN KEY (`project`) REFERENCES projects(id) ON DELETE RESTRICT ON UPDATE RESTRICT
);
CREATE TABLE `scenarios` (
`id` INT NOT NULL AUTO_INCREMENT,
`name` VARCHAR(100) NOT NULL,
`description` TEXT NOT NULL,
`project` INT NOT NULL,
PRIMARY KEY (`id`),
CONSTRAINT `fk_scenarios_project_id` FOREIGN KEY (`project`) REFERENCES projects(id) ON DELETE RESTRICT ON UPDATE RESTRICT
);
CREATE TABLE scenario_cases (
`id` INT NOT NULL AUTO_INCREMENT,
`scenario` INT NOT NULL,
`case` INT NOT NULL,
PRIMARY KEY (`id`),
CONSTRAINT `fk_scenario_cases_scenario_id` FOREIGN KEY (`scenario`) REFERENCES scenarios(id) ON DELETE RESTRICT ON UPDATE RESTRICT,
CONSTRAINT `fk_scenario_cases_case_id` FOREIGN KEY (`case`) REFERENCES cases(id) ON DELETE RESTRICT ON UPDATE RESTRICT
);
CREATE TABLE runs (
`id` INT NOT NULL AUTO_INCREMENT,
`name` VARCHAR(100) NOT NULL,
`description` TEXT NOT NULL,
PRIMARY KEY (`id`)
);
CREATE TABLE results (
`id` INT NOT NULL AUTO_INCREMENT,
`name` VARCHAR(100) NOT NULL,
`description` TEXT NOT NULL,
`run` INT NOT NULL,
`case` INT NOT NULL,
PRIMARY KEY (`id`),
CONSTRAINT `fk_results_run_id` FOREIGN KEY (`run`) REFERENCES runs(id) ON DELETE RESTRICT ON UPDATE RESTRICT,
CONSTRAINT `fk_results_case_id` FOREIGN KEY (`case`) REFERENCES cases(id) ON DELETE RESTRICT ON UPDATE RESTRICT
);<file_sep>/src/main/kotlin/com/github/samblake/meerkat/model/Project.kt
package com.github.samblake.meerkat.model
import org.jetbrains.exposed.dao.EntityID
object Projects : NamedTable("projects") {
val name = varchar("name", length = 100)
val description = text("description")
val base = varchar("base", length = 255)
}
class Project(id: EntityID<Int>) : NamedEntity<ViewProject>(id) {
companion object : NamedEntityClass<Project>(Projects)
override var name by Projects.name
var description by Projects.description
var base by Projects.base
val scenarios by Scenario referrersOn Scenarios.project
val cases by Case referrersOn Cases.project
override fun asViewModel(baseUrl: String) = ViewProject.from(this, baseUrl)
}
class ViewProject(id: Int, name: String, description: String, @Listing("Base") val base: String,
override val baseUrl: String) : ViewModel(id, name, description) {
companion object : ViewType<ViewProject>("Projects","projects", "mdi-clipboard-outline") {
fun from(project: Project, baseUrl: String): ViewProject {
return ViewProject(
project.id.value,
project.name,
project.description,
project.base,
baseUrl
)
}
}
override val icon = ViewProject.icon
}<file_sep>/src/main/kotlin/com/github/samblake/meerkat/services/ScenarioService.kt
package com.github.samblake.meerkat.services
import com.github.samblake.meerkat.edge.Database
import com.github.samblake.meerkat.model.Project
import com.github.samblake.meerkat.model.Scenario
import com.github.samblake.meerkat.model.Scenarios
import com.github.samblake.meerkat.model.ViewScenario
object ScenarioService {
suspend fun all(project: Project, baseUrl: String): List<ViewScenario> = Database.query {
Scenario.find{ Scenarios.project eq project.id }.toList().map { it.asViewModel(baseUrl) }
}
}<file_sep>/src/main/kotlin/com/github/samblake/meerkat/model/Run.kt
package com.github.samblake.meerkat.model
import org.jetbrains.exposed.dao.EntityID
object Runs : NamedTable("runs") {
val name = varchar("name", length = 100)
val description = text("description")
}
class Run(id: EntityID<Int>) : NamedEntity<ViewRun>(id) {
companion object : NamedEntityClass<Run>(Runs)
override var name by Runs.name
var description by Runs.description
override fun asViewModel(baseUrl: String) = ViewRun.from(this, baseUrl)
}
class ViewRun(id: Int, name: String, description: String,
override val baseUrl: String) : ViewModel(id, name, description) {
companion object : ViewType<ViewRun>("Runs","runs", "mdi-play-circle") {
fun from(run: Run, baseUrl: String): ViewRun = ViewRun(
run.id.value,
run.name,
run.description,
baseUrl
)
}
override val icon = ViewRun.icon
}<file_sep>/src/main/kotlin/com/github/samblake/meerkat/model/NamedEntity.kt
package com.github.samblake.meerkat.model
import org.jetbrains.exposed.dao.*
abstract class NamedEntity<T:Any>(id: EntityID<Int>) : IntEntity(id) {
abstract var name:String
abstract fun asViewModel(baseUrl: String):T
}
abstract class NamedTable(name: String = "", columnName: String = "id") : IntIdTable(name, columnName)
abstract class NamedEntityClass<out E:NamedEntity<out Any>>(table: IdTable<Int>, entityType: Class<E>? = null) : IntEntityClass<E>(table, entityType)<file_sep>/src/main/kotlin/com/github/samblake/meerkat/edge/Database.kt
package com.github.samblake.meerkat.edge
import com.zaxxer.hikari.HikariConfig
import com.zaxxer.hikari.HikariDataSource
import org.jetbrains.exposed.sql.Database
import org.jetbrains.exposed.sql.transactions.experimental.newSuspendedTransaction
object Database {
fun init() = with (Configuration) {
Database.connect(hikari(jdbcDriver(), jdbcUrl(), jdbcUsername(), jdbcPassword()))
}
private fun hikari(driverClassName: String, jdbcUrl: String, username: String, password: String): HikariDataSource {
val config = HikariConfig()
config.driverClassName = driverClassName
config.jdbcUrl = jdbcUrl
config.username = username
config.password = password
config.maximumPoolSize = 3
config.isAutoCommit = false
config.transactionIsolation = "TRANSACTION_REPEATABLE_READ"
config.validate()
return HikariDataSource(config)
}
suspend fun <T> query(block: suspend () -> T): T = newSuspendedTransaction { block() }
}
<file_sep>/src/main/kotlin/com/github/samblake/meerkat/model/ScenarioCase.kt
package com.github.samblake.meerkat.model
import org.jetbrains.exposed.dao.EntityID
import org.jetbrains.exposed.dao.IntEntity
import org.jetbrains.exposed.dao.IntEntityClass
import org.jetbrains.exposed.dao.IntIdTable
object ScenarioCases : IntIdTable("scenario_cases") {
val scenario = reference("scenario", Scenarios)
val case = reference("case", Cases)
}
class ScenarioCase(id: EntityID<Int>) : IntEntity(id) {
companion object : IntEntityClass<ScenarioCase>(ScenarioCases)
var scenario by Scenario referencedOn ScenarioCases.scenario
var case by Case referencedOn ScenarioCases.case
}<file_sep>/docker-compose.yaml
version: '3'
services:
db:
image: mysql:5.7
volumes:
- ./sql/init-mysql.sql:/docker-entrypoint-initdb.d/1.sql
- ./sql/init.sql:/docker-entrypoint-initdb.d/2.sql
- ./sql/example.sql:/docker-entrypoint-initdb.d/3.sql
restart: always
command: ['--character-set-server=utf8mb4', '--collation-server=utf8mb4_unicode_ci']
environment:
MYSQL_ROOT_PASSWORD: meerkat
MYSQL_DATABASE: meerkat
MYSQL_USER: meerkat
MYSQL_PASSWORD: meerkat
ports:
- 3306:3306
web:
build: .
environment:
MEERKAT_JDBC_DRIVER: com.mysql.cj.jdbc.Driver
MEERKAT_JDBC_URL: jdbc:mysql://meerkat:meerkat@db:3306/meerkat?useUnicode=true&characterEncoding=utf8
ports:
- "7070:7000"
depends_on:
- db
<file_sep>/src/main/kotlin/com/github/samblake/meerkat/menu/Menu.kt
package com.github.samblake.meerkat.menu
import com.github.samblake.meerkat.model.ViewType
class Menu(val sections: List<Section>) {
fun findSelectedItem(crumbs: List<String>): Item? {
        return sections.mapNotNull { it.findSelectedItem(crumbs) }.firstOrNull()
?: sections.firstOrNull()?.items?.firstOrNull()
}
}
class Section(val name: String, val items: List<Item>) {
fun findSelectedItem(crumbs: List<String>) : Item? = items.firstOrNull { it.isSelected(crumbs) }
}
class Item(val name: String, val url: String, val icon: String) {
constructor(viewType: ViewType<*>) : this(viewType, "/")
constructor(viewType: ViewType<*>, baseUrl: String)
: this(viewType.name, baseUrl + viewType.urlSegment, viewType.icon)
fun isSelected(crumbs: List<String>): Boolean = crumbs.any { it == name }
}
class ViewMenu(val menu: Menu, val selected: Item?) {
fun isSelected(item: Item): Boolean = item == selected
val sections = menu.sections
}<file_sep>/src/main/kotlin/com/github/samblake/meerkat/model/Browser.kt
package com.github.samblake.meerkat.model
import org.jetbrains.exposed.dao.EntityID
object Browsers : NamedTable("browsers") {
val name = varchar("name", length = 100)
val description = text("description")
val client = enumerationByName("client", 10, Clients::class)
val width = integer("width")
val height = integer("height")
val additionalConfig = text("additional_config").nullable()
}
class Browser(id: EntityID<Int>) : NamedEntity<ViewBrowser>(id) {
companion object : NamedEntityClass<Browser>(Browsers)
override var name by Browsers.name
var description by Browsers.description
val client by Browsers.client
val width by Browsers.width
val height by Browsers.height
val additionalConfig by Browsers.additionalConfig
override fun asViewModel(baseUrl: String) = ViewBrowser.from(this, baseUrl)
}
class ViewBrowser(id: Int, name: String, description: String, @Listing("Client") val client: Clients,
@Listing("Width") val width: Int, @Listing("Height")val height: Int,
val additionalConfig: String?, override val baseUrl: String) : ViewModel(id, name, description) {
companion object : ViewType<ViewBrowser>("Browsers", "browsers", "mdi-cellphone-link") {
fun from(browser: Browser, baseUrl: String): ViewBrowser {
return ViewBrowser(
browser.id.value,
browser.name,
browser.description,
browser.client,
browser.width,
browser.height,
browser.additionalConfig,
baseUrl
)
}
}
override val icon = ViewBrowser.icon
}
enum class Clients {
Chrome, Firefox, Safari
}<file_sep>/src/main/kotlin/com/github/samblake/meerkat/crumbs/CrumbRoute.kt
package com.github.samblake.meerkat.crumbs
import com.github.samblake.meerkat.edge.Database.query
import com.github.samblake.meerkat.model.NamedEntity
import com.github.samblake.meerkat.model.NamedEntityClass
import io.ktor.application.ApplicationCall
import io.ktor.application.ApplicationCallPipeline
import io.ktor.routing.Route
import io.ktor.routing.RouteSelector
import io.ktor.routing.RouteSelectorEvaluation
import io.ktor.routing.RouteSelectorEvaluation.Companion.Constant
import io.ktor.routing.RoutingResolveContext
import io.ktor.util.AttributeKey
import io.ktor.util.pipeline.ContextDsl
import io.ktor.util.pipeline.PipelineContext
object Crumb {
val crumbs = AttributeKey<MutableList<String>>("crumbs")
val title = AttributeKey<String>("title")
val entity = AttributeKey<NamedEntity<out Any>>("entity")
val parent = AttributeKey<NamedEntity<out Any>>("parent")
}
@ContextDsl
fun Route.crumb(name: String, callback: Route.() -> Unit): Route {
val routeWithCrumb = this.createChild(object : RouteSelector(1.0) {
override fun evaluate(context: RoutingResolveContext, segmentIndex: Int): RouteSelectorEvaluation = Constant
override fun toString(): String = "crumb($name)"
})
// Intercepts calls from this route at the features step
routeWithCrumb.intercept(ApplicationCallPipeline.Features) {
populateCrumbs(name)
}
// Configure this route with the block provided by the user
callback(routeWithCrumb)
return routeWithCrumb
}
@ContextDsl
fun <T:NamedEntity<out Any>>Route.crumb(entityClass: NamedEntityClass<T>, callback: Route.() -> Unit): Route {
val routeWithCrumb = this.createChild(object : RouteSelector(1.0) {
override fun evaluate(context: RoutingResolveContext, segmentIndex: Int): RouteSelectorEvaluation = Constant
})
// Intercepts calls from this route at the features step
routeWithCrumb.intercept(ApplicationCallPipeline.Features) {
val ids = context.request.call.parameters.getAll("id")
val id = Integer.parseInt(ids?.last())
query { entityClass.findById(id) } ?.let {
context.request.call.attributes.getOrNull(Crumb.entity)?.let {
context.request.call.attributes.put(Crumb.parent, it)
}
context.request.call.attributes.put(Crumb.entity, it)
populateCrumbs(it.name)
}
}
// Configure this route with the block provided by the user
callback(routeWithCrumb)
return routeWithCrumb
}
private fun PipelineContext<Unit, ApplicationCall>.populateCrumbs(name: String) {
val crumbs = context.request.call.attributes.getOrNull(Crumb.crumbs) ?: ArrayList()
crumbs.add(name)
context.request.call.attributes.put(Crumb.crumbs, crumbs)
context.request.call.attributes.put(Crumb.title, name)
}<file_sep>/src/main/kotlin/com/github/samblake/meerkat/edge/Configuration.kt
package com.github.samblake.meerkat.edge
import com.uchuhimo.konf.Config
import com.uchuhimo.konf.ConfigSpec
import com.uchuhimo.konf.Feature.OPTIONAL_SOURCE_BY_DEFAULT
object Configuration {
private val SCHEMA = "meerkat"
private val DEFAULT_DRIVER_CLASS_NAME = "org.h2.Driver"
private val DEFAULT_JDBC_URL = "jdbc:h2:mem:test;INIT=" +
"RUNSCRIPT FROM './sql/init-h2.sql'\\;" +
"RUNSCRIPT FROM './sql/init.sql'\\;" +
"RUNSCRIPT FROM './sql/example.sql'\\;"
object MeerkatSpec : ConfigSpec() {
object WebSpec : ConfigSpec() {
val static by optional("static")
}
object JdbcSpec : ConfigSpec() {
val driver by optional(DEFAULT_DRIVER_CLASS_NAME)
val url by optional(DEFAULT_JDBC_URL)
val username by optional(SCHEMA)
val password by optional(SCHEMA)
}
}
val config = Config { addSpec(MeerkatSpec) }
.enable(OPTIONAL_SOURCE_BY_DEFAULT)
.from.properties.resource("meerkat.properties")
.from.env()
.from.systemProperties()
fun static() = config[MeerkatSpec.WebSpec.static]
fun jdbcDriver() = config[MeerkatSpec.JdbcSpec.driver]
fun jdbcUrl() = config[MeerkatSpec.JdbcSpec.url]
fun jdbcUsername() = config[MeerkatSpec.JdbcSpec.username]
fun jdbcPassword() = config[MeerkatSpec.JdbcSpec.password]
}<file_sep>/src/main/kotlin/com/github/samblake/meerkat/selenium/SeleniumService.kt
package com.github.samblake.meerkat.selenium
import io.github.bonigarcia.wdm.WebDriverManager
import org.openqa.selenium.chrome.ChromeDriver
import ru.yandex.qatools.ashot.AShot
import ru.yandex.qatools.ashot.shooting.ShootingStrategies
import java.nio.file.Paths
import javax.imageio.ImageIO
fun main() {
SeleniumService().screenshot("http://www.boardgamegeek.com")
}
class SeleniumService {
    val strategy = ShootingStrategies.viewportPasting(100)
    fun screenshot(url: String) {
        WebDriverManager.chromedriver().setup()
        val driver = ChromeDriver()
        driver.get(url)
        val fpScreenshot = AShot().shootingStrategy(strategy).takeScreenshot(driver)
        val dir = Paths.get("/home/sam/Pictures/ss/")
        // create the output directory only if it does not exist yet
        if (!dir.toFile().exists()) {
            dir.toFile().mkdirs()
        }
        val file = dir.resolve("bgg.png").toFile()
        ImageIO.write(fpScreenshot.getImage(), "PNG", file)
driver.quit()
}
}<file_sep>/sql/init-h2.sql
CREATE SCHEMA IF NOT EXISTS meerkat;
SET SCHEMA meerkat;<file_sep>/sql/example.sql
INSERT INTO projects (id, name, description, base) VALUES (1, 'Google', 'A basic example.', 'https://www.google.com');
INSERT INTO browsers (id, name, description, client, width, height)
VALUES (1, 'Desktop', 'Chrome desktop (21.5" monitor)', 'Chrome', 1920, 1080);
INSERT INTO browsers (id, name, description, client, width, height)
VALUES (2, 'Mobile', 'Chrome mobile (iPhone 7)', 'Chrome', 750, 1334);
INSERT INTO cases (id, name, description, project, path)
VALUES (1, 'Home', 'The home page', 1, '/');
INSERT INTO scenarios (id, name, description, project)
VALUES (1, 'Example', 'A simple example scenario', 1);
<file_sep>/src/main/kotlin/com/github/samblake/meerkat/Meerkat.kt
package com.github.samblake.meerkat
import com.github.samblake.meerkat.crumbs.Crumb.crumbs
import com.github.samblake.meerkat.crumbs.Crumb.entity
import com.github.samblake.meerkat.crumbs.Crumb.title
import com.github.samblake.meerkat.crumbs.crumb
import com.github.samblake.meerkat.edge.Configuration
import com.github.samblake.meerkat.edge.Database
import com.github.samblake.meerkat.menu.Item
import com.github.samblake.meerkat.menu.Menu
import com.github.samblake.meerkat.menu.Section
import com.github.samblake.meerkat.menu.ViewMenu
import com.github.samblake.meerkat.model.*
import com.github.samblake.meerkat.services.BrowserService
import com.github.samblake.meerkat.services.ProjectService
import com.github.samblake.meerkat.services.ScenarioService
import io.ktor.application.ApplicationCall
import io.ktor.application.call
import io.ktor.application.install
import io.ktor.features.CallLogging
import io.ktor.features.Compression
import io.ktor.features.ContentNegotiation
import io.ktor.features.DefaultHeaders
import io.ktor.gson.gson
import io.ktor.http.ContentType
import io.ktor.http.content.files
import io.ktor.http.content.static
import io.ktor.request.contentType
import io.ktor.request.uri
import io.ktor.response.respond
import io.ktor.routing.get
import io.ktor.routing.route
import io.ktor.routing.routing
import io.ktor.server.engine.embeddedServer
import io.ktor.server.netty.Netty
import io.ktor.thymeleaf.Thymeleaf
import io.ktor.thymeleaf.ThymeleafContent
import io.ktor.util.AttributeKey
import io.ktor.util.pipeline.PipelineContext
import nz.net.ultraq.thymeleaf.LayoutDialect
import org.thymeleaf.templateresolver.ClassLoaderTemplateResolver
import java.text.DateFormat.LONG
fun main() {
val staticDir = Configuration.static()
Database.init()
embeddedServer(Netty, port = 7000) {
install(DefaultHeaders)
install(Compression)
install(CallLogging)
install(ContentNegotiation) {
gson {
setDateFormat(LONG)
setPrettyPrinting()
}
}
install(Thymeleaf) {
setTemplateResolver(ClassLoaderTemplateResolver().apply {
prefix = "templates/"
suffix = ".html"
characterEncoding = "utf-8"
addDialect(LayoutDialect())
})
}
routing {
static("css") {
files("${staticDir}/css")
}
static("js") {
files("${staticDir}/js")
}
route("") { crumb("Meerkat") {
get {
call.respond(ThymeleafContent("index", mapOf(
attrTo(title), attrTo(crumbs), "menu" to generateMenu()
)))
}
with (ViewProject) { route(urlSegment) { crumb(name) {
get {
val projects = ProjectService.all(url())
listCall(projects)
}
route("{id}") { crumb(Project) {
get {
val project = attr(entity).asViewModel(url())
viewCall(project)
}
with (ViewScenario) { route(urlSegment) { crumb(name) {
get {
val project = attr(entity) as Project
val scenarios = ScenarioService.all(project, url())
listCall(scenarios)
}
route("{id}") { crumb(Scenario) {
get {
val scenario = attr(entity).asViewModel(url())
viewCall(scenario)
}
}
}}}}
}
}}}}
with (ViewBrowser) { route(urlSegment) { crumb(name) {
get {
val browsers = BrowserService.all(url())
listCall(browsers)
}
route("{id}") { crumb(Browser) {
get {
val browser = attr(entity).asViewModel(url())
viewCall(browser)
}
}}
}}}
}}
}
}.start(wait = true)
}
private fun PipelineContext<Unit, ApplicationCall>.url() = call.request.uri
private suspend fun PipelineContext<Unit, ApplicationCall>.viewCall(browser: Any) {
when (call.request.contentType()) {
ContentType.Application.Json -> call.respond(browser)
else -> call.respond(ThymeleafContent("generic/view", viewMap(browser)))
}
}
private suspend fun PipelineContext<Unit, ApplicationCall>.listCall(projects: List<Any>) {
when (call.request.contentType()) {
ContentType.Application.Json -> call.respond(projects)
else -> call.respond(ThymeleafContent("generic/list", listMap(projects)))
}
}
private fun PipelineContext<Unit, ApplicationCall>.listMap(entities: List<Any>): Map<String, Any> = mapOf(
attrTo(title),
attrTo(crumbs),
"entities" to entities,
"menu" to generateMenu()
)
private fun PipelineContext<Unit, ApplicationCall>.viewMap(entity: Any): Map<String, Any> = mapOf(
attrTo(title),
attrTo(crumbs),
"entity" to entity,
"menu" to generateMenu()
)
val menu = Menu(listOf(
Section("General", listOf(Item("Home", "/", "mdi-home"))),
Section("Setup", listOf(Item(ViewProject), Item(ViewBrowser))),
Section("Runs", listOf())
))
private fun PipelineContext<Unit, ApplicationCall>.generateMenu(): ViewMenu {
val crumbs = attr(crumbs)
val selectedItem = menu.findSelectedItem(crumbs)
return ViewMenu(menu, selectedItem)
}
private fun <T:Any>PipelineContext<Unit, ApplicationCall>.attr(key: AttributeKey<T>): T =
context.request.call.attributes.get(key)
private fun <T:Any>PipelineContext<Unit, ApplicationCall>.attrTo(key: AttributeKey<T>): Pair<String, T> =
key.name to attr(key)
<file_sep>/src/main/kotlin/com/github/samblake/meerkat/model/Scenario.kt
package com.github.samblake.meerkat.model
import org.jetbrains.exposed.dao.EntityID
object Scenarios : NamedTable("scenarios") {
val name = varchar("name", length = 100)
val description = text("description")
val project = reference("project", Projects)
}
class Scenario(id: EntityID<Int>) : NamedEntity<ViewScenario>(id) {
companion object : NamedEntityClass<Scenario>(Scenarios)
override var name by Scenarios.name
var description by Scenarios.description
var project by Project referencedOn Scenarios.project
var cases by Case via ScenarioCases
override fun asViewModel(baseUrl: String) = ViewScenario.from(this, baseUrl)
}
class ViewScenario(id: Int, name: String, description: String,
override val baseUrl: String) : ViewModel(id, name, description) {
companion object : ViewType<ViewScenario>("Scenarios","scenarios", "mdi-playlist-check") {
fun from(scenario: Scenario, baseUrl: String): ViewScenario {
return ViewScenario(
scenario.id.value,
scenario.name,
scenario.description,
baseUrl
)
}
}
override val icon = ViewScenario.icon
}
|
c386327fed1b5da9313e10661c2f346945f2f305
|
[
"SQL",
"YAML",
"Markdown",
"Maven POM",
"Dockerfile",
"Kotlin"
] | 26
|
Kotlin
|
samblake/meerkat
|
2cb0eec7ebec8dfca95aab2ba7e201390917eb6b
|
d0b1b76d4081bcd6527f5b550679715eb52a8f46
|
refs/heads/master
|
<repo_name>llucasmarques/Double-Linked-List<file_sep>/main.c
#include <stdio.h>
#include <stdlib.h>
#include "inverte.c"
int main(){
tipoLista *lista;
lista = NULL;
    //Insertions
insereFim(&lista,1);
insereFim(&lista,2);
insereFim(&lista,3);
insereFim(&lista,4);
printf("Lista original: \n");
imprimeLista(lista);
printf("\n\n\n");
printf("Lista após a inversão: \n");
inversao(&lista);
imprimeLista(lista);
printf("\n\n\n");
    return 0;
}
<file_sep>/inverte.c
#ifndef LISTA_C
#define LISTA_C
#include <stdio.h>
#include <stdlib.h>
//Definition of the node type
struct estruturaLista{
int dado;
struct estruturaLista *prox;
struct estruturaLista *ant;
};
typedef struct estruturaLista tipoLista;
//prototypes
void insereFim(tipoLista **lst,int valor);
tipoLista* alocaNo(int valor);
void imprimeLista(tipoLista *lst);
void inversao(tipoLista **lst);
//Function that inserts a value at the end of the list
void insereFim(tipoLista **lst,int valor){
tipoLista *novoNO,*aux;
novoNO = NULL;
    //if the list is still empty
    if(*lst == NULL){
        novoNO = alocaNo(valor);//allocate a node for this value
        *lst = novoNO;
    //if the list already has values
    }else{
        novoNO = alocaNo(valor);//allocate
        aux = *lst;//aux starts at the first element
        while(aux->prox != NULL){//walk forward until the last node is reached
            aux = aux->prox;
        }
        novoNO->ant = aux;//the new node's previous pointer is the current last node
        aux->prox = novoNO;//the current last node now points to the new node
}
}
//Function that allocates a new node
tipoLista* alocaNo(int valor){
    tipoLista *novoNo;
    novoNo = (tipoLista*) malloc(sizeof(tipoLista));
    if(novoNo){
        novoNo->dado = valor;
        novoNo->prox = NULL;
        novoNo->ant = NULL;
    }
    return novoNo;
}
//Function that prints the list
void imprimeLista(tipoLista *lst){
while(lst != NULL){
printf("[%d]",lst->dado);
lst = (tipoLista *) lst->prox;
}
printf("\n");
}
void inversao(tipoLista **lst){
    tipoLista *aux, *aux2;
    if(*lst == NULL || (*lst)->prox == NULL){
        return;//nothing to invert for an empty or single-element list
    }
    aux = (*lst)->prox;//initialize the auxiliary pointers
    aux2 = (*lst)->prox->prox;//starts one node ahead of aux
    (*lst)->ant = (*lst)->prox;//first node: swap its links
    (*lst)->prox = NULL;
    while(aux2 != NULL){
        //swap the links of the middle nodes
        aux->prox = aux->ant;
        aux->ant = aux2;
        //advance the auxiliary pointers
        aux = aux2;
        aux2 = aux2->prox;
    }
    //the last node becomes the new head
    aux->prox = aux->ant;
    aux->ant = NULL;
    (*lst) = aux;
}
#endif
|
9fad3613520529c725652a072d6dcd59ceeaac3e
|
[
"C"
] | 2
|
C
|
llucasmarques/Double-Linked-List
|
f27bb0eb05f1e6e059d4b5721f9050d050d80b6b
|
44db048c555dba58e79a3a72b8c17e6a2247a2a2
|
refs/heads/master
|
<file_sep>package com.paruspringapp.jpa.models;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
@Entity
@JsonIgnoreProperties({ "hibernateLazyInitializer", "handler" })
public class Person {
@Id
@GeneratedValue
private long id;
private String firstname;
private String lastname;
public Person() {
}
public Person(final String firstname, final String lastname) {
this.firstname = firstname;
this.lastname = lastname;
}
public long getId() {
return id;
}
public void setId(final long id) {
this.id = id;
}
public String getFirstname() {
return firstname;
}
public void setFirstname(final String firstname) {
this.firstname = firstname;
}
public String getLastname() {
return lastname;
}
public void setLastname(final String lastname) {
this.lastname = lastname;
}
}
<file_sep>package com.paruspringapp.services.rest;
import java.util.List;
import java.util.logging.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.CrossOrigin;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import com.paruspringapp.exceptions.PersonException;
import com.paruspringapp.jpa.models.Person;
import com.paruspringapp.services.business.PersonService;
import com.paruspringapp.services.models.Response;
import com.paruspringapp.util.PayloadValidator;
@RestController
@RequestMapping("/persons")
public class PersonRestService {
private static Logger LOGGER = Logger.getLogger(PersonRestService.class.getName());
@Autowired
private PersonService personServiceImpl;
@GetMapping
public ResponseEntity<List<Person>> getPersons() throws PersonException {
List<Person> persons = personServiceImpl.getPersons();
if (persons.isEmpty())
throw new PersonException("No records found.");
return new ResponseEntity<>(persons, HttpStatus.OK);
}
@CrossOrigin(origins = "http://localhost:4200")
@GetMapping(path = "/{id}")
public ResponseEntity<Person> getPersonById(@PathVariable("id") final long id) throws PersonException {
LOGGER.info("getPersonById id -> " + id);
Person person = personServiceImpl.getPersonById(id);
if (person == null || person.getId() <= 0)
throw new PersonException("Record does not exist.");
return new ResponseEntity<>(person, HttpStatus.OK);
}
@PostMapping
public ResponseEntity<Person> savePerson(@RequestBody final Person person) throws PersonException {
LOGGER.info("savePerson person -> " + person);
if (!PayloadValidator.validatePayload(person))
throw new PersonException("Payload malformed, id must not be defined");
return new ResponseEntity<>(personServiceImpl.savePerson(person), HttpStatus.OK);
}
@PutMapping
public ResponseEntity<Person> updatePerson(@RequestBody final Person person) throws PersonException {
LOGGER.info("updatePerson person -> " + person);
Person checkperson = personServiceImpl.getPersonById(person.getId());
if (checkperson == null || checkperson.getId() <= 0)
throw new PersonException("Record does not exist.");
return new ResponseEntity<>(personServiceImpl.updatePerson(person), HttpStatus.OK);
}
@DeleteMapping(path = "/{id}")
public ResponseEntity<Response> deletePerson(@PathVariable("id") final long id) throws PersonException {
LOGGER.info("deletePerson id -> " + id);
Person checkperson = personServiceImpl.getPersonById(id);
LOGGER.info("deletePerson id -> " + checkperson);
if (checkperson == null || checkperson.getId() <= 0)
throw new PersonException("Record does not exist.");
personServiceImpl.deletePerson(Long.valueOf(id));
return new ResponseEntity<Response>(new Response(HttpStatus.OK.value(), "Person has been deleted"),
HttpStatus.OK);
}
}
<file_sep>package com.paruspringapp.util;
import com.paruspringapp.jpa.models.Person;
public class PayloadValidator {
public static boolean validatePayload(final Person person) {
return person.getId() == 0;
}
}
<file_sep>package com.paruspringapp;
import java.util.logging.Logger;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import com.paruspringapp.jpa.models.Person;
import com.paruspringapp.jpa.repositories.PersonRepository;
@SpringBootApplication(scanBasePackages = { "com.paruspringapp" })
public class ParuSpringApplication {
private static final Logger LOGGER = Logger.getLogger(ParuSpringApplication.class.getName());
public static void main(String[] args) throws Exception {
SpringApplication.run(ParuSpringApplication.class, args);
}
@Bean
public CommandLineRunner setup(PersonRepository personRepository) {
return (args) -> {
personRepository.save(new Person("Túrin", "Turambar"));
personRepository.save(new Person("Frodo", "Baggins"));
personRepository.save(new Person("Melkor", "Morgoth"));
LOGGER.info("The sample data has been generated");
};
}
}
<file_sep>package com.paruspringapp.services.rest.test;
import static org.hamcrest.collection.IsCollectionWithSize.hasSize;
import static org.hamcrest.core.Is.is;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.get;
import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.post;
import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.jsonPath;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.context.SpringBootTest.WebEnvironment;
import org.springframework.http.MediaType;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
import org.springframework.web.context.WebApplicationContext;
import com.paruspringapp.ParuSpringApplication;
@RunWith(SpringRunner.class)
@SpringBootTest(webEnvironment = WebEnvironment.RANDOM_PORT, classes = ParuSpringApplication.class)
@AutoConfigureMockMvc
@ContextConfiguration(classes = ParuSpringApplication.class)
public class PersonRestServiceTest {
@Autowired
private MockMvc mvc;
@Autowired
private WebApplicationContext wac;
@Before
public void setup() {
this.mvc = MockMvcBuilders.webAppContextSetup(wac).build();
}
@Test
public void testGetPersonById() throws Exception {
mvc.perform(get("/persons/1").accept(MediaType.APPLICATION_JSON))
.andExpect(jsonPath("$.firstname", is("Túrin"))).andDo(print());
}
@Test
public void testGetPersons() throws Exception {
mvc.perform(get("/persons").accept(MediaType.APPLICATION_JSON)).andExpect(jsonPath("$", hasSize(3)))
.andDo(print());
}
@Test
public void testSavePerson() throws Exception {
mvc.perform(post("/persons/").contentType(MediaType.APPLICATION_JSON)
.content("{\"id\" : \"4\", \"firstname\" : \"Titticus\", \"lastname\" : \"Nutsackius\" }")
.accept(MediaType.APPLICATION_JSON)).andDo(print());
}
}
<file_sep># paruspringapp
A template REST application built with Spring Boot.
<file_sep>FROM openjdk:8
ADD target/ParuSpringApp.jar ParuSpringApp.jar
EXPOSE 8085
ENTRYPOINT ["java", "-jar", "ParuSpringApp.jar"]<file_sep>package com.paruspringapp.services.business;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import com.paruspringapp.jpa.models.Person;
import com.paruspringapp.jpa.repositories.PersonRepository;
@Component
public class PersonServiceImpl implements PersonService {
@Autowired
private PersonRepository personRepository;
@Override
public List<Person> getPersons() {
return personRepository.findAll();
}
@Override
public Person getPersonById(final long id) {
return personRepository.getOne(id);
}
@Override
public Person savePerson(final Person person) {
return personRepository.save(person);
}
@Override
public Person updatePerson(final Person person) {
return personRepository.save(person);
}
@Override
public void deletePerson(final long id) {
personRepository.deleteById(id);
}
}
|
3e5795e41777f3de17f62220ad1c910f01a875c8
|
[
"Markdown",
"Java",
"Dockerfile"
] | 8
|
Java
|
shadowfacsimile/paruspringapp
|
ec518bc276c903825dcadb579e96a590cdc4fe89
|
831ed427c660c48f09b07aa1ff2a82bf8cc8db3e
|
refs/heads/master
|
<repo_name>Slaedr/EllipticFEM2D<file_sep>/libfem/fem.py
""" @brief Assembly routines of local and global FE matrices.
"""
import numpy as np
import numpy.linalg
from numpy import sin, cos, arctan
#from numba import jit, jitclass, int32, int64, float64, void, typeof
import scipy.special as scp
from .mesh import *
from .matrices import COOMatrix
from .coeff_functions import *
from .quadrature import GLQuadrature1D, GLQuadrature2DTriangle
from .elements import *
np.set_printoptions(linewidth=200)
cbig = 1.0e30
#@jit(nopython=True, cache=True)
def localStiffnessMatrix(gmap, elem, quadrature, stiffness_coeff_func, localstiff):
""" Computes the local stiffness matrix (of size ndofpvarel x ndofpvarel) of element elem.
ndofpvarel = number of DOFs per variable per element.
The output array localstiff needs to be pre-allocated with correct dimensions."""
ndof = localstiff.shape[0]
localstiff[:,:] = 0.0
basisg = np.zeros((ndof,2), dtype=np.float64)
jac = np.zeros((2,2), dtype=np.float64)
jacinv = np.zeros((2,2), dtype=np.float64)
for ig in range(quadrature.ng):
# get quadrature points and weights
x = quadrature.gp[ig,0]; y = quadrature.gp[ig,1]
w = quadrature.gw[ig]
# get basis gradients and jacobian determinant
elem.getBasisGradients(x,y,basisg)
jdet = gmap.getJacobian(x,y,jac,jacinv)
# physical location of quadrature point for coefficient function evaluation
gx,gy = gmap.evalGeomMapping(x,y)
# add contribution of this quadrature point to each integral
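        # integrand: k(x) * (J^{-T} grad_ref phi_i) . (J^{-T} grad_ref phi_j), i.e. the physical
        # gradients of the basis functions, scaled by the quadrature weight and the Jacobian determinant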
localstiff += w*stiffness_coeff_func(gx,gy)*jdet * np.dot(np.dot(basisg,jacinv), np.dot(jacinv.T,basisg.T))
def localH1Seminorm2(gmap, elem, quadrature, uvals):
""" Computes the local H^1 semi-norm squared of the FE function given by uvals on element elem.
"""
localseminorm2 = 0.0
ndof = uvals.shape[0]
basisg = np.zeros((ndof,2), dtype=np.float64)
jac = np.zeros((2,2), dtype=np.float64)
jacinv = np.zeros((2,2), dtype=np.float64)
for ig in range(quadrature.ng):
# get quadrature points and weights
x = quadrature.gp[ig,0]; y = quadrature.gp[ig,1]
w = quadrature.gw[ig]
# get basis gradients and jacobian determinant
elem.getBasisGradients(x,y,basisg)
jdet = gmap.getJacobian(x,y,jac,jacinv)
# add contribution of this quadrature point to the integral
dofsum = np.array([0.0,0.0])
for i in range(ndof):
dofsum[:] += uvals[i] * np.dot(jacinv.T, basisg[i,:])
localseminorm2 += np.dot(dofsum,dofsum) * w * jdet
return localseminorm2
#@jit(nopython=True, cache=True)
def localH1SeminormError2(gmap, elem, quadrature, uvals, time, exact_grad):
""" Computes the local H^1 semi-norm squared of the error
between the FE function given by uvals on element elem and the exact solution.
"""
localseminorm2 = 0.0
ndof = uvals.shape[0]
basisg = np.zeros((ndof,2), dtype=np.float64)
jac = np.zeros((2,2), dtype=np.float64)
jacinv = np.zeros((2,2), dtype=np.float64)
for ig in range(quadrature.ng):
# get quadrature points and weights
x = quadrature.gp[ig,0]; y = quadrature.gp[ig,1]
w = quadrature.gw[ig]
# get basis gradients and jacobian determinant
elem.getBasisGradients(x,y,basisg)
jdet = gmap.getJacobian(x,y,jac,jacinv)
# physical location of quadrature point for coefficient function evaluation
        gx,gy = gmap.evalGeomMapping(x,y)
        nabla = exact_grad(gx,gy,time)
        # add contribution of this quadrature point to the integral: |grad u_h - grad u_exact|^2
        graduh = np.array([0.0, 0.0])
        for i in range(ndof):
            graduh[:] += uvals[i] * np.dot(jacinv.T, basisg[i,:])
        diff = graduh - nabla
        localseminorm2 += np.dot(diff, diff) * w * jdet
return localseminorm2
#@jit(nopython=True, cache=True)
def localMassMatrix(gmap, elem, quadrature, mass_coeff_func, localmass):
""" Computes the local mass matrix of element elem.
    quadrature is the 2D quadrature context to be used; it has to be set up beforehand.
The output array localmass needs to be pre-allocated."""
ndof = localmass.shape[0]
localmass[:,:] = 0.0
basis = np.zeros(ndof, dtype=np.float64)
jac = np.zeros((2,2), dtype=np.float64)
jacinv = np.zeros((2,2), dtype=np.float64)
for ig in range(quadrature.ng):
# get quadrature points and weights
x = quadrature.gp[ig,0]; y = quadrature.gp[ig,1]
w = quadrature.gw[ig]
# get basis function values and jacobian determinant
elem.getBasisFunctions(x,y,basis)
jdet = gmap.getJacobian(x,y,jac,jacinv)
# physical location of quadrature point for coefficient function evaluation
gx,gy = gmap.evalGeomMapping(x,y)
# add contribution of this quadrature point to each integral
localmass += w*mass_coeff_func(gx,gy)*jdet * np.outer(basis,basis)
#@jit(nopython=True, cache=True)
def localL2Norm2(gmap, elem, quadrature, uvals):
""" Computes the L2 norm squared of a FE function with dofs uvals on element elem.
    quadrature is the 2D quadrature context to be used; it has to be set up beforehand.
"""
ndof = uvals.shape[0]
localnorm2 = 0.0
basis = np.zeros(ndof, dtype=np.float64)
jac = np.zeros((2,2), dtype=np.float64)
jacinv = np.zeros((2,2), dtype=np.float64)
for ig in range(quadrature.ng):
# get quadrature points and weights
x = quadrature.gp[ig,0]; y = quadrature.gp[ig,1]
w = quadrature.gw[ig]
# get basis function values and jacobian determinant
elem.getBasisFunctions(x,y,basis)
jdet = gmap.getJacobian(x,y,jac,jacinv)
# add contribution of this quadrature point to the integral
dofsum = 0
for i in range(ndof):
dofsum += uvals[i] * basis[i]
localnorm2 += w * dofsum*dofsum * jdet
return localnorm2
def localL2Error2(gmap, elem, quadrature, uvals, time, exact_sol):
""" Computes the L2 norm squared of the error of FE solution with dofs uvals on element elem.
Actual values of the exact solution function are used at the quadrature points.
quadrature is the 2D quadrature context to be used; has to be setup beforehand.
"""
ndof = uvals.shape[0]
localnorm2 = 0.0
basis = np.zeros(ndof, dtype=np.float64)
jac = np.zeros((2,2), dtype=np.float64)
jacinv = np.zeros((2,2), dtype=np.float64)
for ig in range(quadrature.ng):
# get quadrature points and weights
x = quadrature.gp[ig,0]; y = quadrature.gp[ig,1]
w = quadrature.gw[ig]
# get basis function values and jacobian determinant
elem.getBasisFunctions(x,y,basis)
jdet = gmap.getJacobian(x,y,jac,jacinv)
# physical location of quadrature point for exact function evaluation
gx,gy = gmap.evalGeomMapping(x,y)
uexact = exact_sol.eval(gx,gy,time)
# add contribution of this quadrature point to the integral
dofsum = np.dot(uvals.T, basis)
localnorm2 += w * (dofsum-uexact)*(dofsum-uexact) * jdet
return localnorm2
#@jit(nopython=True, cache=True)
def localLoadVector_domain(gmap, elem, quadrature, rhs_func, localload):
""" Computes the domain integral part of the local load vector.
localload must be pre-allocated.
"""
ndof = localload.shape[0]
localload[:] = 0.0
basis = np.zeros(ndof, dtype=np.float64)
jac = np.zeros((2,2), dtype=np.float64)
jacinv = np.zeros((2,2), dtype=np.float64)
for ig in range(quadrature.ng):
# get quadrature points and weights
x = quadrature.gp[ig,0]; y = quadrature.gp[ig,1]
w = quadrature.gw[ig]
# get basis gradients and jacobian determinant
elem.getBasisFunctions(x,y,basis)
jdet = gmap.getJacobian(x,y,jac,jacinv)
# physical location of quadrature point for coefficient function evaluation
gx,gy = gmap.evalGeomMapping(x,y)
# add contribution of this quadrature point to each integral
localload += w * rhs_func(gx,gy)*jdet * basis
def localLoadVector_boundary(face, quadrature, localload):
""" Computes the local boundary integral part of load vector for Neumann BCs.
localload must be allocated before passing to this.
g is a scalar function of two variables describing the Neumann BC.
"""
pass
def assemble_stiffness(m, A, pdeg, ngauss, coeff_stiff):
""" Assembles the stiffness matrix
"""
# For a Lagrange element, the number of DOFs per element is the same as the number of nodes per element
elem = LagrangeTriangleElement()
elem.setDegree(pdeg)
gm = LagrangeTriangleMap()
if(m.nnodel[0] == 6):
gm.setDegree(2)
elif(m.nnodel[0] == 3):
gm.setDegree(1)
integ2d = GLQuadrature2DTriangle(ngauss)
#print(" assemble(): Beginning stiffness assembly loop")
# iterate over the elements and add contributions
for ielem in range(m.nelem):
# setup required local arrays
localstiff = np.zeros((elem.ndof, elem.ndof))
phynodes = np.zeros((m.nnodel[ielem], 2))
# set element
phynodes[:,:] = m.coords[m.inpoel[ielem,:m.nnodel[ielem]],:]
gm.setPhysicalElementNodes(phynodes)
# get local matrices
localStiffnessMatrix(gm, elem, integ2d, coeff_stiff, localstiff)
# add contributions to global
for i in range(m.nnodel[ielem]):
for j in range(m.nnodel[ielem]):
A.rind.append(m.inpoel[ielem,i])
A.cind.append(m.inpoel[ielem,j])
A.vals.append(localstiff[i,j])
def assemble_mass(m, A, pdeg, ngauss, coeff_mass):
""" Assembles mass matrix.
"""
# For a Lagrange element, the number of DOFs per element is the same as the number of nodes per element
elem = LagrangeTriangleElement()
elem.setDegree(pdeg)
gm = LagrangeTriangleMap()
if(m.nnodel[0] == 6):
gm.setDegree(2)
elif(m.nnodel[0] == 3):
gm.setDegree(1)
integ2d = GLQuadrature2DTriangle(ngauss)
#print(" assemble(): Beginning mass assembly loop")
# iterate over the elements and add contributions
for ielem in range(m.nelem):
# setup required local arrays
localmass = np.zeros((elem.ndof, elem.ndof))
phynodes = np.zeros((m.nnodel[ielem], 2))
# set element
phynodes[:,:] = m.coords[m.inpoel[ielem,:m.nnodel[ielem]],:]
gm.setPhysicalElementNodes(phynodes)
# get local matrices
localMassMatrix(gm, elem, integ2d, coeff_mass, localmass)
# add contributions to global
for i in range(m.nnodel[ielem]):
for j in range(m.nnodel[ielem]):
A.rind.append(m.inpoel[ielem,i])
A.cind.append(m.inpoel[ielem,j])
A.vals.append(localmass[i,j])
def applyDirichletPenaltiesLHS(m, A, dirBCnum, dirflags):
# penalty for Dirichlet rows and columns, also computes dirflags
""" For the row of each node corresponding to a Dirichlet boundary, multiply the diagonal entry by a huge number cbig,
and set the RHS as 0. This makes other entries in the row negligible, and the nodal value becomes
(almost) equal to the required boundary value 0.
@param A is a matrix with an accessor method, not an object of class COOMatrix.
"""
print(" applyDirichletPenalitesIterative(): Imposing penalties on Dirichlet rows")
for iface in range(m.nbface):
for inum in range(len(dirBCnum)):
if m.bface[iface,m.nnofa[iface]] == dirBCnum[inum]:
for inode in range(m.nnofa[iface]):
dirflags[m.bface[iface,inode]] = 1
for i in range(m.npoin):
if dirflags[i] == 1:
A[i,i] = cbig
def applyDirichletRHS(b, dirflags, dirval):
""" penalty for homogeneous Dirichlet rows and columns - for use with iterations like in time or nonlinear
Imposes a constant Dirichlet value.
"""
b[:] = np.where(dirflags==1, cbig*dirval, b[:])
#@jit (nopython=True, cache=True, locals = {"cbig":float64})
def assemble(m, dirBCnum, A, b, pdeg, ngauss, funcs):
""" Assembles a LHS matrix and RHS vector.
Applies a penalty method for Dirichlet BCs.
"""
# For a Lagrange element, the number of DOFs per element is the same as the number of nodes per element
elem = LagrangeTriangleElement()
elem.setDegree(pdeg)
gm = LagrangeTriangleMap()
if(m.nnodel[0] == 6):
gm.setDegree(2)
elif(m.nnodel[0] == 3):
gm.setDegree(1)
integ2d = GLQuadrature2DTriangle(ngauss)
#print("assemble(): Beginning assembly loop over elements.")
# iterate over the elements and add contributions
for ielem in range(m.nelem):
# setup required local arrays
localmass = np.zeros((elem.ndof, elem.ndof))
localstiff = np.zeros((elem.ndof, elem.ndof))
localload = np.zeros(elem.ndof)
phynodes = np.zeros((m.nnodel[ielem], 2))
# set element
phynodes[:,:] = m.coords[m.inpoel[ielem,:m.nnodel[ielem]],:]
gm.setPhysicalElementNodes(phynodes)
# get local matrices
localLoadVector_domain(gm, elem, integ2d, funcs.rhs, localload)
localStiffnessMatrix(gm, elem, integ2d, funcs.stiffness, localstiff)
localMassMatrix(gm, elem, integ2d, funcs.mass, localmass)
# add contributions to global
b[m.inpoel[ielem,:m.nnodel[ielem]]] += localload[:]
for i in range(m.nnodel[ielem]):
for j in range(m.nnodel[ielem]):
#A[m.inpoel[ielem,i], m.inpoel[ielem,j]] += localstiff[i,j] + localmass[i,j]
A.rind.append(m.inpoel[ielem,i])
A.cind.append(m.inpoel[ielem,j])
A.vals.append(localstiff[i,j]+localmass[i,j])
# penalty for Dirichlet rows and columns
""" For the row of each node corresponding to a Dirichlet boundary,
multiply the diagonal entry by a huge number cbig, and set the RHS as boundary_value * cbig.
This makes other entries in the row negligible, and the nodal value becomes
(almost) equal to the required boundary value.
I don't expect this to cause problems as the diagonal dominance of the matrix is increasing.
"""
#print("assembly(): Imposing penalties on Dirichlet rows")
cbig = 1.0e30
dirflags = np.zeros(m.npoin,dtype=np.int64)
for iface in range(m.nbface):
for inum in range(len(dirBCnum)):
if m.bface[iface,m.nnofa[iface]] == dirBCnum[inum]:
for inode in range(m.nnofa[iface]):
dirflags[m.bface[iface,inode]] = 1
"""for ipoin in range(m.npoin):
if dirflags[ipoin] == 1:
A[ipoin,ipoin] *= cbig
b[ipoin] = A[ipoin,ipoin]*dirichlet_function(m.coords[ipoin,0], m.coords[ipoin,1])"""
# Since the matrix is not assembled yet, we need to make sure to multiply by cbig only once per row.
# Note that the new diagonal value in Dirichlet rows will be
# cbig*a[i,i]_1 + a[i,i]_2 ... + a[i,i]_npsup, and
# b[i] is set to the same value times the boundary value.
processed = np.zeros(m.npoin, dtype=np.int64)
for i in range(m.npoin):
if dirflags[i] == 1:
b[i] = 0.0
for i in range(len(A.rind)):
if dirflags[A.rind[i]] == 1:
if A.cind[i] == A.rind[i]:
if processed[A.rind[i]] == 0:
processed[A.rind[i]] = 1
A.vals[i] *= cbig
b[A.rind[i]] += A.vals[i]
for i in range(m.npoin):
if dirflags[i] == 1:
b[i] *= funcs.dirichlet(m.coords[i,0], m.coords[i,1])
def removeDirichletRowsAndColumns(m,A,b,dirBCnum,funcs):
""" Alternatively, rather than use a penalty method, we can eliminate Dirichlet rows and columns.
"""
print("Removing Dirichlet rows and columns.")
# preprocessing to detect Dirichlet nodes; dirflag stores whether a given node is a Dirichlet node
ntotvars = m.npoin
dirflag = np.zeros(m.npoin,dtype=np.int32)
for iface in range(m.nbface):
for inum in range(len(dirBCnum)):
if(m.bface[iface,m.nnofa[iface]] == dirBCnum[inum]):
# if this face is a Dirichlet face, mark its nodes
for ibnode in range(m.nnofa[iface]):
dirflag[m.bface[iface,ibnode]] = 1
for ipoin in range(m.npoin):
ntotvars -= dirflag[ipoin]
Ad = np.zeros((ntotvars,ntotvars), dtype=np.float64)
bd = np.zeros(ntotvars, dtype=np.float64)
inocc = 0
for ipoin in range(m.npoin):
if(dirflag[ipoin] != 1):
bd[inocc] = b[ipoin]
jnocc = 0
for jpoin in range(m.npoin):
if dirflag[jpoin] != 1:
Ad[inocc,jnocc] = A[ipoin,jpoin]
jnocc += 1
else:
bd[inocc] -= ( A[ipoin,jpoin] * funcs.dirichlet(m.coords[jpoin,0],m.coords[jpoin,1]) )
inocc += 1
return (Ad,bd,dirflag)
def solveAndProcess(m, A, b, dirflag):
print("solveAndProcess: Solving and getting final solution vector")
xd = np.linalg.solve(A,b)
x = np.zeros(m.npoin, dtype=np.float64)
inocc = 0
for ipoin in range(m.npoin):
if(dirflag[ipoin] != 1):
x[ipoin] = xd[inocc]
inocc += 1
else:
x[ipoin] = dirichlet_function(m.coords[ipoin,0],m.coords[ipoin,1])
print("solveAndProcess: Done.")
return x,xd
#@jit(nopython=True, cache=True)
def compute_norm(m, v, pdeg, ngauss):
""" Compute the L2 and H1 norms of the FE function v
Note: it is currently assumed that all elements are topologically identical and use the same basis functions.
"""
# For a Lagrange element, the number of DOFs per element is the same as the number of nodes per element
elem = LagrangeTriangleElement()
elem.setDegree(pdeg)
gm = LagrangeTriangleMap()
if(m.nnodel[0] == 6):
gm.setDegree(2)
elif(m.nnodel[0] == 3):
gm.setDegree(1)
integ2d = GLQuadrature2DTriangle(ngauss)
l2norm = 0; h1norm = 0
# iterate over the elements and add contributions
for ielem in range(m.nelem):
# setup required local arrays
phynodes = np.zeros((m.nnodel[ielem], 2))
# set element
phynodes[:,:] = m.coords[m.inpoel[ielem,:m.nnodel[ielem]],:]
gm.setPhysicalElementNodes(phynodes)
uvals = v[m.inpoel[ielem,:m.nnodel[ielem]]]
# compute and add contribution of this element
l2normlocal = localL2Norm2(gm, elem, integ2d, uvals)
l2norm += l2normlocal; h1norm += l2normlocal
h1norm += localH1Seminorm2(gm, elem, integ2d, uvals)
return (np.sqrt(l2norm), np.sqrt(h1norm))
def compute_error(m, v, pdeg, ngauss, time, exact_soln):
""" Compute the L2 norm of the error of the FE solution v
Note: it is currently assumed that all elements are topologically identical and use the same basis functions.
"""
# For a Lagrange element, the number of DOFs per element is the same as the number of nodes per element
elem = LagrangeTriangleElement()
elem.setDegree(pdeg)
gm = LagrangeTriangleMap()
if(m.nnodel[0] == 6):
gm.setDegree(2)
elif(m.nnodel[0] == 3):
gm.setDegree(1)
integ2d = GLQuadrature2DTriangle(ngauss)
l2norm = 0; h1norm = 0
# iterate over the elements and add contributions
for ielem in range(m.nelem):
# setup required local arrays
phynodes = np.zeros((m.nnodel[ielem], 2))
# set element
phynodes[:,:] = m.coords[m.inpoel[ielem,:m.nnodel[ielem]],:]
gm.setPhysicalElementNodes(phynodes)
uvals = v[m.inpoel[ielem,:m.nnodel[ielem]]] # this only works for isoparametric!
# compute and add contribution of this element
l2normlocal = localL2Error2(gm, elem, integ2d, uvals, time, exact_soln)
l2norm += l2normlocal
return np.sqrt(l2norm)
<file_sep>/libfem/matrices.py
"""
@brief Defines some matrix-related functionality.
"""
import numpy as np
class COOMatrix:
def __init__(self, nrows, ncols):
self.rind = []
self.cind = []
self.vals = []
self.m = nrows
self.n = ncols
#@jit(nopython=True, cache=True)
def matvec(A,b):
# Matvec
x = np.zeros(b.shape)
for i in range(A.shape[0]):
for j in range(b.shape[0]):
x[i] += A[i,j]*b[j]
return x
#@jit(nopython=True, cache=True)
def dotprod(a,b):
# Scalar product
x = 0.0
for i in range(a.shape[0]):
x += a[i]*b[i]
return x
<file_sep>/libfem/quadrature.py
""" @brief Gauss-Lengendre quadrature rules for 1D and 2D integrals.
"""
import numpy as np
from numba import jitclass, int64, float64
spec = [('ng', int64), ('gp', float64[:,:]), ('gw', float64[:])]
class Quadrature:
def __init__(self, ngauss):
self.ng = ngauss
self.gp = np.zeros((self.ng,2))
self.gw = np.zeros(self.ng)
def evaluate(self, fvals):
""" Returns the integral. The number of entries in fvals must be ng."""
#return (self.gw*fvals).sum()
sum1 = 0.0
for i in range(self.ng):
sum1 += self.gw[i]*fvals[i]
return sum1
#@jitclass(spec)
class GLQuadrature1D(Quadrature):
def __init__(self, ngauss):
self.ng = ngauss
self.gp = np.zeros((self.ng,1), dtype=np.float64)
self.gw = np.zeros(self.ng, dtype=np.float64)
if self.ng == 1:
self.gp[0,0] = 0.0
self.gw[0] = 2.0
elif self.ng == 2:
self.gp[:,0] = [-1.0/np.sqrt(3), 1.0/np.sqrt(3)]
self.gw[:] = [1.0, 1.0]
elif self.ng == 3:
self.gp[:,0] = [-np.sqrt(3.0/5.0), 0, np.sqrt(3.0/5.0)]
self.gw[:] = [5.0/9.0, 8.0/9.0, 5.0/9.0]
elif self.ng == 4:
            self.gp[:,0] = [-np.sqrt(3.0/7 + 2.0/7*np.sqrt(6.0/5)), -np.sqrt(3.0/7 - 2.0/7*np.sqrt(6.0/5)), np.sqrt(3.0/7 - 2.0/7*np.sqrt(6.0/5)), np.sqrt(3.0/7 + 2.0/7*np.sqrt(6.0/5))]
self.gw[:] = [(18.0-np.sqrt(30))/36.0, (18.0+np.sqrt(30))/36.0, (18.0+np.sqrt(30))/36.0, (18.0-np.sqrt(30))/36.0]
else:
print("! GLQuadrature1D: Quadrature with this number of Gauss points is not supported!")
#@jitclass(spec)
class GLQuadrature2DTriangle(Quadrature):
def __init__(self, ngauss):
self.ng = ngauss
self.gp = np.zeros((self.ng,2), dtype=np.float64)
self.gw = np.zeros(self.ng, dtype=np.float64)
if self.ng == 1:
self.gp[0,:] = [1.0/3, 1.0/3]
self.gw[0] = 0.5
#print("GLQuadrature2DTriangle: Ngauss = 1.")
elif self.ng == 3:
self.gp[:,:] = np.array([0.6666666666667,0.1666666666667 , 0.1666666666667,0.6666666666667, 0.1666666666667,0.1666666666667]).reshape((self.ng,2))
self.gw[:] = [0.1666666666667, 0.1666666666667, 0.1666666666667]
#print("GLQuadrature2DTriangle: Ngauss = 3.")
elif self.ng == 4:
self.gp = np.array([0.33333333333,0.33333333333, 0.20000000000,0.20000000000, 0.20000000000, 0.60000000000, 0.60000000000, 0.20000000000]).reshape((self.ng,2))
self.gw[:] = [-0.28125000000, 0.26041666667, 0.26041666667, 0.26041666667]
#print("GLQuadrature2DTriangle: Ngauss = 4.")
elif self.ng == 6:
self.gp[:,:] = np.array([0.108103018168070,0.445948490915965,
0.445948490915965,0.108103018168070,
0.445948490915965,0.445948490915965,
0.816847572980459,0.091576213509771,
0.091576213509771,0.816847572980459,
0.091576213509771,0.091576213509771]).reshape((self.ng,2))
self.gw[:] = [0.1116907948390055,
0.1116907948390055,
0.1116907948390055,
0.0549758718276610,
0.0549758718276610,
0.0549758718276610]
#print("GLQuadrature2DTriangle: Ngauss = 6.")
elif self.ng == 12:
self.gp[:,:] = np.array([0.873821971016996,0.063089014491502,
0.063089014491502,0.873821971016996,
0.063089014491502,0.063089014491502,
0.501426509658179,0.249286745170910,
0.249286745170910,0.501426509658179,
0.249286745170910,0.249286745170910,
0.636502499121399,0.310352451033785,
0.636502499121399,0.053145049844816,
0.310352451033785,0.636502499121399,
0.310352451033785,0.053145049844816,
0.053145049844816,0.310352451033785,
0.053145049844816,0.636502499121399]).reshape((self.ng,2))
self.gw[:] = [0.0254224531851035,
0.0254224531851035,
0.0254224531851035,
0.0583931378631895,
0.0583931378631895,
0.0583931378631895,
0.0414255378091870,
0.0414255378091870,
0.0414255378091870,
0.0414255378091870,
0.0414255378091870,
0.0414255378091870]
#print("GLQuadrature2DTriangle: Ngauss = 12.")
else:
print("! GLQuadrature2DTriangle: Quadrature with this number of Gauss points is not supported!")
<file_sep>/__init__.py
__all__ = ["libfem"]
<file_sep>/libfem/plotconv.py
#! /usr/bin/env python3
import sys
import numpy as np
from matplotlib import pyplot as plt
if(len(sys.argv) < 2):
print("Error. Please provide input file name.")
sys.exit(-1)
fname = sys.argv[1]
title = fname.split('/')[-1]
data = np.genfromtxt(fname)
n = data.shape[0]
pslope = np.zeros(data.shape[1])
labels = ['L2: ','H1: ']
symbs = ['o-', 's-']
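# Ordinary least-squares slope of log(error) vs log(mesh size): slope = (n*Sxy - Sx*Sy)/(n*Sxx - Sx^2)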
for j in range(1,data.shape[1]):
psigy = data[:,j].sum()
sigx = data[:,0].sum()
sigx2 = (data[:,0]*data[:,0]).sum()
psigxy = (data[:,j]*data[:,0]).sum()
pslope[j] = (n*psigxy-sigx*psigy)/(n*sigx2-sigx**2)
print("Slope is " + str(pslope[j]))
plt.plot(data[:,0],data[:,j],symbs[j-1],label=labels[j-1]+str(pslope[j]))
#plt.plot(data[:,0],data[:,2],'s-',label=labels[1]+str(pslope[1]))
plt.title("Grid-refinement (legend: slopes)") # + title)
plt.xlabel("Log mesh size")
plt.ylabel("Log error")
plt.legend()
plt.show()
<file_sep>/libfem/output.py
""" @file output.py
@brief Routines for output to VTU file
"""
import numpy as np
from .mesh import Mesh2d
def writePointScalarToVTU(m, filename, scalarname, x):
""" Writes one scalar mesh function into a VTU file.
"""
fout = open(filename, 'w')
fout.write("<VTKFile type=\"UnstructuredGrid\" version=\"0.1\" byte_order=\"LittleEndian\">\n")
fout.write("<UnstructuredGrid>\n")
fout.write("\t<Piece NumberOfPoints=\""+ str(m.npoin)+ "\" NumberOfCells=\""+ str(m.nelem)+ "\">\n")
fout.write("\t\t<PointData Scalars=\""+scalarname+"\">\n")
fout.write("\t\t\t<DataArray type=\"Float64\" Name=\""+ scalarname+ "\" Format=\"ascii\">\n")
for i in range(m.npoin):
fout.write("\t\t\t\t"+ str(x[i])+ "\n")
fout.write("\t\t\t</DataArray>\n")
fout.write("\t\t</PointData>\n")
fout.write("\t\t<Points>\n")
fout.write("\t\t<DataArray type=\"Float64\" NumberOfComponents=\"3\" Format=\"ascii\">\n")
for ipoin in range(m.npoin):
fout.write("\t\t\t"+ str(m.coords[ipoin,0])+ " "+ str(m.coords[ipoin,1])+ " 0.0\n")
fout.write("\t\t</DataArray>\n\t\t</Points>\n")
fout.write("\t\t<Cells>\n")
fout.write("\t\t\t<DataArray type=\"UInt32\" Name=\"connectivity\" Format=\"ascii\">\n")
for i in range(m.nelem):
fout.write("\t\t\t\t")
elemcode = 5
if m.nnodel[i] == 4:
elemcode = 9
elif m.nnodel[i] == 6:
elemcode = 22
elif m.nnodel[i] == 9:
elemcode = 28
for j in range(m.nnodel[i]):
fout.write(str(m.inpoel[i,j]) + " ")
fout.write("\n")
fout.write("\t\t\t</DataArray>\n")
fout.write("\t\t\t<DataArray type=\"UInt32\" Name=\"offsets\" Format=\"ascii\">\n")
for i in range(m.nelem):
fout.write("\t\t\t\t" + str(m.nnodel[i]*(i+1)) + "\n")
fout.write("\t\t\t</DataArray>\n")
fout.write("\t\t\t<DataArray type=\"Int32\" Name=\"types\" Format=\"ascii\">\n")
for i in range(m.nelem):
fout.write("\t\t\t\t" + str(elemcode) + "\n")
fout.write("\t\t\t</DataArray>\n")
fout.write("\t\t</Cells>\n")
fout.write("\t</Piece>\n</UnstructuredGrid>\n</VTKFile>")
fout.close()
<file_sep>/libfem/__init__.py
# The main module
__all__ = ["mesh","quadrature","elements","fem"]
<file_sep>/tests/driver_heat_timesteps.py
import sys
sys.path.append("..")
import gc
import numba
import numpy as np
import scipy.sparse as scs
import scipy.sparse.linalg as scsl
from scipy.special import jn_zeros, j0
from matplotlib import pyplot as plt
from libfem.mesh import *
from libfem.matrices import COOMatrix
from libfem.fem import *
from libfem.ode1 import *
from libfem.output import *
np.set_printoptions(linewidth=200, threshold=5000)
# ----------
# user input
# ----------
finaltime = 0.2
dt = 0.01
ntimesteps = 3
meshfile = "inputs/discquad4"
dirBCnum = np.array([2,])
ngauss = 6
a = 0.9
#-----------
############
def rhs_func(x,y):
return 0.0
def stiffness_coeff_func(x,y):
# equal to -a!
return -a
def mass_coeff_func(x,y):
return 1.0
def dirichlet_function(x,y):
return 0.0
class ExactSol:
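    # Exact separable solution of u_t = a*Laplacian(u) on the unit disc (unit-radius mesh assumed)
    # with u = 0 on the boundary: u(r,t) = exp(-a*lambda^2*t) * J0(lambda*r),
    # where lambda is a zero of J0 (the second zero here).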
def __init__(self, a):
self.r2 = jn_zeros(0,2)[-1]
self.a = a
print("ExactSol: Bessel zero = "+str(self.r2))
def eval(self,x,y,t):
return np.exp(-self.r2*self.r2*self.a*t)*j0(self.r2*np.sqrt(x*x+y*y))
funcs = CoeffFunctions(rhs_func, stiffness_coeff_func, mass_coeff_func, dirichlet_function, a)
exactsol = ExactSol(a)
# preprocess file names
outs = []
basename = meshfile.split('/')[-1]
for it in range(ntimesteps):
outs.append("../fem2d-results/"+basename+"_t"+str(it)+".vtu")
data = np.zeros((ntimesteps,2),dtype=np.float64)
mio = Mesh2dIO()
mio.readGmsh(meshfile+".msh")
m = Mesh2d(mio.npoin, mio.nelem, mio.nbface, mio.maxnnodel, mio.maxnnofa, mio.nbtags, mio.ndtags,
mio.coords, mio.inpoel, mio.bface, mio.nnodel, mio.nfael, mio.nnofa, mio.dtags)
mio = 0
poly_degree = 0
if m.nnodel[0] == 3:
poly_degree = 1
elif m.nnodel[0] == 6:
poly_degree = 2
print("Approximation polynomial degree = " + str(poly_degree))
# assemble
Ac = COOMatrix(m.npoin, m.npoin)
Mc = COOMatrix(m.npoin, m.npoin)
b = np.zeros(m.npoin, dtype=np.float64)
assemble_stiffness(m, Ac, poly_degree, ngauss, stiffness_coeff_func)
assemble_mass(m, Mc, poly_degree, ngauss, mass_coeff_func)
uexact = np.zeros(m.npoin)
uexact[:] = exactsol.eval(m.coords[:,0], m.coords[:,1], finaltime)
for it in range(ntimesteps):
print("Time step " + str(it))
A = scs.csc_matrix((Ac.vals,(Ac.rind,Ac.cind)), shape=(m.npoin,m.npoin))
M = scs.csc_matrix((Mc.vals,(Mc.rind,Mc.cind)), shape=(m.npoin,m.npoin))
#be = LForwardEuler(m, dirBCnum, dt)
#be = LBackwardEuler(m, dirBCnum, dt)
be = LCrankNicolson(m, dirBCnum, dt)
be.setOperators(A,M)
# set initial solution
un = np.zeros(m.npoin)
un[:] = exactsol.eval(m.coords[:,0], m.coords[:,1], 0.0)
t = 0.0; step = 0
while t < finaltime - 1e-10:
be.step(un)
t += dt
step += 1
if step % 20 == 0:
print(" Time step " + str(step) + ": Time = " + str(t))
print(" Final time = " + str(t))
l2norm = compute_error(m, un, poly_degree, ngauss, finaltime, exactsol)
print("Time-step " + str(it))
print(" The mesh size paramter, the error's L2 norm and its H1 norm (log base 10):")
print(" "+str(np.log10(dt)) + " " + str(np.log10(l2norm)) + "\n")
data[it,0] = np.log10(dt)
data[it,1] = np.log10(l2norm)
#writePointScalarToVTU(m, outs[it], "heat", un)
dt = dt/2.0
# plots
n = ntimesteps
pslope = np.zeros(data.shape[1])
labels = ['L2: ','H1: ', 'Inf:']
symbs = ['o-', 's-', '^-']
for j in range(1,data.shape[1]):
psigy = data[:,j].sum()
sigx = data[:,0].sum()
sigx2 = (data[:,0]*data[:,0]).sum()
psigxy = (data[:,j]*data[:,0]).sum()
pslope[j] = (n*psigxy-sigx*psigy)/(n*sigx2-sigx**2)
print("Slope is " + str(pslope[j]))
plt.plot(data[:,0],data[:,j],symbs[j-1],label=labels[j-1]+str(pslope[j]))
plt.title("Temporal convergence (legend: slopes)") # + title)
plt.xlabel("Log time-step")
plt.ylabel("Log error")
plt.legend()
plt.show()
<file_sep>/README.md
ConFEM 2D
=========
Implementation of 2D finite elements written in Python. Performance is achieved using the Numpy and Scipy libraries and Numba JIT compilation.
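
A minimal usage sketch, condensed from `tests/driver_elliptic.py` (the mesh path, boundary tags and coefficient functions below are illustrative; `CoeffFunctions` is assumed to live in `libfem.coeff_functions`, as the star imports in the test drivers suggest):

```python
import numpy as np
import scipy.sparse as scs
import scipy.sparse.linalg as scsl
from libfem.mesh import *
from libfem.matrices import COOMatrix
from libfem.coeff_functions import *
from libfem.fem import *
from libfem.output import *

# Illustrative problem data for -div(k grad u) + c u = f with Dirichlet boundaries on tags 2 and 4
funcs = CoeffFunctions(lambda x, y: x*x + y*y - 14.0,   # right-hand side f
                       lambda x, y: 1.0,                 # stiffness (diffusion) coefficient k
                       lambda x, y: 1.0,                 # mass coefficient c
                       lambda x, y: x*x + y*y - 10.0,    # Dirichlet boundary values
                       0.0)

# Read a Gmsh mesh and build the mesh data structure
mio = Mesh2dIO()
mio.readGmsh("inputs/squarehole0.msh")
m = Mesh2d(mio.npoin, mio.nelem, mio.nbface, mio.maxnnodel, mio.maxnnofa, mio.nbtags, mio.ndtags,
           mio.coords, mio.inpoel, mio.bface, mio.nnodel, mio.nfael, mio.nnofa, mio.dtags)

# Assemble the penalized system (P1 elements, 6-point quadrature) and solve it
Ac = COOMatrix(m.npoin, m.npoin)
b = np.zeros(m.npoin)
assemble(m, np.array([2, 4]), Ac, b, 1, 6, funcs)
A = scs.csc_matrix((Ac.vals, (Ac.rind, Ac.cind)), shape=(m.npoin, m.npoin))
x = scsl.splu(A).solve(b)

writePointScalarToVTU(m, "solution.vtu", "u", x)
```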
Finite Elements
---------------
Geometric mappings (from reference elements) are Lagrange only. Currently, P1 and P2 triangular elements are implemented, but any other P is easy to add.
The basis functions are currently Lagrange too.
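
For reference, the three P1 Lagrange basis functions on the unit reference triangle with vertices $(0,0)$, $(1,0)$ and $(0,1)$ (the triangle implied by the quadrature weights, which sum to 1/2) are, with the usual vertex ordering,

$$N_1(\xi,\eta) = 1 - \xi - \eta, \qquad N_2(\xi,\eta) = \xi, \qquad N_3(\xi,\eta) = \eta,$$

and the P2 set is the standard six-function quadratic Lagrange basis on the same triangle.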
Spatial operators
-----------------
Currently, the diffusion operator (stiffness matrix) and identity operator (mass matrix) are implemented.
Time-stepping
-------------
Forward Euler, backward Euler and Crank-Nicolson schemes are available.
<file_sep>/tests/driver_elliptic.py
import sys
sys.path.append("..")
import numba
import numpy as np
import scipy.sparse as scs
import scipy.sparse.linalg as scsl
from matplotlib import pyplot as plt
from libfem.mesh import *
from libfem.matrices import COOMatrix
from libfem.coeff_functions import *
from libfem.fem import *
from libfem.output import *
def poisson_test():
""" Tests convergence of P1 elements for the Poisson problem.
@return A boolean value indicating whether the test passed.
"""
# user input
numberofmeshes = 3
meshfile = "inputs/squarehole"
dirBCnum = np.array([2,4])
#dirBCnum = np.array([12,13])
ngauss = 6
# functions
#@jit(nopython=True, cache=True)
def rhs_func(x,y):
return x*x + y*y-14.0
#@jit(nopython=True, cache=True)
def stiffness_coeff_func(x,y):
return 1.0
#@jit(nopython=True, cache=True)
def mass_coeff_func(x,y):
return 1.0
#@jit(nopython=True, cache=True)
def exact_sol(x,y,t):
return x*x + y*y - 10.0
#@jit(nopython=True, cache=True)
def dirichlet_function(x,y):
return exact_sol(x,y,0)
class ExactSol:
def eval(self,x,y,t):
return x*x + y*y - 10.0
funcs = CoeffFunctions(rhs_func, stiffness_coeff_func, mass_coeff_func, dirichlet_function, 0.0)
exactsol = ExactSol()
# preprocess file names
meshes = []
outs = []
for imesh in range(numberofmeshes):
meshes.append(meshfile+str(imesh)+".msh")
outs.append(meshfile+str(imesh)+".vtu")
data = np.zeros((numberofmeshes,3),dtype=np.float64)
for imesh in range(numberofmeshes):
# mesh
mio = Mesh2dIO()
mio.readGmsh(meshes[imesh])
m = Mesh2d(mio.npoin, mio.nelem, mio.nbface, mio.maxnnodel, mio.maxnnofa, mio.nbtags, mio.ndtags,
mio.coords, mio.inpoel, mio.bface, mio.nnodel, mio.nfael, mio.nnofa, mio.dtags)
mio = 0
# The code currently only works for isoparametric discretization
poly_degree = 0
if m.nnodel[0] == 3:
poly_degree = 1
elif m.nnodel[0] == 6:
poly_degree = 2
if imesh == 0:
print("Approximation polynomial degree = " + str(poly_degree))
# compute
Ac = COOMatrix(m.npoin, m.npoin)
b = np.zeros(m.npoin, dtype=np.float64)
assemble(m, dirBCnum, Ac, b, poly_degree, ngauss, funcs)
A = scs.csc_matrix((Ac.vals,(Ac.rind,Ac.cind)), shape=(m.npoin,m.npoin))
#x,info = scsl.gmres(A,b,tol=1e-5, maxiter=500)
lu = scsl.splu(A)
x = lu.solve(b)
#(Ad,bd,dirflags) = removeDirichletRowsAndColumns(m,A,b,dirBCnum)
#x,xd = solveAndProcess(m, Ad, bd, dirflags)
# output
#writePointScalarToVTU(m, outs[imesh], "poisson", x)
# errors - uncomment for non-exact L2 and H1 error norms
#err = np.zeros(m.npoin, dtype=np.float64)
#err[:] = exact_sol(m.coords[:,0], m.coords[:,1])
##writePointScalarToVTU(m, "../fem2d-results/"+meshes[imesh]+"-exact.vtu", "exact", err)
#err[:] = err[:] - x[:]
#l2norm, h1norm = compute_norm(m, err, poly_degree, ngauss)
# uncomment for "exact" L2 errors, but no H1 error computation
l2norm = compute_error(m, x, poly_degree, ngauss, 0, exactsol)
h1norm = l2norm
"""print("Mesh " + str(imesh))
print(" The mesh size paramter, the error's L2 norm and its H1 norm (log base 10):")
print(" "+str(np.log10(m.h)) + " " + str(np.log10(l2norm)) + " " + "NA" + "\n")"""
data[imesh,0] = np.log10(m.h)
data[imesh,1] = np.log10(l2norm)
data[imesh,2] = np.log10(h1norm)
# convergence order and plots
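    # Least-squares fit of log(error) vs log(h): slope = (n*Sxy - Sx*Sy)/(n*Sxx - Sx^2)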
n = numberofmeshes
pslope = np.zeros(data.shape[1])
labels = ['L2: ','H1: ', 'Inf:']
symbs = ['o-', 's-', '^-']
for j in range(1,data.shape[1]):
psigy = data[:,j].sum()
sigx = data[:,0].sum()
sigx2 = (data[:,0]*data[:,0]).sum()
psigxy = (data[:,j]*data[:,0]).sum()
pslope[j] = (n*psigxy-sigx*psigy)/(n*sigx2-sigx**2)
print("Slope is " + str(pslope[j]))
#plt.plot(data[:,0],data[:,j],symbs[j-1],label=labels[j-1]+str(pslope[j]))
testpass = pslope[-1] < 2.1 and pslope[-1] > 1.9
return testpass
#plt.title("Grid-refinement (legend: slopes)") # + title)
#plt.xlabel("Log mesh size")
#plt.ylabel("Log error")
#plt.legend()
#plt.show()
if __name__ == "__main__":
passval = poisson_test()
if passval:
print("Poisson_test: Passed!\n")
<file_sep>/tests/driver_heat.py
import sys
sys.path.append("..")
import gc
import numba
import numpy as np
import scipy.sparse as scs
import scipy.sparse.linalg as scsl
from scipy.special import jn_zeros, j0
from matplotlib import pyplot as plt
from libfem.mesh import *
from libfem.matrices import COOMatrix
from libfem.fem import *
from libfem.ode1 import *
from libfem.output import *
np.set_printoptions(linewidth=200, threshold=5000)
# user input
finaltime = 0.2
dt = 0.001
numberofmeshes = 5
meshfile = "inputs/disc"
dirBCnum = np.array([2,])
ngauss = 6
a = 1.0
def rhs_func(x,y):
return 0.0
def stiffness_coeff_func(x,y):
# equal to -a!
return -a
def mass_coeff_func(x,y):
return 1.0
def dirichlet_function(x,y):
return 0.0
class ExactSol:
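    # Exact separable solution of the heat equation u_t = a*Lap(u) on the unit disc with
    # u = 0 on the boundary: u(r,t) = exp(-lam^2 * a * t) * J0(lam * r), where lam is a
    # zero of the Bessel function J0 (the second zero is used below). This assumes the
    # 'disc' meshes have unit radius.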
def __init__(self, a):
self.r2 = jn_zeros(0,2)[-1]
self.a = a
print("ExactSol: Bessel zero = "+str(self.r2))
def eval(self,x,y,t):
return np.exp(-self.r2*self.r2*self.a*t)*j0(self.r2*np.sqrt(x*x+y*y))
funcs = CoeffFunctions(rhs_func, stiffness_coeff_func, mass_coeff_func, dirichlet_function, a)
exactsol = ExactSol(a)
# preprocess file names
meshes = []
outs = []
basename = meshfile.split('/')[-1]
for imesh in range(numberofmeshes):
meshes.append(meshfile+str(imesh)+".msh")
outs.append("inputs/"+basename+str(imesh)+".vtu")
data = np.zeros((numberofmeshes,2),dtype=np.float64)
for imesh in range(numberofmeshes):
print("Mesh " + str(imesh))
mio = Mesh2dIO()
mio.readGmsh(meshes[imesh])
m = Mesh2d(mio.npoin, mio.nelem, mio.nbface, mio.maxnnodel, mio.maxnnofa, mio.nbtags, mio.ndtags,
mio.coords, mio.inpoel, mio.bface, mio.nnodel, mio.nfael, mio.nnofa, mio.dtags)
mio = 0
poly_degree = 0
if m.nnodel[0] == 3:
poly_degree = 1
elif m.nnodel[0] == 6:
poly_degree = 2
if imesh == 0:
print(" Approximation polynomial degree = " + str(poly_degree))
# assemble
Ac = COOMatrix(m.npoin, m.npoin)
Mc = COOMatrix(m.npoin, m.npoin)
b = np.zeros(m.npoin, dtype=np.float64)
assemble_stiffness(m, Ac, poly_degree, ngauss, stiffness_coeff_func)
assemble_mass(m, Mc, poly_degree, ngauss, mass_coeff_func)
A = scs.csc_matrix((Ac.vals,(Ac.rind,Ac.cind)), shape=(m.npoin,m.npoin))
M = scs.csc_matrix((Mc.vals,(Mc.rind,Mc.cind)), shape=(m.npoin,m.npoin))
#be = LBackwardEuler(m, dirBCnum, dt)
be = LCrankNicolson(m, dirBCnum, dt)
be.setOperators(A,M)
# set initial solution
un = np.zeros(m.npoin)
un[:] = exactsol.eval(m.coords[:,0], m.coords[:,1], 0.0)
t = 0.0; step = 0
while t < finaltime - 1e-10:
be.step(un)
t += dt
step += 1
if step % 5 == 0:
print(" Time step " + str(step) + ": Time = " + str(t))
print(" Final time = " + str(t))
#writePointScalarToVTU(m, outs[imesh], "heat", un)
l2norm = compute_error(m, un, poly_degree, ngauss, finaltime, exactsol)
print("Mesh " + str(imesh))
print(" The mesh size paramter, the error's L2 norm and its H1 norm (log base 10):")
print(" "+str(np.log10(m.h)) + " " + str(np.log10(l2norm)) + "\n")
data[imesh,0] = np.log10(m.h)
data[imesh,1] = np.log10(l2norm)
# plots
n = numberofmeshes
pslope = np.zeros(data.shape[1])
labels = ['L2: ','H1: ', 'Inf:']
symbs = ['o-', 's-', '^-']
for j in range(1,data.shape[1]):
psigy = data[:,j].sum()
sigx = data[:,0].sum()
sigx2 = (data[:,0]*data[:,0]).sum()
psigxy = (data[:,j]*data[:,0]).sum()
#pslope[j] = (n*psigxy-sigx*psigy)/(n*sigx2-sigx**2)
pslope[j] = (data[-1,j]-data[-2,j])/(data[-1,0]-data[-2,0])
print("Slope is " + str(pslope[j]))
plt.plot(data[:,0],data[:,j],symbs[j-1],label=labels[j-1]+str(pslope[j]))
plt.title("Grid-refinement (legend: slopes)") # + title)
plt.xlabel("Log mesh size")
plt.ylabel("Log error")
plt.legend()
plt.show()
<file_sep>/libfem/ode1.py
"""
@brief Integration of first-order ODEs (in time)
"""
import numpy as np
from numpy import sin, cos, arctan
import scipy.sparse as scs
import scipy.sparse.linalg as scsl
import scipy.linalg as scl
from .mesh import *
from .quadrature import GLQuadrature1D, GLQuadrature2DTriangle
from .elements import *
from .fem import *
np.set_printoptions(linewidth=200)
def factorizeMatrix(A):
return scsl.splu(A)
class LinearOde1Delta:
""" Base class for first-order ODE integration """
def __init__(self, mesh, dirBCnum):
self.m = mesh
self.dbn = dirBCnum
self.dt = 0.0
self.dirflags = np.zeros(mesh.npoin,dtype=np.int32)
def setOperators(self, A, M):
        # Set the spatial operators (e.g. stiffness matrix) and mass matrix
pass
def step(self, un):
# solve for the change u^{n+1} - u^n
b = self.A.dot(un)
applyDirichletRHS(b, self.dirflags, 0.0)
deltau = self.fM.solve(b)
un[:] = un[:] + deltau[:]
class LForwardEuler(LinearOde1Delta):
""" Forward Euler scheme """
def __init__(self, mesh, dirBCnum, dt):
LinearOde1Delta.__init__(self, mesh, dirBCnum)
self.dt = dt
def setOperators(self, A, M):
# NOTE: Modifies M
M = M.multiply(1.0/self.dt)
applyDirichletPenaltiesLHS(self.m, M, self.dbn, self.dirflags)
self.fM = factorizeMatrix(M)
self.A = A
class LBackwardEuler(LinearOde1Delta):
""" Backward Euler scheme """
def __init__(self, mesh, dirBCnum, dt):
LinearOde1Delta.__init__(self, mesh, dirBCnum)
self.dt = dt
def setOperators(self, A, M):
# NOTE: M is modified
self.A = A
M = M.multiply(1.0/self.dt)-A
applyDirichletPenaltiesLHS(self.m, M, self.dbn, self.dirflags)
self.fM = factorizeMatrix(M)
class LCrankNicolson(LinearOde1Delta):
def __init__(self, mesh, dirBCnum, dt):
LinearOde1Delta.__init__(self, mesh, dirBCnum)
self.dt = dt
def setOperators(self, A, M):
# NOTE: M is modified
self.A = A
M = M.multiply(1.0/self.dt) - A.multiply(0.5)
# apply penalties to Dirichlet rows and classify points into Dirichlet or not (dirflags)
applyDirichletPenaltiesLHS(self.m, M, self.dbn, self.dirflags)
self.fM = factorizeMatrix(M)
class LBDF1:
""" Alternative backward Euler formulation"""
def __init__(self,mesh, dirBCnum, dirvalue, dt):
self.m = mesh
self.dbn = dirBCnum
self.dirval = dirvalue
self.dt = dt
self.dirflags = np.zeros(mesh.npoin,dtype=np.int32)
def setOperators(self, A, M):
self.resop = M.multiply(1.0/self.dt)
temp = self.resop - A
applyDirichletPenaltiesLHS(self.m, temp, self.dbn, self.dirflags)
self.jac = factorizeMatrix(temp)
def step(self, un):
# Overwrites the input
b = self.resop.dot(un)
applyDirichletRHS(b, self.dirflags, self.dirval)
un[:] = self.jac.solve(b)
<file_sep>/libfem/mesh.py
""" @brief 2D Mesh handling
"""
import numpy as np
import numba
from numba import jit, jitclass, int64, float64
meshclassspec = [('nbtags',int64),('ndtags',int64), ('npoin', int64), ('nelem',int64), ('nbface',int64), ('maxnnodel',int64), ('maxnnofa',int64),
('nnodel',int64[:]), ('nnofa',int64[:]), ('nfael', int64[:]),
('coords', float64[:,:]), ('inpoel', int64[:,:]), ('bface',int64[:,:]), ('dtags',int64[:,:]), ('h',float64) ]
#@jitclass(meshclassspec)
class Mesh2d:
""" @brief Stores the mesh data in a jit-able form
Contains:
- npoin: number of vertices in the mesh
- nelem: number of elements
- nbface: number of boundary faces
- maxnnodel: max number of nodes per element
- nbtags: number of boundary tags for each boundary face
    - ndtags: number of domain tags for each element
- nnodel: array containing number of nodes in each element
- nnofa: array containing number of nodes in each face
- coords: array containing coordinates of mesh vertices
    - inpoel: interconnectivity matrix; node numbers (in coords) of the nodes making up each element
- dtags: array containing domain marker tags
- bface: array containing vertex numbers of vertices in each boundary face, as well as two boundary markers per boundary face
Note that BCs are handled later with the help of the tags stored in bface,
which are read from the Gmsh file.
"""
def __init__(self, npo, ne, nf, maxnp, maxnf, nbt, ndt, _coords, _inpoel, _bface, \
_nnodel, _nfael, _nnofa, _dtags):
self.npoin = npo
self.nelem = ne
self.nbface = nf
self.maxnnodel = maxnp
self.maxnnofa = maxnf
self.nbtags = nbt
self.ndtags = ndt
self.coords = np.zeros((self.npoin,2),dtype=np.float64)
self.inpoel = np.zeros((self.nelem,self.maxnnodel),dtype=np.int64)
self.bface = np.zeros((self.nbface,self.maxnnofa+self.nbtags),dtype=np.int64)
self.nnofa = np.zeros(self.nbface, dtype=np.int64)
self.nnodel = np.zeros(self.nelem,dtype=np.int64)
self.nfael = np.zeros(self.nelem,dtype=np.int64)
self.dtags = np.zeros((self.nelem,2),dtype=np.int64)
self.coords[:,:] = _coords[:,:]
self.inpoel[:,:] = _inpoel[:,:]
self.bface[:,:] = _bface[:,:]
self.nnodel[:] = _nnodel[:]
self.nfael[:] = _nfael[:]
self.nnofa[:] = _nnofa[:]
self.dtags[:,:] = _dtags[:,:]
# compute mesh size parameter h
# length of longest edge in the mesh - reasonable for triangular elements
self.h = 0.0
for ielem in range(self.nelem):
localh = 0.0
for iface in range(self.nfael[ielem]):
facevec = self.coords[self.inpoel[ielem, (iface+1) % self.nfael[ielem]],:] - self.coords[self.inpoel[ielem,iface],:]
faceh = np.sqrt(facevec[0]*facevec[0]+facevec[1]*facevec[1])
if self.h < faceh:
self.h = faceh
#print("Mesh2d: Stored mesh data. nelem = "+str(self.nelem)+", npoin = "+str(self.npoin)+", nbface = "+str(self.nbface))
class Mesh2dIO:
""" @brief Reads and processes the mesh data.
Contains:
- npoin: number of vertices in the mesh
- nelem: number of elements
- nbface: number of boundary faces
- maxnnodel: max number of nodes per element
- nbtags: number of boundary tags for each boundary face
    - ndtags: number of domain tags for each element
- nnodel: array containing number of nodes in each element
- nnofa: array containing number of nodes in each face
- coords: array containing coordinates of mesh vertices
    - inpoel: interconnectivity matrix; node numbers (in coords) of the nodes making up each element
- dtags: array containing domain marker tags
- bface: array containing vertex numbers of vertices in each boundary face, as well as two boundary markers per boundary face
Note that BCs are handled later with the help of the tags stored in bface, which are read from the Gmsh file.
"""
def __init__(self):
self.npoin = 0
self.nelem = 0
self.nbface = 0
self.maxnnodel = 6
self.maxnnofa = 3
self.nbtags = 2
self.ndtags = 2
def readGmsh(self, fname):
""" Reads a Gmsh2 mesh file."""
f = open(fname,'r')
for i in range(4):
f.readline()
self.npoin = int(f.readline())
temp = np.fromfile(f, dtype=float, count=self.npoin*4, sep=" ").reshape((self.npoin,4))
self.coords = temp[:,1:-1]
#print(" readGmsh(): Coords read. Shape of coords is "+str(self.coords.shape))
for i in range(2):
f.readline()
nallelem = int(f.readline())
allelems = np.zeros((nallelem,self.ndtags+12))
self.nbface = 0
self.nelem = 0
# first we just read everything in the order given
for i in range(nallelem):
# store everything but the first entry in the line
elem = np.array(f.readline().split(),dtype=int)[1:]
if elem[0] == 1 or elem[0] == 8:
self.nbface += 1
elif elem[0] == 2 or elem[0]==9 or elem[0] == 3 or elem[0]==10:
self.nelem += 1
else:
print("! readGmsh(): ! Invalid element type!")
for j in range(len(elem)):
allelems[i,j] = elem[j]
f.close()
self.bface = np.zeros((self.nbface, self.maxnnofa+self.nbtags),dtype=int)
self.inpoel = np.zeros((self.nelem, self.maxnnodel), dtype=int)
self.dtags = np.zeros((self.nelem,self.ndtags), dtype=int)
self.nnodel = np.zeros(self.nelem,dtype=int)
self.nfael = np.zeros(self.nelem,dtype=np.int32)
self.nnofa = np.zeros(self.nbface,dtype=np.int32)
iface = 0; ielem = 0
for i in range(nallelem):
if allelems[i,0] == 1:
# P1 line segment
self.nnofa[iface] = 2
self.bface[iface, :self.nnofa[iface]] = allelems[i, 2+self.nbtags:2+self.nbtags+self.nnofa[iface]]-1
self.bface[iface, self.nnofa[iface]:self.nnofa[iface]+self.nbtags] = allelems[i, 2:2+self.nbtags]
iface += 1
elif allelems[i,0] == 8:
# P2 line segment
self.nnofa[iface] = 3
self.bface[iface, :self.nnofa[iface]] = allelems[i, 2+self.nbtags:2+self.nbtags+self.nnofa[iface]]-1
self.bface[iface, self.nnofa[iface]:self.nnofa[iface]+self.nbtags] = allelems[i, 2:2+self.nbtags]
iface += 1
elif allelems[i,0] == 2:
# P1 tri
self.nnodel[ielem] = 3
self.nfael[ielem] = 3
self.inpoel[ielem, :self.nnodel[ielem]] = allelems[i, 2+self.ndtags:2+self.ndtags+self.nnodel[ielem]]-1
self.dtags[ielem, :self.ndtags] = allelems[i, 2:2+self.ndtags]
ielem += 1
elif allelems[i,0] == 9:
# P2 tri
self.nnodel[ielem] = 6
self.nfael[ielem] = 3
self.inpoel[ielem, :self.nnodel[ielem]] = allelems[i, 2+self.ndtags:2+self.ndtags+self.nnodel[ielem]]-1
self.dtags[ielem, :self.ndtags] = allelems[i, 2:2+self.ndtags]
ielem += 1
elif allelems[i,0] == 3:
# P1 quad
self.nnodel[ielem] = 4
self.nfael[ielem] = 4
self.inpoel[ielem, :self.nnodel[ielem]] = allelems[i, 2+self.ndtags:2+self.ndtags+self.nnodel[ielem]]-1
self.dtags[ielem, :self.ndtags] = allelems[i, 2:2+self.ndtags]
ielem += 1
elif allelems[i,0] == 10:
# P2 quad
self.nnodel[ielem] = 9
self.nfael[ielem] = 4
self.inpoel[ielem, :self.nnodel[ielem]] = allelems[i, 2+self.ndtags:2+self.ndtags+self.nnodel[ielem]]-1
self.dtags[ielem, :self.ndtags] = allelems[i, 2:2+self.ndtags]
ielem += 1
else:
print("! readGmsh(): ! Invalid element type!")
if ielem != self.nelem or iface != self.nbface:
print("Mesh2d: readGmsh(): ! Error in adding up!")
#@jit(nopython=True, cache=True)
def createMesh(npoin, nelem, nbface, maxnnodel, maxnnofa, nbtags, ndtags, coords, inpoel, bface, nnodel, nfael, nnofa, dtags):
# Create a compiled Mesh2d object
m = Mesh2d(npoin, nelem, nbface, maxnnodel, maxnnofa, nbtags, ndtags, coords, inpoel, bface, nnodel, nfael, nnofa, dtags)
return m
if __name__ == "__main__":
mio = Mesh2dIO()
mio.readGmsh("../Meshes-and-geometries/squarehole0.msh")
m = Mesh2d(mio.npoin, mio.nelem, mio.nbface, mio.maxnnodel, mio.maxnnofa, mio.nbtags, mio.ndtags, mio.coords, mio.inpoel, mio.bface, mio.nnodel, mio.nfael, mio.nnofa, mio.dtags)
mio = 0
print(numba.typeof(m))
<file_sep>/libfem/coeff_functions.py
class CoeffFunctions:
""" A set of functions describing a second-order PDE """
def __init__(self, rhs_func, stiff_coeff_func, mass_coeff_func, dirichlet_func, const1):
self.rhs = rhs_func
self.stiffness = stiff_coeff_func
self.mass = mass_coeff_func
self.dirichlet = dirichlet_func
self.const1 = const1
<file_sep>/libfem/elements.py
""" @brief Setup for finite elements
"""
import numpy as np
from numba import jit, generated_jit, jitclass, int64, float64
from .mesh import *
from .quadrature import GLQuadrature1D, GLQuadrature2DTriangle
mapspec = [('degree', int64), ('phynodes', float64[:,:])]
class GeometricMap:
""" @brief Abstract class for mapping between a physical element and a reference element.
"""
def setDegree(self, deg):
self.degree = deg
def setPhysicalElementNodes(self, physical_nodes):
""" physical_nodes is a nnodel x ndim numpy array describing locations of the physical nodes."""
self.phynodes = np.copy(physical_nodes)
def evalGeomMapping(self,x,y):
""" Returns the physical coordinates of reference location (x,y) in the reference element.
"""
pass
def getJacobian(self, x, y, jac, jacinv):
""" The ndim x ndim array jac contains the Jacobian matrix of the geometric mapping on return
and jacinv contains its inverse, evaluated at the reference coordinates (x,y).
Returns the value of the determinant.
"""
pass
def getJacobianDeterminant(self, x):
""" Returns the values of the determinant at a set of points x
"""
pass
#@jitclass(mapspec)
class LagrangeTriangleMap(GeometricMap):
""" @brief Mapping from reference to physical element based on Lagrange basis functions.
"""
def __init__(self):
pass
def evalGeomMapping(self,x,y):
rg = np.zeros(2)
if self.degree == 1:
rg[:] = self.phynodes[0,:]*(1.0-x-y) + self.phynodes[1,:]*x + self.phynodes[2,:]*y
elif self.degree == 2:
rg[:] = self.phynodes[0,:]*(1.0-3*x-3*y+2*x*x+2*y*y+4*x*y) \
+ self.phynodes[1,:]*(2.0*x*x-x) + self.phynodes[2,:]*(2.0*y*y-y) \
+ self.phynodes[3,:]*4.0*(x-x*x-x*y) + self.phynodes[4,:]*4.0*x*y \
+ self.phynodes[5,:]*4.0*(y-y*y-x*y)
return (rg[0],rg[1])
def getJacobian(self, x, y, jac, jacinv):
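        # For degree 1 the mapping is affine, so the Jacobian is constant: its columns
        # are the edge vectors from node 0 to nodes 1 and 2. For degree 2 it varies
        # with the reference coordinates (x,y).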
if self.degree == 1:
jac[:,0] = self.phynodes[1,:]-self.phynodes[0,:]
jac[:,1] = self.phynodes[2,:]-self.phynodes[0,:]
jdet = jac[0,0]*jac[1,1] - jac[0,1]*jac[1,0]
jacinv[0,0] = jac[1,1]/jdet; jacinv[0,1] = -jac[0,1]/jdet
jacinv[1,0] = -jac[1,0]/jdet; jacinv[1,1] = jac[0,0]/jdet
elif self.degree == 2:
jac[:,0] = self.phynodes[0,:]*(-3+4*x+4*y) +self.phynodes[1,:]*(4*x-1) \
+self.phynodes[3,:]*4*(1-2*x-y) +self.phynodes[4,:]*4*y -self.phynodes[5,:]*4.0*y
jac[:,1] = self.phynodes[0,:]*(-3+4*y+4*x) +self.phynodes[2,:]*(4*y-1) \
-self.phynodes[3,:]*4*x +self.phynodes[4,:]*4*x +self.phynodes[5,:]*4*(1-2*y-x)
jdet = jac[0,0]*jac[1,1] - jac[0,1]*jac[1,0]
jacinv[0,0] = jac[1,1]/jdet; jacinv[0,1] = -jac[0,1]/jdet
jacinv[1,0] = -jac[1,0]/jdet; jacinv[1,1] = jac[0,0]/jdet
return jdet
def getJacobianDeterminant(self, x):
# Does not work yet
jdet = np.zeros(x.shape[0], dtype=np.float64)
if self.degree == 1:
jac = np.zeros((2,2),dtype=np.float64)
jac[:,0] = self.phynodes[1,:]-self.phynodes[0,:]
jac[:,1] = self.phynodes[2,:]-self.phynodes[0,:]
jdet[:] = jac[0,0]*jac[1,1] - jac[0,1]*jac[1,0]
elif self.degree == 2:
jac = np.zeros((x.shape[0],2,2),dtype=np.float64)
jac[:,:,0] = np.outer(-3+4*x[:,0]+4*x[:,1],self.phynodes[0,:]) \
+np.outer(4*x[:,0]-1, self.phynodes[1,:]) \
+np.outer(4*(1-2*x[:,0]-x[:,1]), self.phynodes[3,:]) \
+np.outer(4*x[:,1], self.phynodes[4,:]) \
-np.outer(4.0*x[:,1], self.phynodes[5,:])
            # vectorized form of getJacobian's second column, with x -> x[:,0] and y -> x[:,1]
            jac[:,:,1] = np.outer(-3+4*x[:,0]+4*x[:,1], self.phynodes[0,:]) \
                +np.outer(4*x[:,1]-1, self.phynodes[2,:]) \
                -np.outer(4*x[:,0], self.phynodes[3,:]) \
                +np.outer(4*x[:,0], self.phynodes[4,:]) \
                +np.outer(4*(1-2*x[:,1]-x[:,0]), self.phynodes[5,:])
jdet[:] = jac[:,0,0]*jac[:,1,1] - jac[:,0,1]*jac[:,1,0]
return jdet
elemspec = [('degree', int64), ('ndof', int64)]
class Element:
""" @brief Abstract class for a finite element
with basis functions defined on the reference element.
Members are
nnodel: number of nodes in the element
phynodes: locations of physical nodes
"""
def setDegree(self, deg):
""" Set the polynomial degree of trial and test basis functions.
Must be overriden by child classes for ndof computation."""
self.degree = deg
self.ndof = 1
def getBasisFunctions(self, x, y, bvals):
""" Returns the basis function value on the reference element
as a function of reference coordinates.
bvals must be preallocated as ndofs x 1.
On return, it contains values of the basis function at (x,y).
"""
pass
def getBasisGradients(self, x, y, bgrvals):
""" bgrvals must be ndofs x ndim. Contains partial derivatives of the basis functions on return.
"""
pass
#@jitclass(elemspec)
class LagrangeTriangleElement(Element):
""" Triangular element with Lagrange P1 basis for the trial/test space.
"""
def __init__(self):
pass
def setDegree(self, deg):
self.degree = deg
self.ndof = 1
if self.degree == 1:
self.ndof = 3
elif self.degree == 2:
self.ndof = 6
def getBasisFunctions(self, x, y, bvals):
if self.degree == 1:
bvals[:] = [1.0-x-y, x, y]
elif self.degree == 2:
bvals[0] = 1.0 - 3*x - 3*y + 2*x*x + 4*x*y + 2*y*y
bvals[1] = 2.0*x*x - x
bvals[2] = 2.0*y*y - y
bvals[3] = 4.0*(x - x*x - x*y)
bvals[4] = 4.0*x*y
bvals[5] = 4.0*(y - y*y - x*y)
def getBasisGradients(self, x, y, bgrvals):
if self.degree == 1:
bgrvals[0,:] = [-1.0, -1.0]
bgrvals[1,:] = [1.0, 0.0]
bgrvals[2,:] = [0.0, 1.0]
elif self.degree == 2:
bgrvals[0,:] = [-3.0+4.0*x+4.0*y, -3.0+4.0*x+4.0*y]
bgrvals[1,:] = [4.0*x-1.0, 0.0]
bgrvals[2,:] = [0.0, 4.0*y-1.0]
bgrvals[3,:] = [4.0*(1-2.0*x-y), -4.0*x]
bgrvals[4,:] = [4.0*y, 4.0*x]
bgrvals[5,:] = [-4.0*y, 4.0*(1.0-2.0*y-x)]
if __name__ == "__main__":
    gm = LagrangeTriangleMap()
gm.setDegree(1)
pn = np.array([[0,1],[2,0],[0,2.0]])
gm.setPhysicalElementNodes(pn)
jac = np.zeros((2,2))
jacinv = np.zeros((2,2))
jacdet = gm.getJacobian(0,0.6,jac,jacinv)
print(jac)
elem = LagrangeTriangleElement()
elem.setDegree(2)
bvals = np.zeros(6)
elem.getBasisFunctions(0,0.6, bvals)
print(bvals)
|
cc9f38e6641a3f19b5569f52e5c79ba6875c501f
|
[
"Markdown",
"Python"
] | 15
|
Python
|
Slaedr/EllipticFEM2D
|
e83b31c800acde810572a85651f9342de223bc12
|
d373795f1ce874990c7e01ff88d71d9ab7a5ebf8
|
refs/heads/master
|
<file_sep>import React from 'react'
import {StyleSheet, View, Text} from 'react-native'
import ViewPager from '@react-native-community/viewpager'
export default function MyPager () {
return (
<ViewPager style={styles.viewPager} initialPage={0}>
<View key="1" style={styles.viewOne}>
<Text>First page</Text>
</View>
<View key="2" style={styles.viewTwo}>
<Text>Second page</Text>
</View>
</ViewPager>
)
}
const styles = StyleSheet.create({
viewPager: {
flex: 1
},
viewOne: {
backgroundColor: 'red'
},
viewTwo: {
backgroundColor: 'blue'
}
})
|
256858e1c3c56d73b50d0214143c21d1639ff30e
|
[
"JavaScript"
] | 1
|
JavaScript
|
coffescript/react-native-swiper
|
cf5eda5a94195b65e74a7fa07bebc3c6ee77d1e0
|
29ede7db460a6ba20d9420ec61f25a3932bb5a04
|
refs/heads/main
|
<file_sep>function modalPlus() {
var modal = document.getElementById("modalPlus");
modal.style.display = "block";
}
function closeModalEdit() {
var modal = document.getElementById("modalEdit");
modal.style.display = "none";
var text = document.getElementById("textEdit");
text.style.display = "none";
}
function closeModal() {
var modal = document.getElementById("modalPlus");
modal.style.display = "none";
var text = document.getElementById("text");
text.style.display = "none";
id = document.getElementById('id');
userName = document.getElementById('name');
country = document.getElementById('country');
age = document.getElementById('age');
id.value = '';
userName.value = '';
country.value = '';
age.value = '';
}
var app = new function() {
this.el = document.getElementById('user');
this.id = ['01', '02', '03'];
this.name = ['Nastia', 'Julia', 'Samanta'];
this.countries = ['France', 'Germany', 'England'];
this.age = ['37', '20', '54'];
this.Count = function() {
};
this.FetchAll = function() {
var data = '';
if (this.id.length > 0) {
for (i = 0; i < this.countries.length; i++) {
data += '<tr>';
data += '<td>' + this.id[i] + '</td>';
data += '<td>' + this.name[i] + '</td>';
data += '<td>' + this.countries[i] + '</td>';
data += '<td>' + this.age[i] + '</td>';
data += '<td class="edit"><button onclick="app.Edit(' + i + ')">Edit</button></td>';
data += '<td class="delete"><button onclick="app.Delete(' + i + ')">Delete</button></td>';
data += '</tr>';
}
}
this.Count(this.id.length);
return this.el.innerHTML = data;
};
this.Add = function () {
var modal = document.getElementById("text");
modal.style.display = "none";
id = document.getElementById('id');
userName = document.getElementById('name');
country = document.getElementById('country');
age = document.getElementById('age');
var one = id.value,
two = userName.value,
three = country.value,
four = age.value;
if (one && two && three && four) {
this.id.push(one.trim());
this.name.push(two.trim());
this.countries.push(three.trim());
this.age.push(four.trim());
id.value = '';
userName.value = '';
country.value = '';
age.value = '';
this.FetchAll();
}
else {
var modal = document.getElementById("text");
modal.style.display = "block";
}
};
this.Edit = function (item) {
var modal = document.getElementById("textEdit");
modal.style.display = "none";
var id = document.getElementById('edit-id'),
userName = document.getElementById('edit-name'),
country = document.getElementById('edit-country'),
age = document.getElementById('edit-age');
id.value = this.id[item];
userName.value = this.name[item];
country.value = this.countries[item];
age.value = this.age[item];
var modal = document.getElementById("modalEdit");
modal.style.display = "block";
        var self = this;
document.getElementById('saveEdit').onsubmit = function() {
var one = id.value,
two = userName.value,
three = country.value,
four = age.value;
if (one && two && three && four) {
self.id.splice(item, 1, one.trim());
self.name.splice(item, 1, two.trim());
self.countries.splice(item, 1, three.trim());
self.age.splice(item, 1, four.trim());
self.FetchAll();
closeModalEdit()
}
else {
var modal = document.getElementById("textEdit");
modal.style.display = "block";
}
}
};
this.Delete = function (item) {
this.id.splice(item, 1);
this.name.splice(item, 1);
this.countries.splice(item, 1);
this.age.splice(item, 1);
this.FetchAll();
};
}
app.FetchAll();
|
e6f5733dd4a0ca1a2ed9bc3c9cb4cd2168038ddf
|
[
"JavaScript"
] | 1
|
JavaScript
|
NatyaBA/CRUD
|
4da24f1041a4815691850f8ca56ab97a59d705be
|
209d8cde84140b7f7e3e9059a1673d872f033558
|
refs/heads/master
|
<file_sep># snow-probability-map
The 'Snow Probability Map' is a simple web-based tool for showing maps of snow probability in Czechia based on remote sensing,
crowdsourcing and meteorological station data.
The crowdsourced data (volunteered geographic information) comes from the Strava, Garmin Connect and in-pocasi.cz
social networks.
<file_sep>from tethys_apps.base import TethysAppBase, url_map_maker
from tethys_apps.base import PersistentStore
class SnowProbability(TethysAppBase):
"""
Tethys app class for Snow Probability.
"""
name = 'Snow Probability Map'
index = 'snow_probability:home'
icon = 'snow_probability/images/icon.gif'
package = 'snow_probability'
root_url = 'snow-probability'
color = '#9b59b6'
description = 'Check the snow cover probability in Czechia.'
def url_maps(self):
"""
Add controllers
"""
UrlMap = url_map_maker(self.root_url)
url_maps = (UrlMap(name='home',
url='snow-probability',
controller='snow_probability.map.map'),
UrlMap(name='snow_graph',
url='snow-probability/snow_graph',
controller='snow_probability.controllers.snow_graph'),
UrlMap(name='snow_data',
url='snow-probability/snow_data',
controller='snow_probability.modis.get_data_json'),
UrlMap(name='waterml',
url='snow-probability/waterml',
controller='snow_probability.modis.get_data_waterml'),
UrlMap(name='pixel',
url='snow-probability/pixel',
controller='snow_probability.modis.get_data_for_pixel'),
UrlMap(name='pixel-borders',
url='snow-probability/pixel-borders',
controller='snow_probability.modis.get_pixel_borders2'),
UrlMap(name='upload_to_hydroshare_ajax',
url='snow-probability/upload-to-hydroshare',
controller='snow_probability.controllers.upload_to_hydroshare')
)
return url_maps
def persistent_stores(self):
"""
Add one or more persistent stores
"""
stores = (PersistentStore(name='snow_probability_db',
initializer='init_stores:init_snow_probability_db',
spatial=True),
)
return stores
|
73d38b35a0cba2d8d159234bc0f923b2360a9902
|
[
"Markdown",
"Python"
] | 2
|
Markdown
|
jirikadlec2/snow-probability-map
|
342d0bf4dd3a9729f5c13d050dd3da2319834b7a
|
f341274c31f048688908093fb8e48954fdf59ce9
|
refs/heads/master
|
<repo_name>seimmuc/antispambot<file_sep>/main.py
import json
import os
import sqlite3
import time
from typing import List, Dict, Any, Tuple, Optional
from discord import Client, Status, Member, Message, Intents, Guild, TextChannel, ChannelType, PartialMessage
class ActivityLimit:
def __init__(self, time_window: float, count_limit: int):
self.time_window = time_window
self.count_limit = count_limit
class Record:
def __init__(self, timestamp: float, action_name: str, initiator: str = None, data=None):
self.timestamp = timestamp
self.action_name = action_name
self.initiator = initiator
self.data = data
class RecentActivity:
def __init__(self, db_con: sqlite3.Connection, table_name: str, limit: ActivityLimit):
self.db_con = db_con
self.table_name = table_name
self.activity_limit = limit
self._last_fetched_times = {}
@staticmethod
def _sql_where_clause(conditions: Dict[str, Tuple[str, Any]]) -> Tuple[str, List[Any]]:
# conditions: Dict[column_name, Tuple[condition_operator, comparison_value]]
where_clause, where_values = zip(*((f'"{k}" {v[0]} ?', v[1]) for k, v in conditions.items()))
where_clause = 'WHERE ' + ' AND '.join(where_clause)
return where_clause, where_values
def add_record(self, guild_id: int, **data):
names = ['guild_id', 'unix_time']
values = [guild_id, time.time()]
for k, v in data.items():
names.append(f'"{k}"')
values.append(v)
with self.db_con:
self.db_con.execute(
f'INSERT INTO {self.table_name} ({", ".join(names)}) VALUES ({", ".join("?" for _ in names)})',
values)
def purge_old(self):
timestamp = time.time() - self.activity_limit.time_window
with self.db_con:
self.db_con.execute(f'DELETE FROM {self.table_name} WHERE unix_time < ?', [timestamp])
def over_limit(self, guild_id: int, purge: bool, **match_conditions) -> bool:
if purge:
self.purge_old()
match_conditions['guild_id'] = guild_id
where_clause, where_values = self._sql_where_clause({k: ('=', v) for k, v in match_conditions.items()})
with self.db_con as con:
c = con.execute(f'SELECT COUNT (unix_time) FROM {self.table_name} {where_clause}', where_values)
row_count = c.fetchone()[0]
return row_count > self.activity_limit.count_limit
def fetch_recent_records_once(self, guild_id: int, purge: bool, columns: List[str] = None, **match_conditions)\
-> list:
if purge:
self.purge_old()
match_conditions['guild_id'] = guild_id
# this is not thread safe, but is fine with asyncio
last_fetched_key = f'{self.table_name}:{"|".join(f"{k}={v}" for k,v in match_conditions.items())}'
last_fetch_time = self._last_fetched_times.get(last_fetched_key, 0)
self._last_fetched_times[last_fetched_key] = time.time()
conditions = {k: ('=', v) for k, v in match_conditions.items()}
conditions['unix_time'] = ('>', last_fetch_time)
where_clause, where_values = self._sql_where_clause(conditions)
columns = '*' if columns is None else ', '.join(f'"{c}"' for c in columns)
with self.db_con as con:
c = con.execute(f'SELECT {columns} FROM {self.table_name} {where_clause}', where_values)
return c.fetchall()
class AntiSpamBot(Client):
def __init__(self, config: Dict[str, Any], db_con: sqlite3.Connection, *args, **kwargs):
intents = Intents(members=True, messages=True, guilds=True)
super().__init__(intents=intents, *args, **kwargs)
self.db_con = db_con
self.enable_message_limit = config.get('ENABLE_MESSAGE_LIMIT', None) is True
self.message_activity = RecentActivity(db_con=db_con, table_name='Message',
limit=ActivityLimit(**config['MESSAGE_LIMIT']))
self.join_activity = RecentActivity(db_con=db_con, table_name='GuildJoin',
limit=ActivityLimit(**config['JOIN_LIMIT']))
async def on_ready(self):
print(f'We have logged in as {self.user}')
async def on_message(self, message: Message):
if not self.enable_message_limit:
return
if message.author.id == self.user.id:
return
if message.channel.type != ChannelType.text:
return
guild: Guild = message.guild
user_id = message.author.id
self.message_activity.add_record(guild_id=guild.id, user_id=user_id, channel_id=message.channel.id,
message_id=message.id)
if self.message_activity.over_limit(guild_id=guild.id, purge=True, user_id=user_id):
print('hit the text message limit')
await message.channel.send(content='hit the text message limit')
del_msgs: List[sqlite3.Row] = self.message_activity\
.fetch_recent_records_once(guild_id=guild.id, purge=False, columns=['channel_id', 'message_id'],
user_id=user_id)
if len(del_msgs) == 1:
msg = guild.get_channel(del_msgs[0]['channel_id']).get_partial_message(del_msgs[0]['message_id'])
await msg.delete()
elif len(del_msgs) > 1:
del_msgs_by_channel: Dict[int, Tuple[TextChannel, List[PartialMessage]]] =\
{ci: (guild.get_channel(ci), []) for ci in set(m['channel_id'] for m in del_msgs)}
for msg in del_msgs:
ch, ml = del_msgs_by_channel[msg['channel_id']]
ml.append(ch.get_partial_message(msg['message_id']))
for channel, messages in del_msgs_by_channel.values():
await channel.delete_messages(messages)
async def on_member_join(self, member: Member):
print(f'{member.name} joined')
guild: Guild = member.guild
self.join_activity.add_record(guild_id=guild.id, user_id=member.id)
if self.join_activity.over_limit(guild_id=guild.id, purge=True):
print('hit the join limit')
kick_users: List[sqlite3.Row] = self.join_activity\
.fetch_recent_records_once(guild_id=guild.id, purge=False, columns=['user_id'])
for user_row in kick_users:
user: Optional[Member] = member if user_row['user_id'] == member.id \
else guild.get_member(user_row['user_id'])
if user is not None:
await user.kick(reason='raid protection')
async def on_member_remove(self, member: Member):
print(f'{member.name} left')
def init_db(db_con: sqlite3.Connection):
init_script = """
CREATE TABLE Message (guild_id integer, unix_time real, user_id integer, channel_id integer, message_id integer);
CREATE TABLE GuildJoin (guild_id integer, unix_time real, user_id integer);
"""
with db_con:
db_con.executescript(init_script)
def start_bot(config, db_con):
bot_token = os.getenv('ANTISPAMBOT_TOKEN')
if bot_token is None:
print('missing ANTISPAMBOT_TOKEN environment variable')
raise NoTokenError()
bot = AntiSpamBot(config=config, db_con=db_con, status=Status.idle)
bot.http.user_agent = 'AntiSpamBot/0.1'
bot.run(bot_token, bot=True)
class NoTokenError(RuntimeError):
pass
if __name__ == '__main__':
with open('config.json', 'rt', encoding='utf8') as f:
bot_config = json.load(f)
db = sqlite3.connect(":memory:")
db.row_factory = sqlite3.Row
init_db(db_con=db)
try:
start_bot(bot_config, db)
except NoTokenError:
exit(1)
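# A minimal example of the expected config.json, inferred from the keys read in
# AntiSpamBot.__init__ above (the numeric values here are illustrative only):
# {
#     "ENABLE_MESSAGE_LIMIT": true,
#     "MESSAGE_LIMIT": {"time_window": 10.0, "count_limit": 5},
#     "JOIN_LIMIT": {"time_window": 60.0, "count_limit": 10}
# }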
|
649bf3352395870c3e9aff1c06feb4c4787c86b1
|
[
"Python"
] | 1
|
Python
|
seimmuc/antispambot
|
195d0ec5345f12186d79c821d232b0d3879d3056
|
1738bbfc8e2eebf833675befb0683201960b75ac
|
refs/heads/main
|
<file_sep>import {remote} from "electron";
import fs from "fs";
import path from "path";
import SendFeedback from "@electron-elements/send-feedback";
const {app} = remote;
customElements.define("send-feedback", SendFeedback);
export const sendFeedback: SendFeedback =
document.querySelector("send-feedback")!;
export const feedbackHolder = sendFeedback.parentElement!;
// Make the button color match zulip app's theme
sendFeedback.customStylesheet = "css/feedback.css";
// Customize the fields of custom elements
sendFeedback.title = "Report Issue";
sendFeedback.titleLabel = "Issue title:";
sendFeedback.titlePlaceholder = "Enter issue title";
sendFeedback.textareaLabel = "Describe the issue:";
sendFeedback.textareaPlaceholder =
"Succinctly describe your issue and steps to reproduce it...";
sendFeedback.buttonLabel = "Report Issue";
sendFeedback.loaderSuccessText = "";
sendFeedback.useReporter("emailReporter", {
email: "<EMAIL>",
});
feedbackHolder.addEventListener("click", (event: Event) => {
// Only remove the class if the grey out faded
// part is clicked and not the feedback element itself
if (event.target === event.currentTarget) {
feedbackHolder.classList.remove("show");
}
});
sendFeedback.addEventListener("feedback-submitted", () => {
setTimeout(() => {
feedbackHolder.classList.remove("show");
}, 1000);
});
sendFeedback.addEventListener("feedback-cancelled", () => {
feedbackHolder.classList.remove("show");
});
const dataDir = app.getPath("userData");
const logsDir = path.join(dataDir, "/Logs");
sendFeedback.logs.push(
...fs.readdirSync(logsDir).map((file) => path.join(logsDir, file)),
);
|
3374f2f427a5bf0122ab5d5e406057d1f51f8afc
|
[
"TypeScript"
] | 1
|
TypeScript
|
dulianshuizhongyue/zulip-desktop
|
90a65ab6ccf3a5f165cb57caf1ddd3a0b26cd894
|
d3f07e65040789ff3e2958d4228f5f9544a14d41
|
refs/heads/master
|
<repo_name>akhadimer/TD-scripting<file_sep>/moreorless.sh
#!/bin/bash
nbrrandom=$(( ( RANDOM % 10 ) ))
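# RANDOM % 10 yields an integer between 0 and 9 inclusive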
i=1
read nbr
while [ $nbr != $nbrrandom ]
do
if [ $nbr -le $nbrrandom ]
then
echo "Plus"
else
echo "Moins"
fi
read nbr
i=`expr $i + 1`
done;
if [ $nbr -eq $nbrrandom ]
then
echo "Trouvé après $i essai(s)"
fi
|
343aa2c0493f48732b4054e619659cd4cedfb479
|
[
"Shell"
] | 1
|
Shell
|
akhadimer/TD-scripting
|
a8214667c9f903f068ec9f14b73f44480dd992bd
|
1a59cd4c1a1d4eea6278408cd05f6a66b05905e0
|
refs/heads/master
|
<repo_name>achmadsyafri/resume-ku<file_sep>/src/components/Menu.jsx
import React from 'react';
import { makeStyles } from '@material-ui/core/styles';
import Grid from '@material-ui/core/Grid';
// import Button from '@material-ui/core/Button';
// import Tooltip from '@material-ui/core/Tooltip';
import InstagramIcon from '@material-ui/icons/Instagram';
import LinkedInIcon from '@material-ui/icons/LinkedIn';
import FacebookIcon from '@material-ui/icons/Facebook';
import TwitterIcon from '@material-ui/icons/Twitter';
import TopNavigationAction from '@material-ui/core/BottomNavigationAction';
const useStyles = makeStyles({
root: {
width: 400,
paddingTop: 0,
paddingBottom: 0
},
});
export default function PositionedTooltips() {
const classes = useStyles();
return (
<div className={classes.root}>
<Grid item xs={10} md={12}>
<TopNavigationAction href="https://www.instagram.com/achmadsyafri/" label="Instagram" icon={<InstagramIcon className={classes.icon}/>} />
<TopNavigationAction href="https://www.linkedin.com/in/achmadsyafri96/" label="Linkedin" icon={<LinkedInIcon className={classes.icon}/>} />
<TopNavigationAction href="https://twitter.com/AchmadSyafri" label="Twitter" icon={<TwitterIcon className={classes.icon}/>} />
<TopNavigationAction href="https://www.facebook.com/achmadsyafri11" label="Facebook" icon={<FacebookIcon className={classes.icon}/>} />
</Grid>
</div>
);
}
|
363756eba6b87743a7ccf34783bd301ff2ad377a
|
[
"JavaScript"
] | 1
|
JavaScript
|
achmadsyafri/resume-ku
|
cb09424033b3115d64e8429c149f1733b4634fbf
|
391d2f17c202ac24b3d0550ef33ca4864cd63537
|
refs/heads/main
|
<repo_name>marto123456/python-networking<file_sep>/code.py
import marton
servers_login = marton.login('192.168.100.1', 22, 'martins', 'cisco')
server_shell = marton.platform_shell(servers_login)
marton.enter_command(server_shell, 'enable')
marton.enter_command(server_shell, 'cisco')
marton.enter_command(server_shell, 'terminal length 0')
result1 = marton.enter_command(server_shell, 'show run')
print(result1.decode())
marton.close(servers_login)
<file_sep>/marton.py
import paramiko
import time
def login(ip_address, port, username, password):
cl = paramiko.SSHClient()
cl.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    cl.connect(ip_address, port=port, username=username, password=password, look_for_keys=False, allow_agent=False)
return cl
def platform_shell(cl):
shell = cl.invoke_shell()
return shell
def enter_command(shell, command):
shell.send(command + '\n')
time.sleep(2)
result = shell.recv(4096)
return result
def close(cl):
if cl.get_transport().is_active():
cl.close()
<file_sep>/README.md
# python-networking
Marton is a mini Python networking script that uses a Python module called marton.
This module was actually created by me on top of the paramiko module, and it aims to help network engineers connect to and issue commands on a Cisco/Linux router using SSH.
PREREQUISITES
1. Make sure your Cisco router has been configured with an IP address and SSH enabled.
2. Ensure that you can reach your router.
3. Install the paramiko module.
You can clone this project into PyCharm and update the username and password values, then update the commands you want to send to the router.
Run the script and get your result.
64246dd1915aa9322bd7ae921a572b0525b6468e
|
[
"Markdown",
"Python"
] | 3
|
Python
|
marto123456/python-networking
|
accd72e5641b56d2f7f5d3cd9e70534b65c8e582
|
16a7284408f1a031147f8736b123055f7c11696b
|
refs/heads/main
|
<file_sep><?php
use Faker\Factory;
use App\Entity\Books;
use Doctrine\Persistence\ObjectManager;
use Doctrine\Bundle\FixturesBundle\Fixture;
class BookFixtures extends Fixture
{
public function load(ObjectManager $manager)
{
        $faker = Factory::create('fr_FR');
for($a =1; $a<=10; $a++){
$books = new Books();
$title = $faker->sentence($nbWords = 1, $variableNbWords = true);
$author = $faker->sentence($nbWords = 1, $variableNbWords = true);
$genre = $faker->sentence($nbWords = 1, $variableNbWords = true);
$resume = '<p>'.join('</p><p>',$faker->paragraphs(1)).'</p>';
$format = $faker->sentence($nbWords = 1, $variableNbWords = true);
            $date = $faker->dateTimeBetween('-10 years', 'now'); // setDate() expects a \DateTimeInterface
$books->setTitle($title)
->setCoverImage('https://picsum.photos/150/350')
->setAuthor($author)
->setGenre($genre)
->setResume($resume)
->setFormat($format)
->setDate($date)
                ->setPrice(rand(6,15))
                ->setSlug($faker->slug()) // slug column is non-nullable in the Books entity
                ;
$manager->persist($books);
}
$manager->flush();
}
}
<file_sep><?php
namespace App\Entity;
use App\Repository\BooksRepository;
use Doctrine\Common\Collections\ArrayCollection;
use Doctrine\Common\Collections\Collection;
use Doctrine\ORM\Mapping as ORM;
/**
* @ORM\Entity(repositoryClass=BooksRepository::class)
*/
class Books
{
/**
* @ORM\Id
* @ORM\GeneratedValue
* @ORM\Column(type="integer")
*/
private $id;
/**
* @ORM\Column(type="string", length=255)
*/
private $title;
/**
* @ORM\Column(type="string", length=255)
*/
private $author;
/**
* @ORM\Column(type="text")
*/
private $resume;
/**
* @ORM\Column(type="string", length=255)
*/
private $genre;
/**
* @ORM\Column(type="string", length=255)
*/
private $coverImage;
/**
* @ORM\Column(type="float")
*/
private $price;
/**
* @ORM\Column(type="string", length=255)
*/
private $slug;
/**
* @ORM\Column(type="date")
*/
private $date;
/**
* @ORM\Column(type="string", length=255)
*/
private $format;
/**
* @ORM\OneToMany(targetEntity=Image::class, mappedBy="livres")
*/
private $images;
public function __construct()
{
$this->images = new ArrayCollection();
}
public function getId(): ?int
{
return $this->id;
}
public function getTitle(): ?string
{
return $this->title;
}
public function setTitle(string $title): self
{
$this->title = $title;
return $this;
}
public function getAuthor(): ?string
{
return $this->author;
}
public function setAuthor(string $author): self
{
$this->author = $author;
return $this;
}
public function getResume(): ?string
{
return $this->resume;
}
public function setResume(string $resume): self
{
$this->resume = $resume;
return $this;
}
public function getGenre(): ?string
{
return $this->genre;
}
public function setGenre(string $genre): self
{
$this->genre = $genre;
return $this;
}
public function getCoverImage(): ?string
{
return $this->coverImage;
}
public function setCoverImage(string $coverImage): self
{
$this->coverImage = $coverImage;
return $this;
}
public function getPrice(): ?float
{
return $this->price;
}
public function setPrice(float $price): self
{
$this->price = $price;
return $this;
}
public function getSlug(): ?string
{
return $this->slug;
}
public function setSlug(string $slug): self
{
$this->slug = $slug;
return $this;
}
public function getDate(): ?\DateTimeInterface
{
return $this->date;
}
public function setDate(\DateTimeInterface $date): self
{
$this->date = $date;
return $this;
}
public function getFormat(): ?string
{
return $this->format;
}
public function setFormat(string $format): self
{
$this->format = $format;
return $this;
}
/**
* @return Collection|Image[]
*/
public function getImages(): Collection
{
return $this->images;
}
public function addImage(Image $image): self
{
if (!$this->images->contains($image)) {
$this->images[] = $image;
$image->setLivres($this);
}
return $this;
}
public function removeImage(Image $image): self
{
if ($this->images->removeElement($image)) {
// set the owning side to null (unless already changed)
if ($image->getLivres() === $this) {
$image->setLivres(null);
}
}
return $this;
}
}
|
2d0b1d0e74fd317e16a0d791bf93d48d6bb9415f
|
[
"PHP"
] | 2
|
PHP
|
noemie96/le-bouquineur
|
c7b94e8da2b4f7eff4aba8c1c1e5796995814d71
|
a9ebac6332db71e5f375b40ef759573a71eafcd0
|
refs/heads/master
|
<file_sep>/* selfedu ino, EchoLocator v.0.1
* measure distance by obstacle in cm
* sending values to serial and display via I2C
* alarming obstacle closer < 20cm
*/
#include <LiquidCrystal_I2C.h>
LiquidCrystal_I2C lcd(0x27, 16, 2);
const int trigPin = 9; //triggerPin on ultrasonic SR04
const int echoPin = 10; //echoPin on ultrasonic SR04
const int piskle = 11; //buzzer; H if obstacle distance < then safetyDistance
const int ledPin = 12; //led ; H if obstacle distance < then safetyDistance
int x = 20; //distance of alerting <= in cm
long duration;
int distance;
int safetyDistance;
void setup() {
pinMode(trigPin, OUTPUT);
pinMode(echoPin, INPUT);
pinMode(piskle, OUTPUT);
pinMode(ledPin, OUTPUT);
Serial.begin(9600); //copy $distance to serial
lcd.begin();
lcd.noBacklight();
lcd.clear();
delay(500);
lcd.backlight();
lcd.home();
lcd.print("EchoLoc v.0.1");
lcd.setCursor(0,1);
lcd.print("OK1VBR (cl)2019");
delay(1000);
lcd.clear();
}
void loop() {
// clears the trigPin+transmit+calculate distance
digitalWrite(trigPin, LOW);
delayMicroseconds(2);
digitalWrite(trigPin, HIGH);
delayMicroseconds(10);
digitalWrite(trigPin, LOW);
duration = pulseIn(echoPin, HIGH);
distance = duration*0.034/2;
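  // pulseIn() gives the round-trip echo time in microseconds; sound travels
  // roughly 0.034 cm per microsecond, and halving accounts for the round trip.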
safetyDistance = distance;
//safetyDistance is in (cm)
if (safetyDistance <= x){
digitalWrite(piskle, HIGH);
digitalWrite(ledPin, HIGH);
}
else {
digitalWrite(piskle, LOW);
digitalWrite(ledPin, LOW);
}
//send EchoLoc to serial
Serial.print("Distance: ");
Serial.println(distance);
//if Echo signal is lost or distance >33m
if (distance >=3300) {
lcd.clear();
lcd.setCursor(0, 0);
lcd.print("Lost of echo ");
lcd.setCursor(0, 1);
lcd.print("or distance >33m");
}
//else print to display value of Echo
  else {
    lcd.clear();
    lcd.print("DISTANCE: ");
    lcd.print(distance);
    lcd.print("cm");
  }
delay(50);
}
<file_sep># Distance-meter-demo
An ultrasonic distance meter and alerting educational device
based on Arduino, similar to a parking radar.
|
0840cee4cf9b5349d8e62fe8df83a58926f95edf
|
[
"Markdown",
"C++"
] | 2
|
C++
|
radarixos/Distance-meter-demo
|
bfc089c83578704d3d3f48e9f503d76f40ac15dc
|
9904871d708df142be18bbd6b48fd4c39b1d6776
|
refs/heads/master
|
<file_sep>using System;
namespace Sanofi.Core
{
public class Class1
{
}
}
<file_sep>using Sanofi.Core;
using Sanofi.Core.EntitiesModel.Administrator;
using Sanofi.Core.EntitiesModel.IdentityCore;
using Sanofi.Infrastructure.DbContext;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Dynamic;
using System.Linq;
using System.Security.Principal;
using System.Threading.Tasks;
namespace Abhimantra.Sanofi.Base
{
public abstract class BaseRepository : IDisposable
{
protected readonly ApplicationDbContext context;
protected readonly IPrincipal principal;
//protected readonly string ReportBaseUrl = "http://172.16.58.3:8085/";
private readonly GlobalVariableParamModel _globalVariable;
private readonly string _documentManagementSetting = AppSettingJson.GetDocumentManagementSetting();
protected readonly string safepediaScheme = HttpHelper.HttpContext?.Request.Scheme;
protected readonly string safepediaHost = HttpHelper.HttpContext?.Request.Host.Value;
protected BaseRepository(ApplicationDbContext context, IPrincipal principal, GlobalVariableParamModel globalVariable)
{
this.context = context;
this.principal = principal;
this._globalVariable = globalVariable;
}
public string ReportBaseUrl
{
get
{
return this._globalVariable.ReportBaseUrl;
}
}
public string BaseAwsDirectoryDomain
{
get
{
var documentManagementHost = AppSettingJson.GetDocumentManagementHost();
if (!string.IsNullOrEmpty(documentManagementHost))
{
var documentManagemenSetting = AppSettingJson.GetDocumentManagementSetting();
if (documentManagemenSetting?.ToLower() == "sensenet")
{
var documentManagemenSystemPath = AppSettingJson.GetDocumentManagementSystemPath();
return documentManagementHost + documentManagemenSystemPath;
}
else
{
return documentManagementHost;
}
}
return "https://s3-ap-southeast-1.amazonaws.com/";
}
}
public string BaseUrl
{
get
{
var url = $"{safepediaScheme}://{safepediaHost}";
return url;
}
}
public string CurrentUserId
{
get
{
try
{
var currentUser = context.Users.FirstOrDefault(user => user.UserName == CurrentUserName);
//return currentUser == null ? null : currentUser.Id;
if (currentUser != null)
{
return currentUser.Id;
}
else
{
var currentUserById = context.Users.FirstOrDefault(user => user.Id == CurrentUserName);
if (currentUserById != null)
{
return currentUserById.Id;
}
}
return null;
}
catch (Exception)
{
return null;
}
}
}
public string CurrentUserName
{
get
{
if (principal != null)
{
return principal.Identity.Name;
}
return null;
}
}
public IQueryable<ApplicationUser> AllUsers
{
get
{
return context.Users.Where(user => user.DelDate == null);
}
}
[DebuggerStepThrough]
protected void SetAuditFields(dynamic entity)
{
if (entity.CreatedDate == null)
{
if (IsPropertyExist(entity, "CreatedDate")) { entity.CreatedDate = DateTime.Now; };
if (IsPropertyExist(entity, "CreatedBy")) { entity.CreatedBy = CurrentUserId; };
if (IsPropertyExist(entity, "IsActived")) { entity.IsActived = true; };
}
else
{
if (IsPropertyExist(entity, "UpdatedDate")) { entity.UpdatedDate = DateTime.Now; };
if (IsPropertyExist(entity, "UpdatedBy")) { entity.UpdatedBy = CurrentUserId; };
}
}
public static bool IsPropertyExist(dynamic settings, string name)
{
if (settings is ExpandoObject)
return ((IDictionary<string, object>)settings).ContainsKey(name);
return settings.GetType().GetProperty(name) != null;
}
public void Dispose()
{
if (context == null)
return;
context.Dispose();
}
private static Random random = new Random();
public static string GetRandomString(int length)
{
var chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789-/";
var result = new string(
Enumerable.Repeat(chars, length)
.Select(s => s[random.Next(s.Length)])
.ToArray());
return result;
}
private IQueryable<Feature> AllFeature
{
get
{
return context.Feature.Where(doc => doc.DelDate == null);
}
}
public string InspectionFeatureName
{
get
{
var feature = AllFeature.FirstOrDefault(fet => fet.FeatureID == null && fet.AreaName == "Inspection" && fet.ControllerName == "Inspection" && fet.ActionName == "Index");
if (feature != null)
{
return feature.FeatureName;
}
return "Inspection";
}
}
public string TSVFeatureName
{
get
{
var feature = AllFeature.FirstOrDefault(fet => fet.AreaName == "TaskSelfVerification" && fet.ControllerName == "TaskSelfVerification" && fet.ActionName == "Index");
if (feature != null)
{
return feature.FeatureName;
}
return "Task Self Verification";
}
}
public string ShocardFeatureName
{
get
{
var feature = AllFeature.FirstOrDefault(fet => fet.AreaName == "Hazob" && fet.ControllerName == "Hazob" && fet.ActionName == "Index");
if (feature != null)
{
return feature.FeatureName;
}
return "Hazob";
}
}
public string AuditFeatureName
{
get
{
var feature = AllFeature.FirstOrDefault(fet => fet.AreaName == "Audit" && fet.ControllerName == "Audit" && fet.ActionName == "Index");
if (feature != null)
{
return feature.FeatureName;
}
return "Internal Audit";
}
}
public string NonconformityFeatureName
{
get
{
var feature = AllFeature.FirstOrDefault(fet => fet.AreaName == "Nonconformity" && fet.ControllerName == "Nonconformity" && fet.ActionName == "Index");
if (feature != null)
{
return feature.FeatureName;
}
return "Corrective Action Report";
}
}
public string ObservationFeatureName
{
get
{
var feature = AllFeature.FirstOrDefault(fet => fet.AreaName == "Observation" && fet.ControllerName == "Observation" && fet.ActionName == "Index");
if (feature != null)
{
return feature.FeatureName;
}
return "BBS";
}
}
public string PunchListFeatureName
{
get
{
var feature = AllFeature.FirstOrDefault(fet => fet.AreaName == "CorrectiveAction" && fet.ControllerName == "CorrectiveAction" && fet.ActionName == "Index");
if (feature != null)
{
return feature.FeatureName;
}
return "Action List";
}
}
public string SHEDailyReportFeatureName
{
get
{
var feature = AllFeature.FirstOrDefault(fet => fet.AreaName == "SHEReport" && fet.ControllerName == "SHEDailyReport" && fet.ActionName == "Index");
if (feature != null)
{
return feature.FeatureName;
}
return "SHE Daily Report";
}
}
public string IncidentFeatureName
{
get
{
var feature = AllFeature.FirstOrDefault(fet => fet.AreaName == "Incident" && fet.ControllerName == "Incident" && fet.ActionName == "Index");
if (feature != null)
{
return feature.FeatureName;
}
return "Incident";
}
}
public string UserFeatureName
{
get
{
var feature = AllFeature.FirstOrDefault(fet => fet.AreaName == "Identity" && fet.ControllerName == "User" && fet.ActionName == "Index");
if (feature != null)
{
return feature.FeatureName;
}
return "User Management";
}
}
public string HirarcFeatureName
{
get
{
var feature = AllFeature.FirstOrDefault(fet => fet.AreaName == "RiskManagement" && fet.ControllerName == "Hirardc" && fet.ActionName == "Index");
if (feature != null)
{
return feature.FeatureName;
}
return "HIRADC";// "Hirardc";
}
}
public string PTWFeatureName
{
get
{
var feature = AllFeature.FirstOrDefault(fet => fet.AreaName == "Meeting" && fet.ControllerName == "PermitToWork" && fet.ActionName == "Index");
if (feature != null)
{
return feature.FeatureName;
}
return "Permit To Work";
}
}
public string SafetyMeetingFeatureName
{
get
{
var feature = AllFeature.FirstOrDefault(fet => fet.AreaName == "Meeting" && fet.ControllerName == "SafetyMeeting" && fet.ActionName == "Index");
if (feature != null)
{
return feature.FeatureName;
}
return "Minutes of Meeting";
}
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations.Schema;
using System.Text;
using Microsoft.AspNetCore.Identity;
namespace Sanofi.Core.EntitiesModel.IdentityCore
{
public class ApplicationUser : IdentityUser
{
public string UserCode { get; set; } // Isi Nya NIK
public string FullName { get; set; }
public string Title { get; set; }
public string Gender { get; set; }
public Nullable<DateTime> Birthdate { get; set; }
public Nullable<DateTime> JoinDate { get; set; }
public string Status { get; set; }
public Nullable<DateTime> StatusEndDate { get; set; }
public string JobTitle { get; set; }
public string EmploymentType { get; set; }
public string ProfilePic { get; set; }
public Nullable<bool> IsActive { get; set; }
[ForeignKey("AppUserCreateBy")]
public string CreatedBy { get; set; }
public virtual ApplicationUser AppUserCreateBy { get; set; }
public Nullable<DateTime> CreatedDate { get; set; }
[ForeignKey("AppUserUpdateBy")]
public string UpdatedBy { get; set; }
public virtual ApplicationUser AppUserUpdateBy { get; set; }
public Nullable<DateTime> UpdatedDate { get; set; }
public Nullable<DateTime> DelDate { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Text;
using Microsoft.AspNetCore.Identity;
namespace Sanofi.Core.EntitiesModel.IdentityCore
{
public class ApplicationRole : IdentityRole
{
public string CustomRoleName { get; set; }
public string Description { get; set; }
public Nullable<bool> IsAdministrator { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.ComponentModel.DataAnnotations.Schema;
using System.Text;
namespace Sanofi.Core.EntitiesModel.Administrator
{
[Table("Feature", Schema = "Administrator")]
public class Feature : BaseEntity
{
[DisplayName("Feature Name")]
public string FeatureName { get; set; }
[DisplayName("Area Name")]
public string AreaName { get; set; }
[DisplayName("Controller Name")]
public string ControllerName { get; set; }
[DisplayName("Action Name")]
public string ActionName { get; set; }
[DisplayName("Menu Icon")]
public string MenuIcon { get; set; }
[DisplayName("IsMenu")]
public Nullable<bool> IsMenu { get; set; }
[DisplayName("Description")]
public string Description { get; set; }
public string FeatureID { get; set; }
public virtual Feature ParentMenu { get; set; }
public int Sequence { get; set; }
public int SequenceChild { get; set; }
}
}
<file_sep>using Microsoft.Extensions.Configuration;
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
namespace Sanofi.Core
{
public class AppSettingJson
{
public static IConfigurationRoot GetConfigurationSetting()
{
string applicationExeDirectory = Directory.GetCurrentDirectory();
var builder = new ConfigurationBuilder()
.SetBasePath(applicationExeDirectory)
.AddJsonFile("appsettings.json", optional: true, reloadOnChange: true);
return builder.Build();
}
public static string GetConnectionString()
{
var connectionString = GetConfigurationSetting()["ConnectionStrings:DefaultConnection"];
return connectionString;
}
public static string GetDocumentManagementSetting()
{
var setting = GetConfigurationSetting()["DocumentManagementSettings:DefaultSetting"];
return setting;
}
public static string GetDocumentManagementHost()
{
var host = GetConfigurationSetting()["DocumentManagementSettings:Host"];
return host;
}
public static string GetDocumentManagementSystemPath()
{
var systemPath = GetConfigurationSetting()["DocumentManagementSettings:SystemPath"];
return systemPath;
}
public static string[] GetDocumentManagementServerContextLogin()
{
var stringServerContextLogin = GetConfigurationSetting()["DocumentManagementSettings:ServerContextLogin"];
var serverContextLogin = stringServerContextLogin.Split(";");
if (serverContextLogin.Length < 2)
throw new Exception("Destination Server must be set first");
return serverContextLogin;
}
}
}
<file_sep>using Microsoft.AspNetCore.Identity.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore;
using Sanofi.Core;
using Sanofi.Core.EntitiesModel.Administrator;
using Sanofi.Core.EntitiesModel.IdentityCore;
using System;
using System.Collections.Generic;
using System.Text;
namespace Sanofi.Infrastructure.DbContext
{
public class ApplicationDbContext : IdentityDbContext<ApplicationUser, ApplicationRole, string>
{
public DbSet<Feature> Feature { get; set; }
public DbSet<RoleFeature> RoleFeature { get; set; }
public static string ConString = AppSettingJson.GetConnectionString();
protected override void OnConfiguring(DbContextOptionsBuilder option)
{
option.UseSqlServer(ConString);
}
public ApplicationDbContext(DbContextOptions<ApplicationDbContext> options)
: base(options)
{
}
public static ApplicationDbContext Create()
{
var optionsBuilder = new DbContextOptionsBuilder<ApplicationDbContext>();
optionsBuilder.UseSqlServer(ConString);
return new ApplicationDbContext(optionsBuilder.Options);
}
public ApplicationDbContext CreateDbContext(string[] args)
{
var optionsBuilder = new DbContextOptionsBuilder<ApplicationDbContext>();
optionsBuilder.UseSqlServer(ConString);
return new ApplicationDbContext(optionsBuilder.Options);
}
protected override void OnModelCreating(ModelBuilder builder)
{
base.OnModelCreating(builder);
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Principal;
using System.Threading.Tasks;
using Abhimantra.Sanofi.Base;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Http.Features;
using Microsoft.AspNetCore.HttpsPolicy;
using Microsoft.AspNetCore.Identity;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.Authorization;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using Sanofi.Core;
using Sanofi.Core.EntitiesModel.IdentityCore;
using Sanofi.Infrastructure.DbContext;
namespace Abhimantra.Sanofi
{
public class Startup
{
public Startup(IHostingEnvironment env)
{
var builder = new ConfigurationBuilder()
.SetBasePath(env.ContentRootPath)
.AddJsonFile("appsettings.json", optional: true, reloadOnChange: true)
//.AddJsonFile($"appsettings.{env.EnvironmentName}.json", optional: true)
.AddEnvironmentVariables();
Configuration = builder.Build();
environment = env;
}
public IConfiguration Configuration { get; }
public IHostingEnvironment environment { get; set; }
// This method gets called by the runtime. Use this method to add services to the container.
public void ConfigureServices(IServiceCollection services)
{
services.AddControllers().AddNewtonsoftJson();
services.Configure<FormOptions>(options =>
{
options.MultipartBodyLengthLimit = 100 * 1024 * 1024;
});
services.AddMvc(o =>
{
var policy = new AuthorizationPolicyBuilder()
.RequireAuthenticatedUser()
.AddRequirements(new AuthorizationPageRequirement())
.Build();
o.Filters.Add(new AuthorizeFilter(policy));
o.Filters.Add(typeof(MyActionFilter));
}).SetCompatibilityVersion(CompatibilityVersion.Version_3_0)
.AddSessionStateTempDataProvider() // store TempData in session state to keep requests from growing too long
.AddNewtonsoftJson(options => { options.SerializerSettings.ReferenceLoopHandling = Newtonsoft.Json.ReferenceLoopHandling.Ignore; });
services.ConfigureApplicationCookie(options =>
{
options.LoginPath = new PathString("/Account/Login");
options.AccessDeniedPath = new PathString("/Account/AccessDenied");
//options.LogoutPath = new PathString("/[your-path]");
});
var host = Configuration["DocumentManagementSettings:Host"];
var stringServerContextLogin = Configuration["DocumentManagementSettings:ServerContextLogin"];
var serverContextLogin = stringServerContextLogin.Split(";");
if (serverContextLogin.Length < 2)
throw new Exception("Destination Server must be set first");
//set global variable param model
var globalVariable = new GlobalVariableParamModel()
{
ApplicationDomain = Configuration["GlobalVariable:ApplicationDomain"],
ApplicationName = Configuration["GlobalVariable:ApplicationName"],
ConnectionString = Configuration["ConnectionStrings:DefaultConnection"],
ContentRootPath = environment.ContentRootPath,
ReportBaseUrl = Configuration["GlobalVariable:ReportDomain"],
EmailNotificationUsername = Configuration["GlobalVariable:EmailNotificationUsername"],
EmailNotificationPassword = Configuration["GlobalVariable:EmailNotificationPassword"],
EmailNotificationHost = Configuration["GlobalVariable:EmailNotificationHost"],
EmailNotificationPort = Convert.ToInt16(Configuration["GlobalVariable:EmailNotificationPort"]),
SensenetBaseUrl = Configuration["GlobalVariable:SensenetBaseUrl"],
EnvironmentVariable = !string.IsNullOrEmpty(Configuration["GlobalVariable:EnvironmentVariable"]) ? Configuration["GlobalVariable:EnvironmentVariable"] : "Development"
};
services.AddSingleton(globalVariable);
//services.AddDbContext<ApplicationDbContext>(options => options.UseSqlServer(AppSettingsJson.GetConnectionString()));
services.AddDbContext<ApplicationDbContext>(options => options.UseSqlServer(Configuration["ConnectionStrings:DefaultConnection"]));
//services.AddDbContext<ApplicationDbContext>(options => options.UseSqlServer(Configuration["ConnectionStrings:PetroseaConnection"]));
//Get from APPSETTING.JSON connectionString
//add identity
services.AddIdentity<ApplicationUser, ApplicationRole>(options =>
{
options.Password.RequiredLength = 8;
options.Password.RequireLowercase = false;
options.Password.RequireUppercase = false;
options.Password.RequireNonAlphanumeric = false;
options.Password.RequireDigit = false;
})
.AddEntityFrameworkStores<ApplicationDbContext>()
.AddDefaultTokenProviders();
services.AddDistributedMemoryCache();
services.AddSession(options =>
{
// Set a short timeout for easy testing.
options.IdleTimeout = TimeSpan.FromSeconds(10);
options.Cookie.HttpOnly = true;
options.Cookie.IsEssential = true;
});
//services.AddKendo();
services.Configure<CookiePolicyOptions>(options =>
{
// This lambda determines whether user consent for non-essential cookies is needed for a given request.
options.CheckConsentNeeded = context => true;
options.MinimumSameSitePolicy = SameSiteMode.None;
});
services.AddTransient<IAuthorizationHandler, AuthorizationPageHandler>();
services.AddScoped<IPrincipal>(sp => sp.GetService<IHttpContextAccessor>().HttpContext.User);
}
// This method gets called by the runtime. Use this method to configure the HTTP request pipeline.
public void Configure(IApplicationBuilder app, IHostingEnvironment env)
{
if (env.IsDevelopment())
{
app.UseDeveloperExceptionPage();
}
else
{
app.UseExceptionHandler("/Home/Error");
// The default HSTS value is 30 days. You may want to change this for production scenarios, see https://aka.ms/aspnetcore-hsts.
app.UseHsts();
}
app.UseHttpsRedirection();
app.UseStaticFiles();
app.UseRouting();
app.UseAuthorization();
app.UseEndpoints(endpoints =>
{
endpoints.MapControllerRoute(
name: "default",
pattern: "{controller=Home}/{action=Index}/{id?}");
});
}
}
}
<file_sep>using System;
using Microsoft.EntityFrameworkCore.Migrations;
namespace Sanofi.Infrastructure.Migrations
{
public partial class firstmigration : Migration
{
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.EnsureSchema(
name: "Administrator");
migrationBuilder.CreateTable(
name: "AspNetRoles",
columns: table => new
{
Id = table.Column<string>(nullable: false),
Name = table.Column<string>(maxLength: 256, nullable: true),
NormalizedName = table.Column<string>(maxLength: 256, nullable: true),
ConcurrencyStamp = table.Column<string>(nullable: true),
CustomRoleName = table.Column<string>(nullable: true),
Description = table.Column<string>(nullable: true),
IsAdministrator = table.Column<bool>(nullable: true)
},
constraints: table =>
{
table.PrimaryKey("PK_AspNetRoles", x => x.Id);
});
migrationBuilder.CreateTable(
name: "AspNetUsers",
columns: table => new
{
Id = table.Column<string>(nullable: false),
UserName = table.Column<string>(maxLength: 256, nullable: true),
NormalizedUserName = table.Column<string>(maxLength: 256, nullable: true),
Email = table.Column<string>(maxLength: 256, nullable: true),
NormalizedEmail = table.Column<string>(maxLength: 256, nullable: true),
EmailConfirmed = table.Column<bool>(nullable: false),
PasswordHash = table.Column<string>(nullable: true),
SecurityStamp = table.Column<string>(nullable: true),
ConcurrencyStamp = table.Column<string>(nullable: true),
PhoneNumber = table.Column<string>(nullable: true),
PhoneNumberConfirmed = table.Column<bool>(nullable: false),
TwoFactorEnabled = table.Column<bool>(nullable: false),
LockoutEnd = table.Column<DateTimeOffset>(nullable: true),
LockoutEnabled = table.Column<bool>(nullable: false),
AccessFailedCount = table.Column<int>(nullable: false),
UserCode = table.Column<string>(nullable: true),
FullName = table.Column<string>(nullable: true),
Title = table.Column<string>(nullable: true),
Gender = table.Column<string>(nullable: true),
Birthdate = table.Column<DateTime>(nullable: true),
JoinDate = table.Column<DateTime>(nullable: true),
Status = table.Column<string>(nullable: true),
StatusEndDate = table.Column<DateTime>(nullable: true),
JobTitle = table.Column<string>(nullable: true),
EmploymentType = table.Column<string>(nullable: true),
ProfilePic = table.Column<string>(nullable: true),
IsActive = table.Column<bool>(nullable: true),
CreatedBy = table.Column<string>(nullable: true),
CreatedDate = table.Column<DateTime>(nullable: true),
UpdatedBy = table.Column<string>(nullable: true),
UpdatedDate = table.Column<DateTime>(nullable: true),
DelDate = table.Column<DateTime>(nullable: true)
},
constraints: table =>
{
table.PrimaryKey("PK_AspNetUsers", x => x.Id);
table.ForeignKey(
name: "FK_AspNetUsers_AspNetUsers_CreatedBy",
column: x => x.CreatedBy,
principalTable: "AspNetUsers",
principalColumn: "Id",
onDelete: ReferentialAction.Restrict);
table.ForeignKey(
name: "FK_AspNetUsers_AspNetUsers_UpdatedBy",
column: x => x.UpdatedBy,
principalTable: "AspNetUsers",
principalColumn: "Id",
onDelete: ReferentialAction.Restrict);
});
migrationBuilder.CreateTable(
name: "AspNetRoleClaims",
columns: table => new
{
Id = table.Column<int>(nullable: false)
.Annotation("SqlServer:Identity", "1, 1"),
RoleId = table.Column<string>(nullable: false),
ClaimType = table.Column<string>(nullable: true),
ClaimValue = table.Column<string>(nullable: true)
},
constraints: table =>
{
table.PrimaryKey("PK_AspNetRoleClaims", x => x.Id);
table.ForeignKey(
name: "FK_AspNetRoleClaims_AspNetRoles_RoleId",
column: x => x.RoleId,
principalTable: "AspNetRoles",
principalColumn: "Id",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateTable(
name: "AspNetUserClaims",
columns: table => new
{
Id = table.Column<int>(nullable: false)
.Annotation("SqlServer:Identity", "1, 1"),
UserId = table.Column<string>(nullable: false),
ClaimType = table.Column<string>(nullable: true),
ClaimValue = table.Column<string>(nullable: true)
},
constraints: table =>
{
table.PrimaryKey("PK_AspNetUserClaims", x => x.Id);
table.ForeignKey(
name: "FK_AspNetUserClaims_AspNetUsers_UserId",
column: x => x.UserId,
principalTable: "AspNetUsers",
principalColumn: "Id",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateTable(
name: "AspNetUserLogins",
columns: table => new
{
LoginProvider = table.Column<string>(nullable: false),
ProviderKey = table.Column<string>(nullable: false),
ProviderDisplayName = table.Column<string>(nullable: true),
UserId = table.Column<string>(nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_AspNetUserLogins", x => new { x.LoginProvider, x.ProviderKey });
table.ForeignKey(
name: "FK_AspNetUserLogins_AspNetUsers_UserId",
column: x => x.UserId,
principalTable: "AspNetUsers",
principalColumn: "Id",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateTable(
name: "AspNetUserRoles",
columns: table => new
{
UserId = table.Column<string>(nullable: false),
RoleId = table.Column<string>(nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_AspNetUserRoles", x => new { x.UserId, x.RoleId });
table.ForeignKey(
name: "FK_AspNetUserRoles_AspNetRoles_RoleId",
column: x => x.RoleId,
principalTable: "AspNetRoles",
principalColumn: "Id",
onDelete: ReferentialAction.Cascade);
table.ForeignKey(
name: "FK_AspNetUserRoles_AspNetUsers_UserId",
column: x => x.UserId,
principalTable: "AspNetUsers",
principalColumn: "Id",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateTable(
name: "AspNetUserTokens",
columns: table => new
{
UserId = table.Column<string>(nullable: false),
LoginProvider = table.Column<string>(nullable: false),
Name = table.Column<string>(nullable: false),
Value = table.Column<string>(nullable: true)
},
constraints: table =>
{
table.PrimaryKey("PK_AspNetUserTokens", x => new { x.UserId, x.LoginProvider, x.Name });
table.ForeignKey(
name: "FK_AspNetUserTokens_AspNetUsers_UserId",
column: x => x.UserId,
principalTable: "AspNetUsers",
principalColumn: "Id",
onDelete: ReferentialAction.Cascade);
});
migrationBuilder.CreateTable(
name: "Feature",
schema: "Administrator",
columns: table => new
{
ID = table.Column<string>(nullable: false),
CreatedBy = table.Column<string>(nullable: true),
CreatedDate = table.Column<DateTime>(nullable: true),
UpdatedBy = table.Column<string>(nullable: true),
UpdatedDate = table.Column<DateTime>(nullable: true),
DelDate = table.Column<DateTime>(nullable: true),
FeatureName = table.Column<string>(nullable: true),
AreaName = table.Column<string>(nullable: true),
ControllerName = table.Column<string>(nullable: true),
ActionName = table.Column<string>(nullable: true),
MenuIcon = table.Column<string>(nullable: true),
IsMenu = table.Column<bool>(nullable: true),
Description = table.Column<string>(nullable: true),
FeatureID = table.Column<string>(nullable: true),
ParentMenuID = table.Column<string>(nullable: true),
Sequence = table.Column<int>(nullable: false),
SequenceChild = table.Column<int>(nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_Feature", x => x.ID);
table.ForeignKey(
name: "FK_Feature_AspNetUsers_CreatedBy",
column: x => x.CreatedBy,
principalTable: "AspNetUsers",
principalColumn: "Id",
onDelete: ReferentialAction.Restrict);
table.ForeignKey(
name: "FK_Feature_Feature_ParentMenuID",
column: x => x.ParentMenuID,
principalSchema: "Administrator",
principalTable: "Feature",
principalColumn: "ID",
onDelete: ReferentialAction.Restrict);
table.ForeignKey(
name: "FK_Feature_AspNetUsers_UpdatedBy",
column: x => x.UpdatedBy,
principalTable: "AspNetUsers",
principalColumn: "Id",
onDelete: ReferentialAction.Restrict);
});
migrationBuilder.CreateTable(
name: "RoleFeature",
schema: "Administrator",
columns: table => new
{
ID = table.Column<string>(nullable: false),
CreatedBy = table.Column<string>(nullable: true),
CreatedDate = table.Column<DateTime>(nullable: true),
UpdatedBy = table.Column<string>(nullable: true),
UpdatedDate = table.Column<DateTime>(nullable: true),
DelDate = table.Column<DateTime>(nullable: true),
FeatureID = table.Column<string>(nullable: true),
RoleID = table.Column<string>(nullable: true),
IsAddView = table.Column<bool>(nullable: false),
IsEdit = table.Column<bool>(nullable: false),
IsDelete = table.Column<bool>(nullable: false)
},
constraints: table =>
{
table.PrimaryKey("PK_RoleFeature", x => x.ID);
table.ForeignKey(
name: "FK_RoleFeature_AspNetUsers_CreatedBy",
column: x => x.CreatedBy,
principalTable: "AspNetUsers",
principalColumn: "Id",
onDelete: ReferentialAction.Restrict);
table.ForeignKey(
name: "FK_RoleFeature_Feature_FeatureID",
column: x => x.FeatureID,
principalSchema: "Administrator",
principalTable: "Feature",
principalColumn: "ID",
onDelete: ReferentialAction.Restrict);
table.ForeignKey(
name: "FK_RoleFeature_AspNetRoles_RoleID",
column: x => x.RoleID,
principalTable: "AspNetRoles",
principalColumn: "Id",
onDelete: ReferentialAction.Restrict);
table.ForeignKey(
name: "FK_RoleFeature_AspNetUsers_UpdatedBy",
column: x => x.UpdatedBy,
principalTable: "AspNetUsers",
principalColumn: "Id",
onDelete: ReferentialAction.Restrict);
});
migrationBuilder.CreateIndex(
name: "IX_AspNetRoleClaims_RoleId",
table: "AspNetRoleClaims",
column: "RoleId");
migrationBuilder.CreateIndex(
name: "RoleNameIndex",
table: "AspNetRoles",
column: "NormalizedName",
unique: true,
filter: "[NormalizedName] IS NOT NULL");
migrationBuilder.CreateIndex(
name: "IX_AspNetUserClaims_UserId",
table: "AspNetUserClaims",
column: "UserId");
migrationBuilder.CreateIndex(
name: "IX_AspNetUserLogins_UserId",
table: "AspNetUserLogins",
column: "UserId");
migrationBuilder.CreateIndex(
name: "IX_AspNetUserRoles_RoleId",
table: "AspNetUserRoles",
column: "RoleId");
migrationBuilder.CreateIndex(
name: "IX_AspNetUsers_CreatedBy",
table: "AspNetUsers",
column: "CreatedBy");
migrationBuilder.CreateIndex(
name: "EmailIndex",
table: "AspNetUsers",
column: "NormalizedEmail");
migrationBuilder.CreateIndex(
name: "UserNameIndex",
table: "AspNetUsers",
column: "NormalizedUserName",
unique: true,
filter: "[NormalizedUserName] IS NOT NULL");
migrationBuilder.CreateIndex(
name: "IX_AspNetUsers_UpdatedBy",
table: "AspNetUsers",
column: "UpdatedBy");
migrationBuilder.CreateIndex(
name: "IX_Feature_CreatedBy",
schema: "Administrator",
table: "Feature",
column: "CreatedBy");
migrationBuilder.CreateIndex(
name: "IX_Feature_ParentMenuID",
schema: "Administrator",
table: "Feature",
column: "ParentMenuID");
migrationBuilder.CreateIndex(
name: "IX_Feature_UpdatedBy",
schema: "Administrator",
table: "Feature",
column: "UpdatedBy");
migrationBuilder.CreateIndex(
name: "IX_RoleFeature_CreatedBy",
schema: "Administrator",
table: "RoleFeature",
column: "CreatedBy");
migrationBuilder.CreateIndex(
name: "IX_RoleFeature_FeatureID",
schema: "Administrator",
table: "RoleFeature",
column: "FeatureID");
migrationBuilder.CreateIndex(
name: "IX_RoleFeature_RoleID",
schema: "Administrator",
table: "RoleFeature",
column: "RoleID");
migrationBuilder.CreateIndex(
name: "IX_RoleFeature_UpdatedBy",
schema: "Administrator",
table: "RoleFeature",
column: "UpdatedBy");
}
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropTable(
name: "AspNetRoleClaims");
migrationBuilder.DropTable(
name: "AspNetUserClaims");
migrationBuilder.DropTable(
name: "AspNetUserLogins");
migrationBuilder.DropTable(
name: "AspNetUserRoles");
migrationBuilder.DropTable(
name: "AspNetUserTokens");
migrationBuilder.DropTable(
name: "RoleFeature",
schema: "Administrator");
migrationBuilder.DropTable(
name: "Feature",
schema: "Administrator");
migrationBuilder.DropTable(
name: "AspNetRoles");
migrationBuilder.DropTable(
name: "AspNetUsers");
}
}
}
<file_sep># Abhimantras.Sanofi
project Sanofi (Sales Incentive)
<file_sep>using Sanofi.Core.EntitiesModel.IdentityCore;
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations.Schema;
using System.Text;
namespace Sanofi.Core.EntitiesModel.Administrator
{
[Table("RoleFeature", Schema = "Administrator")]
public class RoleFeature : BaseEntity
{
public string FeatureID { get; set; }
public Feature Feature { get; set; }
public string RoleID { get; set; }
public ApplicationRole Role { get; set; }
// Ali
public bool IsAddView { get; set; }
public bool IsEdit { get; set; }
public bool IsDelete { get; set; }
}
}
<file_sep>using System;
namespace Sanofi.Service
{
public class Class1
{
}
}
<file_sep>using Microsoft.EntityFrameworkCore;
using Sanofi.Core;
using System;
using System.Collections.Generic;
using System.Text;
namespace Sanofi.Infrastructure.DbContext
{
public class ApplicationDbContextFactory
{
private static readonly string ConnectionString = AppSettingJson.GetConnectionString();
public ApplicationDbContext CreateDbContext(string[] args)
{
var optionsBuilder = new DbContextOptionsBuilder<ApplicationDbContext>();
optionsBuilder.UseSqlServer(ConnectionString);
return new ApplicationDbContext(optionsBuilder.Options);
}
}
}<file_sep>using System;
using System.Collections.Generic;
using System.Text;
namespace Sanofi.Core
{
public class GlobalVariableParamModel
{
public string ConnectionString { get; set; }
public string ApplicationName { get; set; }
public string ApplicationDomain { get; set; }
public string ApplicationWebAppDomain { get; set; }
public string ContentRootPath { get; set; }
public string ReportBaseUrl { get; set; }
public string EmailNotificationUsername { get; set; }
public string EmailNotificationPassword { get; set; }
public string EmailNotificationHost { get; set; }
public int EmailNotificationPort { get; set; }
public string SensenetBaseUrl { get; set; }
public string EnvironmentVariable { get; set; }
}
}
<file_sep>using Abhimantra.Sanofi.Base;
using Microsoft.AspNetCore.Mvc.Filters;
using Sanofi.Core;
using Sanofi.Core.EntitiesModel.Administrator;
using Sanofi.Infrastructure.DbContext;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Principal;
using System.Threading.Tasks;
namespace Abhimantra.Sanofi
{
public class MyActionFilter : BaseRepository, IActionFilter
{
public MyActionFilter(ApplicationDbContext context, IPrincipal principal, GlobalVariableParamModel globalParameter) : base(context, principal, globalParameter) { }
public IQueryable<Feature> AllFeature
{
get
{
return context.Feature.Where(feature => feature.DelDate == null);
}
}
public string GetFeature(string area, string controller, string action)
{
return AllFeature.Where(a => a.AreaName == area && a.ControllerName == controller && (a.ActionName == "index" || a.ActionName == action)).FirstOrDefault().FeatureName;
}
public void OnActionExecuted(ActionExecutedContext context)
{
var x = "after action";
//context.RouteData.Values.Add("tes2", x);
}
public void OnActionExecuting(ActionExecutingContext context)
{
try
{
var x = "before action";
string action = context.RouteData.Values["action"].ToString();
if (action.ToLower().Contains("index") || action.ToLower().Contains("create") || action.ToLower().Contains("edit") || action.ToLower().Contains("detail"))
{
string area = context.RouteData.Values["area"].ToString();
string controller = context.RouteData.Values["controller"].ToString();
x = GetFeature(area, controller, action);
context.RouteData.Values.Add("Title", x);
}
//var tes = context.RouteData.Values["Title"];
}
catch (Exception ex)
{
var msg = ex.Message;
}
}
}
}
<file_sep>using Sanofi.Core.EntitiesModel.IdentityCore;
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Reflection;
using System.Text;
namespace Sanofi.Core.EntitiesModel
{
public class BaseEntity
{
[Key]
public string ID { get; set; }
[ForeignKey("AppUserCreateBy")]
public string CreatedBy { get; set; }
public virtual ApplicationUser AppUserCreateBy { get; set; }
public Nullable<DateTime> CreatedDate { get; set; }
[ForeignKey("AppUserUpdateBy")]
public string UpdatedBy { get; set; }
public virtual ApplicationUser AppUserUpdateBy { get; set; }
public Nullable<DateTime> UpdatedDate { get; set; }
public Nullable<DateTime> DelDate { get; set; }
//public Boolean IsActived { get; set; }
public object this[string propertyName]
{
get
{
Type myType = GetType();// typeof(BaseEntity);
PropertyInfo myPropInfo = myType.GetProperty(propertyName);
return myPropInfo.GetValue(this, null);
}
set
{
Type myType = typeof(BaseEntity);
PropertyInfo myPropInfo = myType.GetProperty(propertyName);
myPropInfo.SetValue(this, value, null);
}
}
}
}
<file_sep>using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.Controllers;
using Microsoft.AspNetCore.Mvc.Filters;
using Sanofi.Infrastructure.DbContext;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace Abhimantra.Sanofi.Base
{
public class AuthorizationPageRequirement : IAuthorizationRequirement
{
}
public class AuthorizationPageHandler : AuthorizationHandler<AuthorizationPageRequirement>
{
private readonly ApplicationDbContext dbContext;
public AuthorizationPageHandler(ApplicationDbContext dbContext)
{
this.dbContext = dbContext;
}
protected override Task HandleRequirementAsync(AuthorizationHandlerContext context,
AuthorizationPageRequirement requirement)
{
var mvcContext = context.Resource as AuthorizationFilterContext;
ControllerActionDescriptor descriptor = mvcContext?.ActionDescriptor as ControllerActionDescriptor;
if (descriptor != null)
{
var actionName = descriptor.ActionName;
var controllerName = descriptor.ControllerName.ToLower();
var currentUserName = context.User.Identity.Name;
if (currentUserName != null)
{
if (descriptor.MethodInfo.ReturnType == typeof(IActionResult) ||
descriptor.MethodInfo.ReturnType == typeof(Task<IActionResult>) ||
descriptor.MethodInfo.ReturnType == typeof(ActionResult) ||
descriptor.MethodInfo.ReturnType == typeof(Task<ActionResult>))
{
var hasAccess = false;
if (controllerName == "home" || controllerName == "file")// && actionName == "Index")
{
hasAccess = true;
}
else
{
if (controllerName == "investigation")
controllerName = "incident";
var currentUser = dbContext.Users.FirstOrDefault(user => user.UserName == currentUserName);
if (currentUser != null)
{
var currentUserRoles = dbContext.UserRoles.Where(rol => rol.UserId == currentUser.Id).ToList();
hasAccess = (from userRole in currentUserRoles
join roleAccess in dbContext.RoleFeature
on userRole.RoleId equals roleAccess.RoleID
join feature in dbContext.Feature
on roleAccess.FeatureID equals feature.ID
where userRole.UserId == currentUser.Id
&& feature.ControllerName?.ToLower() == controllerName
&& roleAccess.IsAddView // Addition: only groups with IsAddView checked can see the menu -rezkar 27/01/2020
//&& feature.ActionName == actionName
select userRole).Any();
if (!hasAccess)
{
hasAccess = (from userRole in currentUserRoles
join role in dbContext.Roles
on userRole.RoleId equals role.Id
select role).Any();
}
}
}
if (hasAccess)
{
context.Succeed(requirement);
}
}
else
{
context.Succeed(requirement);
}
}
}
//TODO: Use the following if targeting a version of
//.NET Framework older than 4.6:
// return Task.FromResult(0);
return Task.CompletedTask;
}
}
}
| 7eb5928e01cb2f9b88e827a94ddba3838731309f | ["Markdown", "C#"] | 17 | C# | tryagung/Abhimantras.Sanofi-master | 8369e63cde9c7073c3692eed2d54a76749dfd851 | 50114793a36e5161680ba1487fc193f030743a59 | refs/heads/master |
<repo_name>abrickaday/fend-project-memory-game<file_sep>/js/app.js
// list of 8 font awesome icon suffixes that shows up when a card is open
var icons = ['diamond', 'paper-plane-o', 'anchor', 'bolt', 'cube', 'leaf', 'bicycle', 'bomb'];
// array to hold a total of 16 cards
var cards = [];
// duplicate the 8 icons twice so the cards array has 16 elements
for (var i = 0; i < icons.length; i++) {
cards.push(icons[i]);
cards.push(icons[i]);
}
var openedCards = []; // max 2 cards to be added here for comparison
var movesCounter = 0;
var pairsCounter = 0; // number of matched pairs for keeping track whether player has won
var min, sec, timeOut;
document.querySelector('.deck').addEventListener('click', openCard);
// add a one-off event listener that starts the timer when the user first opens a card
document.querySelector('.deck').addEventListener('click', startTimer, { once: true });
document.querySelector('.restart').addEventListener('click', restartGame);
document.querySelector('.playBtn').addEventListener('click', replayGame);
var modal = document.getElementsByClassName('modal')[0];
var close = document.getElementsByClassName('close')[0];
close.addEventListener('click', function() { modal.style.display = 'none'; } );
window.addEventListener('click', function (event) { if (event.target == modal) { modal.style.display = 'none';} } );
shuffle(cards);
addCard(cards);
function addCard(cards) {
var deck = document.querySelector('.deck');
for (var i=0; i < cards.length; i++) {
// create a new li element
var newLi = document.createElement('li');
// use css class 'card' to show the cards faced down
newLi.className = 'card';
// add an id for each li element
newLi.id = i.toString();
// create a new i element
var newI = document.createElement('i');
var iconClassName = 'fa fa-' + cards[i];
newI.className = iconClassName;
newLi.appendChild(newI);
deck.appendChild(newLi);
}
}
// Shuffle function from http://stackoverflow.com/a/2450976
function shuffle(array) {
var currentIndex = array.length, temporaryValue, randomIndex;
while (currentIndex !== 0) {
randomIndex = Math.floor(Math.random() * currentIndex);
currentIndex -= 1;
temporaryValue = array[currentIndex];
array[currentIndex] = array[randomIndex];
array[randomIndex] = temporaryValue;
}
return array;
}
// opening, closing or showing of cards are done by adding and removing the respective CSS classes
function openCard(event) {
    // ignore clicks on cards that are already open, on the icon inside a card, or on the deck itself
    if (event.target.classList.contains('open') ||
        event.target.classList.contains('fa') ||
        event.target.classList.contains('deck')) {
        return;
    }
    countMoves();
    event.target.classList.add('open', 'show');
    addToOpenCards(event.target);
}
function showCards(card1, card2) {
card1.classList.add('match');
card2.classList.add('match');
}
function closeCards(card1, card2) {
card1.classList.remove('open', 'show', 'try-again');
card2.classList.remove('open', 'show', 'try-again');
}
function addToOpenCards(openedCard) {
if (openedCards.length == 0) {
openedCards.push(openedCard);
} else if (openedCards.length == 1) {
openedCards.push(openedCard);
var openedCard1 = document.getElementById(openedCards[0].id);
var openedCard2 = document.getElementById(openedCards[1].id);
checkCards(openedCard1, openedCard2);
} else {
openedCards.length = 0;
openedCards.push(openedCard);
}
}
function checkCards(card1, card2) {
    var icon1 = card1.firstElementChild.className;
    var icon2 = card2.firstElementChild.className;
if (icon1 == icon2) {
card1.classList.add('paired');
card2.classList.add('paired');
showCards(card1, card2);
pairsCounter += 1;
checkWin();
} else {
card1.classList.add('try-again');
card2.classList.add('try-again');
setTimeout(closeCards, 500, card1, card2);
}
}
function checkWin() {
if (pairsCounter == 8) {
// stop the timer
stopTimer();
// show the win modal
modal.style.display = "block";
// get the number of stars and total time elapsed at the point of winning
var winStars = document.querySelector('.score-panel').firstElementChild.innerHTML;
var winTime = document.querySelector('.timer').textContent;
// update the modal with the number of winning stars, time taken and moves taken
document.querySelector('.winning-stars').innerHTML = winStars;
document.querySelector('.winning-timer').textContent = winTime;
document.querySelector('.winning-moves').innerHTML = movesCounter;
}
}
function countMoves() {
movesCounter += 1;
document.querySelector('.moves').innerHTML = movesCounter;
    if (movesCounter == 30 || movesCounter == 40) {
        removeStar();
    }
}
function resetMoves() {
movesCounter = 0;
document.querySelector('.moves').innerHTML = movesCounter;
}
function removeStar() {
document.querySelector('.stars').firstElementChild.remove();
}
// use the functions waitOneSec and addOneSec to create an infinite loop that keeps counting
function addOneSec() {
sec += 1;
updateTimer();
waitOneSec();
}
function waitOneSec() {
timeOut = setTimeout(addOneSec,1000);
}
function startTimer() {
min = 0;
sec = 0;
waitOneSec();
}
function resetTimer() {
clearTimeout(timeOut);
document.querySelector('.timer').textContent = '00:00';
}
function stopTimer() {
clearTimeout(timeOut);
}
function updateTimer() {
if (sec >= 60) {
sec = 0;
min += 1;
}
var sec_f, min_f; //formatted sec & min
if (sec < 10) {
sec_f = '0' + sec.toString(); // prefix a zero for 0 to 9 seconds
} else {
sec_f = sec.toString();
}
if (min < 10) {
min_f = '0' + min.toString(); // prefix a zero for 0 to 9 minutes
} else {
min_f = min.toString();
}
var time = min_f + ':' + sec_f; // format to be displayed
document.querySelector('.timer').textContent = time;
}
function clearCards() {
document.querySelector('.deck').innerHTML = '';
}
function resetStars() {
var stars = document.querySelector('.stars');
stars.innerHTML = '';
for (var i=0; i < 3; i++) {
//create a new li element
var newLi = document.createElement('li');
//create a new i createElement
var newI = document.createElement('i');
newI.className = 'fa fa-star';
newLi.appendChild(newI);
stars.appendChild(newLi);
}
}
function restartGame() {
openedCards = [];
clearCards();
shuffle(cards);
addCard(cards);
resetMoves();
resetStars();
resetTimer();
document.querySelector('.deck').addEventListener('click', startTimer, { once: true, });
pairsCounter = 0;
}
function replayGame() {
modal.style.display = "none";
restartGame();
}
<file_sep>/README.md
# Memory Game Project
## Table of Contents
* [Instructions](#instructions)
* [Dependencies](#dependencies)
## Instructions
The objective of this Memory Game is to find all 8 matching pairs of cards.
A stopwatch keeps track of the time elapsed since the start of the game and a moves counter keeps track of the number of moves taken.
At the start of the game, the player starts with a full rating of 3 stars. The rating will drop when the number of moves taken increases.
When all card pairs are found, a winning modal shows the winning statistics.
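For reference, the star rating is driven purely by the move counter; the sketch below mirrors the thresholds used in `js/app.js` (one star is removed at 30 moves and another at 40), and it assumes the `.stars` and `.moves` elements of the project's score panel are present in the page.

```js
// Minimal sketch of the rating rule in js/app.js: the player starts with
// 3 stars and loses one when the move counter reaches 30 and again at 40.
var movesCounter = 0;

function removeStar() {
    // drop the first remaining star from the score panel
    document.querySelector('.stars').firstElementChild.remove();
}

function countMoves() {
    movesCounter += 1;
    document.querySelector('.moves').innerHTML = movesCounter;
    if (movesCounter === 30 || movesCounter === 40) {
        removeStar();
    }
}
```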
## Dependencies
This project uses the following external libraries:
* [Font Awesome](https://fontawesome.com/?from=io)
* [Google Fonts](https://fonts.google.com/)
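As a usage note, the Font Awesome dependency is consumed by composing class names from the icon-suffix list in `js/app.js`; the sketch below shows that pattern in isolation (the `.deck` container and suffix values come from this project, and the `<li>` wrapper used by the real code is omitted here).

```js
// Build Font Awesome icons from plain suffixes, as js/app.js does with 'fa fa-' + suffix.
var icons = ['diamond', 'anchor', 'bolt']; // a subset of the suffixes used in the game
icons.forEach(function (suffix) {
    var icon = document.createElement('i');
    icon.className = 'fa fa-' + suffix; // e.g. "fa fa-diamond"
    document.querySelector('.deck').appendChild(icon);
});
```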
| fe828e82f0be8827b62f5ca03dedfd10b192d24f | ["JavaScript", "Markdown"] | 2 | JavaScript | abrickaday/fend-project-memory-game | b19ce2e5e4d73844755c978d912e22dc15a4da06 | fc1d8ef585baaf4314f4e8ada6c63496dbc8b251 | refs/heads/master |
<repo_name>ashiqf/SharepointApps<file_sep>/ConsoleApplicationSample/Program.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace ConsoleApplicationSample
{
class Program
{
static void Main(string[] args)
{
var rt = "test";
rt.IndexOf("es");
}
}
}
| df2ac3822cf9f42f39367a470bff0399bb9e90cc | ["C#"] | 1 | C# | ashiqf/SharepointApps | f074cd65af99d5f0dff3cb1557c67b614a3e3434 | 62343af0b7674c4307c49f574b133b80abcfdfeb | refs/heads/master |
<repo_name>XiaoMutt/Cloveriver<file_sep>/scheduler.php
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">
<?php
include_once '_basics.php';
CheckUser();
$cOpt = new OperateMyEquipment();
$aaEquipment = $cOpt->getAuthEquipment();
if (count($aaEquipment)) {
if(isset($_GET["equipment"])){
$id=base64_decode($_GET["equipment"]);
foreach ($aaEquipment as $aEq){
if($id==$aEq["id"]){
$sSelected=$id;
break;
}
}
}
if(!isset($sSelected)){
$sSelected=$aaEquipment[0]["id"];
}
}
?>
<html>
<head>
<meta http-equiv="content-type" content="text/html; charset=utf-8" />
<title>Cloveriver</title>
<link rel="icon" href="icons/clover.ico" />
<style type="text/css" title="currentStyle">
@import "css/page.css";
@import "css/data_table_jui.css";
@import "css/jquery-ui-1.8.16.custom.css";
</style>
<script type="text/javascript" src="js/jquery.js"></script>
<script type="text/javascript" src="js/jquery.dataTables.js"></script>
<script type="text/javascript" src="js/jquery-ui-1.8.16.custom.min.js"></script>
<script type="text/javascript" src="js/common.js"></script>
<script type="text/javascript" charset="utf-8">
var oTable; //datatable object;
var aData=Array(); //Data;
var pickedDate;// in YYYY-mm-dd format;
var sServerSide="server_side_equipment_record_processing.php"; //server side processing php file name;
var sSelected=<?php echo (isset($sSelected) ? $sSelected : "false"); ?>;// the table selected;
//
var temp=new Date();
pickedDate=temp.getFullYear()+"-"+(temp.getMonth()+1)+"-"+temp.getDate();
// Formating function for row details
function fnShowDetails ( nTr )
{
fnMessenger("waiting", "Contacting server...");
$.post(
sServerSide,
{action: "detail", iId: aData[1]}, //aData[1] contains the id of the row that was clicked;
function(data){
$('#messenger_dialog').hide();
var sOut='<table cellpadding="5" cellspacing="0" border="0" style="padding-left:50px;">';
for (var sVal in data){
sOut+="<tr><td>"+sVal+": </td><td>"+data[sVal]+"</td></tr>";
}
sOut += '</table>';
oTable.fnOpen( nTr, sOut, 'details' );
},
"json"
).error(function (){fnMessenger("error", "Server error!");});
}
//function for send edited form to server;
function fnUpdateRecord(){
var aName =['id'];
var aValue=new Array();
var aaValue=new Array();
aValue.push(aData[1]);
$('#edit_form input').each(function(){
aName.push($(this).attr('name'));
aValue.push($(this).val());
});
aName.push("status");
aValue.push($('#edit_form select[name="status"]').val());
aaValue.push(aValue);
var sName=array2json(aName);
var sValue=array2json(aaValue);
fnMessenger("waiting", "Contacting server...");
$.post(
sServerSide,
{action: "update", keys: sName, values: sValue},
function (data){
if (data["changed"]=="0"){
fnMessenger("warning", "Please check your input.");
$('#edit_message').html(data["errors"]);
}
else{
if(aValue[0]==sSelected){
window.location.href="scheduler.php?equipment="+encode64(aData[1]);
}
else{
fnMessenger("OK", "Record updated.");
$('#edit_dialog').dialog( "close" );
oTable.fnClearTable();
oTable.fnDraw();
}
}
},
"json"
).error(function (){fnMessenger("error", "Server error!");});
}
function fnFormatEditDialog(){
$('#edit_message').html("Please Enter");
fnMessenger("waiting", "Contacting server...");
$.post(
sServerSide,
{action: "edit", iId: aData[1]}, //aData[1] contains the id of the row that was clicked;
function(data){
$('#messenger_dialog').hide();
for (var sVal in data){
$('#edit_form input[name="'+sVal+'"]').val(data[sVal]);
}
$('#edit_form select[name="status"]').val(data["status"]);
$("#edit_dialog").dialog("open");
},
"json"
).error(function(){fnMessenger("error", "Server error!");});
}
//function for deleting records;
function fnDeleteRecords ()
{
//put the clicked row's id into an array and then into a comma-separated string, because the deleteRecords function requires an array as its argument;
var aIds=Array();
aIds[0]=aData[1];
sIdString=aIds.join(",");
fnMessenger("waiting", "Contacting server...");
$.post(
sServerSide,
{action: "delete", sIds: sIdString},
function(data) {
if (data["changed"]=="0"){
fnMessenger("warning", "Operation failed at server side!");
}
else{
if(aIds[0]==sSelected){
window.location.href="scheduler.php";
}
else{
fnMessenger("OK", "Record deleted.");
oTable.fnClearTable();
oTable.fnDraw();
}
}
},
"json"
).error(function(){fnMessenger("error", "Server error!");});
}
function fnDeleteReservation(div){
var id=$(div).attr("name");
fnMessenger("waiting", "Contacting server...");
$.post(
"server_side_user_equipment_reservation_record_processing.php",
{action: "delete_reservation", id: id},
function(data) {
if (data["changed"]=="0"){
fnMessenger("warning", "Operation failed at server side!");
}
else{
fnMessenger("OK", "Reservation deleted.");
$(div).remove();
}
},
"json"
).error(function(){fnMessenger("error", "Server error!");});
}
function fnGetReservationList(index, sDate){
var j, h1, m1, h2, m2;
$("#revlist"+index).html("");
fnMessenger("waiting", "Contacting server...");
$.post(
"server_side_user_equipment_reservation_record_processing.php",
{action:"list", date: sDate, equipment_id: sSelected},
function(data){
$('#messenger_dialog').hide();
for (j in data){
h1=Math.floor(data[j]["from"]/100);
m1=(data[j]["from"]%100)/100*60;
h2=Math.floor(data[j]["to"]/100);
m2=(data[j]["to"]%100)/100*60;
m1=(m1==0?"00":m1);
m2=(m2==0?"00":m2);
$("#revlist"+index).append('<div style="width: 100%" name="'+data[j]["id"]+'">'+h1+':'+m1+'-'+h2+':'+m2+' '+data[j]["user_id"]+data[j]["description"]+data[j]["actions"]+'</div>');
}
},
"json").error(function (){fnMessenger("error", "Server error!");});
}
function fnSetDatelist(date){
var weekDay=['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday'];
var indexDate=new Date();
for (var i=1; i<6; i++){
$('#date'+i).html("");
indexDate.setFullYear(date.getFullYear(), date.getMonth(), date.getDate()-3+i);
$('#date'+i).html((indexDate.getMonth()+1)+"/"+indexDate.getDate()+"/"+indexDate.getFullYear()+" "+weekDay[indexDate.getDay()]);
fnGetReservationList(i, indexDate.getFullYear()+"-"+(indexDate.getMonth()+1)+"-"+indexDate.getDate());
}
}
function fnSubmitReservation(){
var values=$("#time_slider").slider( "option", "values" );
var from=values[0]*100;
var to=values[1]*100;
var description=$("#comments").val();
fnMessenger("waiting", "Contacting server...");
$.post(
"server_side_user_equipment_reservation_record_processing.php",
{action:"reserve", date: pickedDate, equipment_id: sSelected, from: from, to: to, description: description},
function(data){
if(data["changed"]==0){
fnMessenger("warning", data["errors"]);
}
else{
fnGetReservationList(3, pickedDate);
fnMessenger("OK", "Reservation submitted.");
}
},
"json").error(function (){fnMessenger("error", "Server error!");});
}
//******document ready******
$(document).ready(function() {
//prepare accordion;
$('.accordionhead').click(function() {
$(this).next().slideToggle(500);
return false;
})
//prepare datepicker;
$( "#datepicker" ).datepicker({
onSelect: function (date){
temp=new Date(date);
pickedDate=temp.getFullYear()+"-"+(temp.getMonth()+1)+"-"+temp.getDate();
fnSetDatelist(new Date(date));
}
});
//prepare datatable;
oTable = $('#data_table').dataTable( {
"aoColumnDefs": [
{ "bSortable": false, "aTargets": [ 0 ] }
],
"aaSorting": [[1, 'asc']] ,
"sPaginationType": "full_numbers",
"bServerSide": true,
"bProcessing": true,
"bJQueryUI": true,
"sAjaxSource": "server_side_my_equipment_ajax_search.php"
} );
//prepare edit_dialog;
$('#edit_dialog').dialog({
autoOpen: false,
width: 600,
modal: true,
buttons: {
"OK": function() {
fnUpdateRecord ();
},
"Cancel": function() {
$( this ).dialog( "close" );
}
}
});
function fnGenerateEmailList(){
fnMessenger("waiting", "Contacting server...");
$.post(
sServerSide,
{action: "emaillist", id: aData[1]},
function (data){
if (data["changed"]=="0"){
fnMessenger("warning", "Server side error.");
}
else{
fnMessenger("OK", "Email list generated.");
$('#emaillist_textarea').val(data["emaillist"]);
$('#emaillist').dialog( "open" );
}
},
"json"
).error(function (){fnMessenger("error", "Server error!");});
}
//prepare delete_confirmation_dialog;
$('#delete_confirmation_dialog').dialog({
autoOpen: false,
resizable: false,
height: 160,
modal: true,
buttons: {
"Delete": function() {
fnDeleteRecords ();
$( this ).dialog( "close" );
},
"Cancel": function() {
$( this ).dialog( "close" );
}
}
});
//prepare time_slider;
$( "#time_slider" ).slider({
range: true,
min: 0,
max: 23.5,
step: 0.5,
values: [ 9, 12],
slide: function( event, ui ) {
var h1=Math.floor(ui.values[ 0 ]);
var m1=(ui.values[ 0 ]-h1)*60;
var h2=Math.floor(ui.values[ 1]);
var m2=(ui.values[ 1 ]-h2)*60;
$( "#time_slot" ).html( h1+":"+(m1==0?"00":m1)+" - "+h2+":"+(m2==0?"00":m2));
}
});
$( "#time_slot" ).html("9:00-12:00" );
$('#reserve_button').button();
$('#reserve_button').click(function(){
fnSubmitReservation();
});
//prepare emaillist dialog;
$('#emaillist').dialog({
autoOpen: false,
resizable: true,
height: 300,
width: 400,
modal: true,
buttons: {
"Got it": function() {
$( this ).dialog( "close" );
}
}
});
//prepare messenger_dialog;
$('#messenger_dialog').hide();
$('#data_table tbody td img').live( 'click', function () {
var nTr = this.parentNode.parentNode;
aData = oTable.fnGetData( nTr ); //get the data from that row; aData[1] contains the id of that record;
if (this.name=="action_open")//detail icon close;
{
// This row is already open - close it
this.name="action_close";
oTable.fnClose( nTr );
}
else if(this.name=="action_close")//detail icon open;
{
// Open this row
this.name="action_open";
fnMessenger("waiting", "Retrieving data from server...");
fnShowDetails(nTr);
}
else if (this.name=="action_edit")
{
fnFormatEditDialog();//fill the edit_dialog with existed data;
}
else if (this.name=="action_delete")//delete icon;
{
$("#delete_confirmation_dialog").dialog("open");
}
else if (this.name=="action_reserve")
{
window.location.href="scheduler.php?equipment="+encode64(aData[1]);
}
else if (this.name=="action_emaillist"){
fnGenerateEmailList();
}
} );
$('img[name="action_delete_reservation"]').live("click",
function(){
fnDeleteReservation($(this).parent());
}
);
if(sSelected!=false){
fnSetDatelist(new Date());
}
} );
</script>
</head>
<body>
<?php
Menu();
?>
<div id="container">
<div class="accordion">
<div class="ui-widget-header ui-corner-all accordionhead">Scheduler</div>
<div class="text ui-widget-content ui-corner-all accordionbody">
<?php
if (count($aaEquipment)) {
?>
<div class="schedulerpanel1">
<label>1. Choose equipment</label><br/>
<select class="text ui-widget-content ui-corner-all" id="equipment_select" onchange="window.location.href='<?php echo $_SERVER['PHP_SELF'] . '?equipment=' ?>'+$('#equipment_select option:selected').attr('value')">
<?php
foreach ($aaEquipment as &$aEquip) {
echo '<option ' . ($aEquip["id"] == $sSelected ? 'selected="selected"' : '') . ' value="' . base64_encode($aEquip["id"]) . '">' . $aEquip["name"] . '</option>';
}
?>
</select>
<br/>
<div id="equipment_detail">
<?php
$cOptEquip = new OperateEquipment;
$aDetail = $cOptEquip->readRecord($sSelected, "brief", true);
foreach ($aDetail as $sKey => $sValue) {
echo '<label>' . $sKey . ': </label><label name="' . $sKey . '">' . $sValue . '</label></br>';
}
?>
</div>
</div>
<div class="schedulerpanel2"><div id="datepicker">2. Pick up a date</div></div>
<div class="schedulerpanel3">
<label>3. Please pick a time slot: </label>
<label id="time_slot"></label><div id="time_slider"></div>
<label><img src="icons/comment.png"/>Any thing to say?</label><br/>
<textarea id="comments" style="width:300px; height: 80px"></textarea><br/>
<button id="reserve_button" style="float:right">Add Reservation</button>
</div>
<ul id="datelist">
<li class="ui-state-default"><div id="date1" class="ui-widget-header ui-corner-all"></div><div id="revlist1"></div></li>
<li class="ui-state-default"><div id="date2" class="ui-widget-header ui-corner-all"></div><div id="revlist2"></div></li>
<li class="ui-state-default"><div id="date3" class="ui-widget-header ui-corner-all"></div><div id="revlist3"></div></li>
<li class="ui-state-default"><div id="date4" class="ui-widget-header ui-corner-all"></div><div id="revlist4"></div></li>
<li class="ui-state-default"><div id="date5" class="ui-widget-header ui-corner-all"></div><div id="revlist5"></div></li>
</ul>
<?php
} else {
?>
You do not have access to any equipment.<br/>
<?php
}
?>
</div>
</div>
<div class="spacer"></div>
<div class="accordion">
<div class="ui-widget-header ui-corner-all accordionhead">Equipment that I can use</div>
<div class="table_jui">
<table cellpadding="0" cellspacing="0" border="0" class="display" id="data_table">
<thead>
<tr>
<?php
$aFields = array();
$cOpt->getFields($aFields, "label", "brief");
echo "<th>Actions</th>";
foreach ($aFields as &$sColumn) {
echo "<th>" . $sColumn . "</th>";
}
?>
</tr>
</thead>
<tbody>
</tbody>
</table>
</div>
</div>
</div>
<!--email list dialog-->
<div id="emaillist" title="Please copy the follow email list to your email client">
<textarea id="emaillist_textarea" style="width:360px; height: 180px;"></textarea>
</div>
<!--delete dialog-->
<div id="delete_confirmation_dialog" title="Delete confirmation"><p><span class="ui-icon ui-icon-alert" style="float:left; margin:0 7px 20px 0;"></span>Do you really want to delete this equipment?</p></div>
<!--edit dialog-->
<div id="edit_dialog" title="Edit this equipment">
<form id="edit_form">
<div id="edit_message">Please Enter</div>
<fieldset>
<?php
$cOpt->getFields($aFieldNames, "name", "edit");
$cOpt->getFields($aFieldLabels, "label", "edit");
for ($i = 0; $i < count($aFieldNames); $i++) {
if ($aFieldNames[$i] == "status") {
echo '<label>' . $aFieldLabels[$i] . '</label>';
echo '<select name="' . $aFieldNames[$i] . '" class="text ui-widget-content ui-corner-all">
<option value="normal">normal</option>
<option value="problematic">problematic</option>
<option value="broken">broken</admin>
</select>
<br/>
';
} else {
echo '<label>' . $aFieldLabels[$i] . '</label>';
echo '<input type="text" name="' . $aFieldNames[$i] . '" class="text ui-widget-content ui-corner-all" /><br/>';
}
}
?>
</fieldset>
</form>
</div>
</body>
</html><file_sep>/index.php
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">
<?php
include_once '_basics.php';
?>
<html>
<head>
<meta http-equiv="content-type" content="text/html; charset=utf-8" />
<title>Cloveriver</title>
<link rel="icon" href="icons/clover.ico" />
<style type="text/css" title="currentStyle">
@import "css/jquery-ui-1.8.16.custom.css";
@import "css/page.css";
</style>
<script type="text/javascript" src="js/jquery.js"></script>
<script type="text/javascript" src="js/jquery-ui-1.8.16.custom.min.js"></script>
<script type="text/javascript" src="js/common.js"></script>
<script type="text/javascript" charset="utf-8">
function fnLogin(){
var sEmail=$('#login_email').val();
var sPassword=Sha1.hash($('#login_password').val());
$.post(
"server_side_user_login.php",
{email: sEmail, password: sPassword},
function(msg){
if(msg=="OK"){
window.location.href="scheduler.php";
}
else{
$('#login_message').text(msg);
}
},
"json"
).error(function (){alert("Login failed due to server connection error!")});
}
function fnRegister(){
var aName =new Array();
var aValue=new Array();
var aaValue=new Array();
$('#register_form input').each(function(){
aName.push(this.id);
if (this.id=="password"||this.id=="repeat_password"){
aValue.push(Sha1.hash($(this).val()));
}
else{
aValue.push($(this).val());
}
});
aaValue.push(aValue);
var sName=array2json(aName);
var sValue=array2json(aaValue);
$.post(
"server_side_user_record_processing.php",
{action: "add", keys: sName, values: sValue},
function (data){
if (data["changed"]==0){
$('#register_message').html(data["errors"]);
}
else{
$('#register_dialog').dialog("close");
}
},
"json"
).error(function (){alert("Register failed due to server connection error!")});
}
function fnFormatRegisterDialog(){
$('#register_message').html("Please Enter");
$('#register_form input').val("");
$('#register_dialog').dialog("open");
}
$('document').ready(function(){
$('#login_dialog').dialog({
autoOpen: true,
width: 300,
modal: false,
open: function(event, ui) { $(this).parent().find(".ui-dialog-titlebar-close").hide(); },
buttons: {
"OK": function() {
fnLogin();
},
"New User": function() {
fnFormatRegisterDialog();
}
}
});
$('#register_dialog').dialog({
autoOpen: false,
width: 300,
modal: true,
buttons: {
"OK": function() {
fnRegister();
},
"Cancel": function() {
$( this ).dialog( "close" );
}
}
});
}
);
</script>
</head>
<body>
<div id="login_dialog" title="Cloveriver Login">
<label id="login_message">Please Enter</label>
<form id="login_form">
<fieldset>
<label>Email</label><br/><input type="text" autocomplete="on" name="Email" id="login_email" class="text ui-widget-content ui-corner-all"/><br/>
<label>Password</label><br/><input type="<PASSWORD>" name="Password" id="login_password" class="text ui-widget-content ui-corner-all"/><br/>
</fieldset>
</form>
</div>
<div id="register_dialog" title="Register New User">
<div id="register_message">Please Enter</div>
<form id="register_form" >
<fieldset>
<label>Name</label><br/><input type="text" name="Name" id="name" class="text ui-widget-content ui-corner-all"/><br/>
<label>Email</label><br/><input type="text" name="Email" id="email" class="text ui-widget-content ui-corner-all"/><br/>
<label>Password</label><br/><input type="<PASSWORD>" name="Password" id="password" class="text ui-widget-content ui-corner-all"/><br/>
<label>Repeat Password</label><br/><input type="password" name="Repeat Password" id="repeat_password" class="text ui-widget-content ui-corner-all"/><br/>
<label>Description</label><br/><input type="text" name="Description" id="description" class="text ui-widget-content ui-corner-all"/><br/>
</fieldset>
</form>
</div>
</body>
<?php
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
?>
<file_sep>/about.php
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">
<?php
include_once '_basics.php';
CheckUser();
?>
<html>
<head>
<meta http-equiv="content-type" content="text/html; charset=utf-8">
<title>Cloveriver</title>
<link rel="icon" href="icons/clover.ico" />
<style type="text/css" title="currentStyle">
@import "css/page.css";
@import "css/jquery-ui-1.8.16.custom.css";
</style>
<script type="text/javascript" src="js/jquery.js"></script>
<script type="text/javascript" src="js/jquery.dataTables.js"></script>
<script type="text/javascript" src="js/jquery-ui-1.8.16.custom.min.js"></script>
<script type="text/javascript" src="js/common.js"></script>
<script type="text/javascript">
$(document).ready(function (){
$("#messenger_dialog").hide();
});
</script>
</head>
<body>
<?php
Menu();
?>
<img src="css/images/about.png"/>
</body>
</html> <file_sep>/logout.php
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">
<?php
include_once '_basics.php';
CheckUser();
?>
<html>
<head>
<meta http-equiv="content-type" content="text/html; charset=utf-8">
<title>Cloveriver</title>
<link rel="icon" href="icons/clover.ico" />
<style type="text/css" title="currentStyle">
@import "css/page.css";
@import "css/jquery-ui-1.8.16.custom.css";
</style>
<script type="text/javascript" src="js/jquery.js"></script>
<script type="text/javascript" src="js/jquery.dataTables.js"></script>
<script type="text/javascript" src="js/jquery-ui-1.8.16.custom.min.js"></script>
<script type="text/javascript" src="js/common.js"></script>
<script type="text/javascript" charset="utf-8">
function fnLogout (){
$.post(
"server_side_logout.php",
{},
function(data) {
if (data=="OK"){
window.location.href="index.php";
}
else{
fnMessenger("warning", "Logout failed!");
}
},
"json"
).error(function(){fnMessenger("error", "Server error!");});
}
$(document).ready(function(){
//prepare logout_dialog;
$('#logout_dialog').dialog({
autoOpen: true,
resizable: false,
height: 160,
modal: false,
open: function(event, ui) { $(this).parent().find(".ui-dialog-titlebar-close").hide(); },
buttons: {
"Yes": function() {
fnLogout ();
}
}
});
$("#messenger_dialog").hide();
});
</script>
</head>
<body>
<?php
Menu();
?>
<div id="logout_dialog" title="Logout Confirmation"><p><span class="ui-icon ui-icon-alert" style="float:left; margin:0 7px 20px 0;"></span>Do you really want to logout?</p></div>
</body>
</html> <file_sep>/server_side_logout.php
<?php
/*logout
*/
session_name("cloveriver");
session_start();
unset($_SESSION["user_id"]);
session_unset();
session_destroy();
echo json_encode("OK");
?>
<file_sep>/help.php
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">
<?php
include_once '_basics.php';
CheckUser();
?>
<html>
<head>
<meta http-equiv="content-type" content="text/html; charset=utf-8">
<title>Cloveriver</title>
<link rel="icon" href="icons/clover.ico" />
<style type="text/css" title="currentStyle">
@import "css/page.css";
@import "css/jquery-ui-1.8.16.custom.css";
</style>
<script type="text/javascript" src="js/jquery.js"></script>
<script type="text/javascript" src="js/jquery.dataTables.js"></script>
<script type="text/javascript" src="js/jquery-ui-1.8.16.custom.min.js"></script>
<script type="text/javascript" src="js/common.js"></script>
<script type="text/javascript" charset="utf-8">
$(document).ready(function(){
$('.accordionhead').click(function() {
$(this).next().slideToggle(500);
return false;
})
$("#messenger_dialog").hide();
});
</script>
</head>
<body>
<?php
Menu();
?>
<div id="container">
<div class="accordion">
<div class="ui-widget-header ui-corner-all accordionhead">Tips</div>
<div class="text ui-widget-content ui-corner-all accordionbody">
<div style="margin:5px">
<img src="icons/arrow_up.png"/>If click the head bar like above, its content may toggle (show/hide).<br/>
<img src="icons/bullet_star.png"/>Type in the "Search" input box at the right top corner of a table to quickly find what you are looking for.<br/>
<img src="icons/bullet_star.png"/>Click on a user's name to send an email to him/her.<br/>
<img src="icons/bullet_star.png"/>Go to "Roster" to find the person you want to contact.<br/>
<img src="icons/bullet_star.png"/>Cloveriver is not just for equipment. The equipment can be a room, or even a person who takes reservations.<br/>
<img src="icons/bullet_star.png"/>If you are admin or manager of equipment, click <img src="icons/email.png"> to generate email list that you can copy to your email client to use.<br/>
<img src="icons/bullet_star.png"/>Check the status of the equipment before you schedule it.<br/>
<img src="icons/bullet_star.png"/>Hover your mouse to an icon to check what it says, or you can check the following list.<br/>
</div>
</div>
</div>
<div class="spacer"></div>
<div class="accordion">
<div class="ui-widget-header ui-corner-all accordionhead">Meanings of Icons</div>
<div class="text ui-widget-content ui-corner-all accordionbody">
<div style="margin: 5px">
<img src="icons/user_edit.png"/> Edit user Position of in the user group of an equipment.<br/>
<img src="icons/tag_blue_add.png"/> Reserve this equipment<br/>
<img src="icons/page_white_text_width.png"/> Details about this record/user/equipment<br/>
<img src="icons/page_white_edit.png"/> Edit this record/user/equipment<br/>
<img src="icons/key.png"/> Edit the password of this user<br/>
<img src="icons/exclamation.png"/> This status of this equipment is “broken”<br/>
<img src="icons/email.png"/> Generate email list that you can use in your email client<br/>
<img src="icons/delete.png"/> Remove this user from the user group of this equipment<br/>
<img src="icons/cross.png"/> Delete record/user/equipment<br/>
<img src="icons/comment.png"/> There is a description on this reservation<br/>
<img src="icons/cancel.png"/> Cloveriver has encountered errors<br/>
<img src="icons/bullet_error.png"/> Cloveriver does not function properly<br/>
<img src="icons/bug_error.png"/> The status of this equipment is problematic<br/>
<img src="icons/add.png"/> Add equipment/a user<br/>
<img src="icons/accept.png"/> The status of the equipment is normal or Cloveriver has processed your request<br/>
<img src="icons/user_go.png"/> Apply to the user group of this equipment <br/>
</div>
</div>
</div>
<div class="spacer"></div>
<div class="accordion">
<div class="ui-widget-header ui-corner-all accordionhead">FAQs</div>
<div class="text ui-widget-content ui-corner-all accordionbody">
<div style="margin: 5px">
<img src="icons/user_comment.png"/>I am a new user, but I cannot schedule the equipment I need. Why?<br/>
<img src="icons/bell.png"/>You need go to "Equipment" page, find the equipment you need, and click <img src="icons/user_go.png"/> to apply to the user group of that equipment.<br/>
<img src="icons/bullet_green.png"/>At this point you need to wait (or maybe contact the manager of the equipment which you can find in the "User Rights" page) for the manager to change your Position to member.<br/>
<img src="icons/bullet_green.png"/>Then the equipment will show on the "Scheduler" page, and everything will work.<br/>
<br/>
<img src="icons/user_comment.png"/>How can I add an equipment?<br/>
<img src="icons/bell.png"/>Only "admin" has the right to add and equipment<br/>
<img src="icons/bullet_green.png"/>Go to the "Equipment" page, click the <img src="icons/add.png"/> at the top of the table, and add the equipment.<br/>
<img src="icons/bullet_green.png"/>You need somebody (including you) apply to the user group of the new equipment,and make him/her/yourself a member/manager of that equipment user group<br/>
<img src="icons/bullet_green.png"/>Then that person can schedule the equipment<br/>
<br/>
<img src="icons/user_comment.png"/>What's the difference between "user", "member", "admin", and so on?<br/>
<img src="icons/bell.png"/>Cloveriver has three user types which are all together called "Identity" in the system.<br/>
<img src="icons/bullet_green.png"/>The three types of "Identities" are: "visitor", "user", and "admin". Visitors are the new user who just registered in Cloveriver, but has not been activated by the "admin". Users are normal users, and admins are the people have the highest power in the system. Admins can view/change/delete user or equipment information, so they should be honest responsible people.<br/>
<img src="icons/bullet_green.png"/>Each equipment has a user group, and only the member/manager can schedule that equipment. Each user group has three type of "Positions": "applicant", "member", and "manager". Applicants are the users who just applied to the user group but have not been approved by the manager. Members are just members, and managers are the ones in charge of managing the user group of that equipment.<br/>
<img src="icons/bullet_green.png"/>Cloveriver can have more than one admin in the system, and each equipment user group can have more than one manager.
<br/>
<img src="icons/user_comment.png"/>I am a manager of a equipment. How can I send an email to all the members in this equipment user group?<br/>
<img src="icons/bell.png"/>You cannot send email through Cloveriver, since we do not have a valid mail server in the Internet.<br/>
<img src="icons/bullet_green.png"/>However, Cloveriver will generate the email list of that equipment user group for you. The only thing you need to do is click the <img src="icons/email.png"/> in the "Actions" column of equipment list.<br/>
<br/>
<img src="icons/user_comment.png"/>Why this system is called Cloveriver?<br/>
<img src="icons/bell.png"/>Clover a simple Lab Information Management System developed by me (Xiao).<br/>
<img src="icons/bullet_green.png"/>Cloveriver belongs to that system but runs independently. The other part is called Clover Sundew<br/>
<img src="icons/bullet_green.png"/>The "river" in Cloveriver stands for Reserve Is Very Easy Right here, which means Cloveriver is a scheduling system.<br/>
<img src="icons/bullet_green.png"/>If you are interested in Clover Sundew, please let me know<br/>
<br/>
<img src="icons/user_comment.png"/>I accidentally deleted something. What should I do?<br/>
<img src="icons/bell.png"/>Nothing in Cloveriver will be actually deteled. They are just marked as deleted.<br/>
<img src="icons/bullet_green.png"/>If you are an admin/manager and accidentally deleted a user or an equipment, please find me (Xiao). Information can be recovered.<br/>
</div>
</div>
</div>
<div class="spacer"></div>
© <NAME>, 2012
<div class="spacer"></div>
</div>
</body>
</html> <file_sep>/README.md
# Cloveriver
An online scheduler
Cloveriver is a side project of Clover, a Simple Lab Information Management System (LIMS). It is used to manage and schedule lab instruments/equipment/rooms.
For Clover, please visit https://sourceforge.net/projects/cloversystem/ .
Cloveriver is very easy to use. Please visit https://sourceforge.net/p/cloveriver/wiki/Home/ for instructions.
I wrote this project in a hurry. The code is not pretty, but it has been tested in real use for over a year and is fully functional.
<file_sep>/server_side_equipment_ajax_search.php
<?php
include_once('_basics.php');
CheckUser();
$cOpt=new OperateEquipment();
echo $cOpt->AjaxSearch();
?>
<file_sep>/server_side_user_equipment_reservation_record_processing.php
<?php
include_once ('_basics.php');
CheckUser();
$cOpt=new OperateUserEquipmentReservations();
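/*
 * Expected POST parameters (summarized from the branches below):
 *   action = "reserve"            -> equipment_id, date, from, to, description
 *   action = "list"               -> date, equipment_id
 *   action = "delete_reservation" -> id
 * A "reserve" request is rejected with an error message when checkTimeConflict()
 * finds an overlapping reservation for the same equipment on the same date.
 */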
if($_POST["action"]=="reserve"){
$aRev=Array(
"user_id"=>$cOpt->iSessionUserId,
"equipment_id"=>$_POST["equipment_id"],
"date"=>$_POST["date"],
"from"=>$_POST["from"],
"to"=>$_POST["to"],
"description"=>$_POST["description"]
);
if ($cOpt->checkTimeConflict($aRev["equipment_id"], $aRev["date"], $aRev["from"], $aRev["to"])){
$aaData=Array(
"changed"=>0,
"errors"=>"The time slot you chose conflicts with others. Please reselect."
);
$jResult=json_encode($aaData);
}
else{
$aaData=Array($aRev);
$jResult=$cOpt->AjaxAdd($aaData);
}
}
elseif($_POST["action"]=="list") {
$jResult=$cOpt->AjaxGetReservation($_POST["date"], $_POST["equipment_id"]);
}
elseif($_POST["action"]=="delete_reservation"){
$aIds=Array($_POST["id"]);
$jResult=$cOpt->deleteRecords($aIds);
}
echo $jResult;
?>
<file_sep>/server_side_user_equipment_rights_record_processing.php
<?php
include_once('_basics.php');
CheckUser();
$cOpt = new OperateUserEquipmentRights();
$sErrors = "";
if ($cOpt->sSessionUserType != "admin") {
    //non-admins must be a manager of each equipment involved in the request
    $aIds = isset($_POST["sIds"]) ? explode(",", $_POST["sIds"]) : array();
    foreach ($aIds as $iId) {
$aRecord['user_id'] = $cOpt->iSessionUserId;
$aRecord['equipment_id'] = $iId;
$aRecord['position'] = 'manager';
if (!$cOpt->checkIfRecordExist($aRecord)) {
$sErrors.="You do not have rights to perform this operation.<br/>";
break;
}
}
}
if ($sErrors == "") {
if ($_POST['action'] == 'update') {
$aaValues = json_decode($_POST["values"], true);
$aKeys = json_decode($_POST['keys'], true);
for ($i = 0; $i < count($aaValues); $i++) {
for ($j = 0; $j < count($aKeys); $j++) {
$aaData[$i][$aKeys[$j]] = $aaValues[$i][$j];
}
}
$jResult = $cOpt->AjaxUpdate($aaData);
} elseif ($_POST['action'] == 'delete') {
$aIds = explode(",", $_POST["sIds"]);
$jResult = $cOpt->deleteRecords($aIds);
} elseif ($_POST["action"] == 'edit') {
$jResult = $cOpt->AjaxRead($_POST['iId'], "edit");
}
}else{
$aErrors=Array("changed"=>0, "errors"=>$sErrors);
$jResult=json_encode($aErrors);
}
echo $jResult;
?>
<file_sep>/server_side_user_ajax_search.php
<?php
include_once("_basics.php");
CheckUser();
$cUser=new OperateUsers();
echo $cUser->AjaxSearch();
?>
<file_sep>/setup.php
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">
<?php
include_once '_basics.php';
?>
<html>
<head>
<meta http-equiv="content-type" content="text/html; charset=utf-8" />
<title>Cloveriver</title>
<link rel="icon" href="icons/clover.ico" />
<style type="text/css" title="currentStyle">
@import "css/jquery-ui-1.8.16.custom.css";
@import "css/page.css";
</style>
<script type="text/javascript" src="js/jquery.js"></script>
<script type="text/javascript" src="js/jquery-ui-1.8.16.custom.min.js"></script>
<script type="text/javascript" src="js/common.js"></script>
<script type="text/javascript" charset="utf-8">
function fnRegister(){
var aName =new Array();
var aValue=new Array();
var aaValue=new Array();
$('#register_form input').each(function(){
aName.push(this.id);
if (this.id=="password"||this.id=="repeat_password"){
aValue.push(Sha1.hash($(this).val()));
}
else{
aValue.push($(this).val());
}
});
aaValue.push(aValue);
var sName=array2json(aName);
var sValue=array2json(aaValue);
$.post(
"server_side_setup_processing.php",
{keys: sName, values: sValue},
function (data){
if (data["changed"]==0){
$('#register_message').html(data["errors"]);
}
else{
window.location.href="scheduler.php";
}
},
"json"
).error(function (){alert("Setup failed due to server error!")});
}
function fnFormatRegisterDialog(){
$('#register_message').html("Please Enter");
$('#register_form input').val("");
$('#register_dialog').dialog("open");
}
$('document').ready(function(){
$('#register_dialog').dialog({
autoOpen: true,
width: 300,
modal: false,
open: function(event, ui) { $(this).parent().find(".ui-dialog-titlebar-close").hide(); },
buttons: {
"OK": function() {
fnRegister();
}
}
});
}
);
</script>
</head>
<body>
<?php
$whitelist = array('localhost', '127.0.0.1');
if (!in_array($_SERVER['HTTP_HOST'], $whitelist)) {
echo 'Forbidden! Please visit this page using "localhost".';
}
?>
<div id="register_dialog" title="Clover Setup">
<div id="register_message">Create an Administrator Account</div>
<form id="register_form" >
<fieldset>
<label>Name</label><br/><input type="text" name="Name" id="name" class="text ui-widget-content ui-corner-all"/><br/>
<label>Email</label><br/><input type="text" name="Email" id="email" class="text ui-widget-content ui-corner-all"/><br/>
<label>Password</label><br/><input type="password" name="Password" id="password" class="text ui-widget-content ui-corner-all"/><br/>
<label>Repeat Password</label><br/><input type="password" name="Repeat Password" id="repeat_password" class="text ui-widget-content ui-corner-all"/><br/>
<label>Description</label><br/><input type="text" name="Description" id="description" class="text ui-widget-content ui-corner-all"/><br/>
</fieldset>
</form>
</div>
</body>
</html>
<file_sep>/_basics.php
<?php
/* Contains the basic classes that handle
 * connection to the Clover database
 * creation of the Clover database
*
* bst: basic system table
* dbs: database services
*/
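/*
 * Typical page usage (a minimal sketch, mirroring about.php / help.php and the
 * server_side_*_ajax_search.php scripts):
 *
 *   include_once '_basics.php';
 *   CheckUser();                       // redirects to index.php if not logged in
 *   Menu();                            // prints the navigation bar
 *   $cOpt = new OperateUsers();        // or OperateEquipment(), etc.
 *   echo $cOpt->AjaxSearch();          // server-side data source for DataTables
 */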
function CheckUser() {
if (!isset($_SESSION['user_id'])) {
session_name("cloveriver");
session_start();
}
if (!isset($_SESSION['user_id'])) {
header('Location: index.php');
}
}
function Menu() {
$cOpt = new OperateUsers();
echo '<div id="menubar">
<div id="menu">
<ul>
<li class="clover">
<p class="cloversubtext"><a href="mailto:<EMAIL>" title="Contact <NAME>">Developed by<br/>© <NAME> 2012</a></p>
</li>
<li class="green">
<p><a href="scheduler.php">Scheduler</a></p>
<p class="subtext">Arrange your time</p>
</li>
<li class="orange">
<p><a href="equipment.php">Equipment</a></p>
<p class="subtext">What we can use</p>
</li>
<li class="red">
<p><a href="equipment_users.php">User Rights</a></p>
<p class="subtext">Who\'s using it</p>
</li>
<li class="cyan">
<p><a href="roster.php">Roster</a></p>
<p class="subtext">Meet your friends</p>
</li>
<li class="yellow">
<p><a href="about.php">About</a></p>
<p class="subtext">Know me more</p>
</li>
<li class="blue">
<p><a href="help.php">Help</a></p>
<p class="subtext">How may I help you</p>
</li>
<li class="pink">
<p>' . $cOpt->sSessionUserName . '</p>
<p class="subtext"><a href="logout.php">Logout<br/>May I see you again</a></p>
</li>
</ul>
</div>
<div id="menuright"></div>
</div>
<div id="messenger_dialog"><img/><label></label></div>
';
}
function sha256($sString) {
return hash("sha256", $sString);
}
class Connect2Clover {
public $sUsername = "root";
public $sPassword = "";
public $sHost = "127.0.0.1";
public $aErrors = array();
public $sCloverDatabaseName = "cloveriver";
public $timezone = "America/New_York";
public $datetimeformat = "m/d/Y, g:i a";
public $dateformat = "m/d/Y";
public $sSessionUserType;
public $sSessionUserName;
public $sSessionUserEmail;
public $iSessionUserId;
function __construct() {
date_default_timezone_set($this->timezone) or $this->aErrors[] = "System Error: cannot set timezone!";
$rLink = mysql_connect($this->sHost, $this->sUsername, $this->sPassword) or
$this->aErrors[] = "Database Error: cannot connect to mySQL datebase!";
mysql_query("SET NAMES 'UTF8'");
mysql_select_db($this->sCloverDatabaseName) or
$this->aErrors[] = "Database Error: cannot select the Clover database!";
//start session;
if (!isset($_SESSION['user_id'])) {
session_name("cloveriver");
session_start();
}
if (isset($_SESSION["user_id"])) {
$sQuery = "SELECT * FROM `bst_users` WHERE `id`='" . $_SESSION["user_id"] . "' AND `identity`!='visitor' AND `deleted`='0'";
$rResult = $this->queryClover($sQuery);
if ($rResult) {
$aRow = mysql_fetch_array($rResult);
if ($aRow) {
$this->sSessionUserEmail = $aRow["email"];
$this->sSessionUserName = $aRow["name"];
$this->sSessionUserType = $aRow["identity"];
$this->iSessionUserId = $_SESSION["user_id"];
}
} else {
$this->aErrors[] = "Database Error: cannot obtain user information!";
}
}
}
public function queryClover($sQuery) {
$rResult = mysql_query($sQuery) or $this->aErrors[] = "Database Error: cannot query the Clover database with query: " . $sQuery;
return $rResult;
}
function __destruct() {
if (count($this->aErrors)) {
foreach ($this->aErrors as &$sError) {
print_r($sError . '<br/>');
}
}
}
}
abstract class BasicSystemTables {
public static $aTables = array("bst_users", "bst_equipment", "bst_user_equipment_rights", "bst_user_equipment_reservations");
//bst=basic system table
//
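    //Each field descriptor maps the view types ("brief", "edit", "detail") to a display
    //order: 0 hides the field in that view, and non-zero values are used by
    //OperateTables::getFields() to sort the visible columns. The optional "function"
    //entry is an inLineFunction() expression used to render the cell (e.g. userID2Name).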
//the users table structure
public static $bst_users = array(
"email" => array("name" => "email", "label" => "Email", "brief" => "3", "edit" => "3", "detail" => "3", "data_type" => "char(64) COLLATE utf8_unicode_ci NOT NULL", "function" => "email2Link(email)"), /* User ID: user email */
"password" => array("name" => "<PASSWORD>", "label" => "<PASSWORD>", "brief" => "0", "edit" => "4", "detail" => "0", "data_type" => "char(64)COLLATE utf8_unicode_ci NOT NULL"), /* Password */
"name" => array("name" => "name", "label" => "Name", "brief" => "2", "edit" => "2", "detail" => "2", "data_type" => "varchar(128)COLLATE utf8_unicode_ci NOT NULL"), /* User's Name */
"identity" => array("name" => "identity", "label" => "Identity", "brief" => "5", "edit" => "5", "detail" => "5", "data_type" => "enum('visitor','user','admin') COLLATE utf8_unicode_ci NOT NULL DEFAULT 'visitor'"), /* user type */
"description" => array("name" => "description", "label" => "Description", "brief" => "6", "edit" => "6", "detail" => "6", "data_type" => "varchar(128)COLLATE utf8_unicode_ci NOT NULL")
);
//the equipment structure
public static $bst_equipment = array(
"name" => array("name" => "name", "label" => "Equipment Name", "brief" => "3", "edit" => "3", "detail" => "3", "data_type" => "varchar(128) COLLATE utf8_unicode_ci NOT NULL"),
"location" => array("name" => "location", "label" => "Equipment Location", "brief" => "4", "edit" => "4", "detail" => "4", "data_type" => "varchar(128)COLLATE utf8_unicode_ci NOT NULL"),
"status" => array("name" => "status", "label" => "Status", "brief" => "5", "edit" => "5", "detail" => "5", "data_type" => "enum('normal','problematic','broken') COLLATE utf8_unicode_ci NOT NULL DEFAULT 'normal'", "function" => "status2Icon(status)"),
"status_detail" => array("name" => "status_detail", "label" => "Status Detail", "brief" => "6", "edit" => "6", "detail" => "6", "data_type" => "varchar(128) COLLATE utf8_unicode_ci NOT NULL"),
"description" => array("name" => "description", "label" => "Description", "brief" => "7", "edit" => "7", "detail" => "7", "data_type" => "varchar(256)COLLATE utf8_unicode_ci NOT NULL")
);
//the user_equipment_rights structure
public static $bst_user_equipment_rights = array(
"user_id" => array("name" => "user_id", "label" => "User", "brief" => "3", "edit" => "0", "detail" => "3", "data_type" => "int(12) unsigned NOT NULL", "function" => "userID2Name(user_id)"),
"equipment_id" => array("name" => "equipment_id", "label" => "Equipment", "brief" => "4", "edit" => "0", "detail" => "4", "data_type" => "int(12) unsigned NOT NULL", "function" => "equipmentID2Name(equipment_id)"),
"position" => array("name" => "position", "label" => "Position", "brief" => "5", "edit" => "5", "detail" => "5", "data_type" => "enum('applicant','member','manager') COLLATE utf8_unicode_ci NOT NULL DEFAULT 'applicant'"),
);
//the user_equipment_reseration structure
public static $bst_user_equipment_reservations = array(
"user_id" => array("name" => "user_id", "label" => "User ID", "brief" => "3", "edit" => "3", "detail" => "3", "data_type" => "int(12) unsigned NOT NULL", "function" => "userID2Name(user_id)"), /* User ID: user email */
"equipment_id" => array("name" => "equipment_id", "label" => "Equipment ID", "brief" => "4", "edit" => "4", "detail" => "4", "data_type" => "int(12) unsigned NOT NULL"), /* Password */
"date" => array("name" => "date", "label" => "Date", "brief" => "5", "edit" => "5", "detail" => "5", "data_type" => "DATE NOT NULL"), /* user type */
"from" => array("name" => "from", "label" => "Reserved from", "brief" => "6", "edit" => "6", "detail" => "6", "data_type" => "INT( 2 ) NOT NULL"), /* user type */
"to" => array("name" => "to", "label" => "Reserved to", "brief" => "7", "edit" => "7", "detail" => "7", "data_type" => "INT( 2 ) NOT NULL"), /* user type */
"description" => array("name" => "description", "label" => "Description", "brief" => "8", "edit" => "8", "detail" => "8", "data_type" => "varchar(256)COLLATE utf8_unicode_ci NOT NULL", "function" => "comment2Icon(description)")
);
public static $aaDefault = array(
"id" => array("name" => "id", "label" => "ID", "brief" => "1", "edit" => "0", "detail" => "1", "data_type" => "int(12) unsigned NOT NULL AUTO_INCREMENT, PRIMARY KEY (`id`)"),
"deleted" => array("name" => "deleted", "brief" => "0", "edit" => "0", "detail" => "0", "label" => "Deleted", "data_type" => "int(12) unsigned NOT NULL DEFAULT '0'"),
"last_modified_by" => array("name" => "last_modified_by", "brief" => "0", "edit" => "0", "detail" => "254", "label" => "Last Modified by", "data_type" => "int(12) unsigned NOT NULL DEFAULT '0'", "function" => "userID2Name(last_modified_by)"),
"last_modified_on" => array("name" => "last_modified_on", "brief" => "0", "edit" => "0", "detail" => "255", "label" => "Last Modified on", "data_type" => "varchar(64)COLLATE utf8_unicode_ci NOT NULL")
//array("name"=>"shared_with", "label"=>"Shared with", "data_type"=>"enum('not shared', 'group', 'public') COLLATE utf8_unicode_ci NOT NULL DEFAULT 'not shared'"),
//array("name"=>"share_policy", "label"=>"Share Policy", "data_type"=>"enum('name only','private', 'protected', 'public') COLLATE utf8_unicode_ci NOT NULL DEFAULT 'name only'")
);
}
class OperateTables extends Connect2Clover {
public $sTableName; //database service name, ie. the table name.
public $aaTableStructure = array();
public $aActionIcons = array();
public $aActionAuths = array();
function __construct($sTableName, &$aaTableStructure) {
parent::__construct();
$this->sTableName = $sTableName;
$this->aaTableStructure = array_merge(BasicSystemTables::$aaDefault, $aaTableStructure);
}
function getFields(&$aFields, $sColumn = "name", $sType = "system") {//type: system-all the fields, detail-detail view for users, edit-edit view for users, brief-brief view for users.
$aTemp = array();
foreach ($this->aaTableStructure as &$aStruct) {
if ($sType == "system") {
$aFields[] = $aStruct[$sColumn];
} elseif ($aStruct[$sType] != 0) {
$aTemp[$aStruct[$sType]] = $aStruct[$sColumn];
}
}
//sort Fields;
if ($sType == "system") {
asort($aFields);
} else {
ksort($aTemp); //sort fields by their Key, which is the defined value in each view type.
foreach ($aTemp as &$value) {
$aFields[] = $value;
}
}
}
function checkIfRecordExist($aRecord) { //returns true if the record exists.
$sQuery = "SELECT `id` FROM `" . $this->sTableName . "` WHERE `deleted`=0 AND ";
$aTemp = array();
foreach ($aRecord as $key => $value) {
$aTemp[] = "`" . $key . "`='" . mysql_real_escape_string($value) . "'";
}
$sQuery.=implode(" AND ", $aTemp);
$rResult = $this->queryClover($sQuery);
return mysql_num_rows($rResult);
}
function checkIfRecordIDExist($iId) {
$sQuery = "SELECT `id` FROM `" . $this->sTableName . "` WHERE `id`='" . $iId . "'";
$rResult = $this->queryClover($sQuery);
return mysql_num_rows($rResult);
}
public function userID2Name($index, &$aRow) {
$iId = $aRow[$index];
$sQuery = "SELECT name, email FROM bst_users WHERE `deleted`=0 AND `id`='" . $iId . "'";
$rResult = $this->queryClover($sQuery);
$row = mysql_fetch_array($rResult);
if ($row) {
return '<a href="mailto:' . $row["email"] . '" title="Contact ' . $row["name"] . ': ' . $row["email"] . '">' . $row["name"] . '</a>';
} else
return "User was deleted";
}
public function comment2Icon($index, &$aRow) {
if ($aRow[$index] == "") {
return "";
} else {
return '<img src="icons/comment.png" title="' . $aRow[$index] . '"/>';
}
}
public function equipmentID2Name($index, &$aRow) {
$iId = $aRow[$index];
$sQuery = "SELECT name FROM bst_equipment WHERE `deleted`=0 AND `id`='" . $iId . "'";
$rResult = $this->queryClover($sQuery);
if ($rResult) {
$row = mysql_fetch_array($rResult);
if ($row) {
return $row["name"];
} else {
return "Equipment was deleted";
}
} else {
return $iId;
}
}
public function readRecord($id, $sType = "system", $withLabel = false) {
$aResult = array();
$aColumns = array();
$aLabel = array();
$this->getFields($aColumns, "name", $sType);
$this->getFields($aLabel, "label", $sType);
$sQuery = "SELECT " . str_replace(" , ", " ", implode(", ", $aColumns)) . "
FROM " . $this->sTableName . " WHERE `id`='" . $id . "'";
$rResult = $this->queryClover($sQuery);
while ($aRow = mysql_fetch_array($rResult)) {
for ($i = 0; $i < count($aColumns); $i++) {
if (isset($this->aaTableStructure[$aColumns[$i]]["function"]) && $withLabel) {
$aResult[($withLabel ? $aLabel[$i] : $aColumns[$i])] = $this->inLineFunction($this->aaTableStructure[$aColumns[$i]]["function"], $aRow);
} else {
$aResult[($withLabel ? $aLabel[$i] : $aColumns[$i])] = $aRow[$aColumns[$i]];
}
}
}
return $aResult;
}
public function addRecords(&$aaData) {//return inserted id;
if (count($aaData)) {
$aName = array();
$this->getFields($aName);
$sQuery = "INSERT INTO `" . $this->sTableName . "` ( `" . implode("`, `", $aName) . "` ) VALUES ";
$aTemp = array();
foreach ($aaData as &$aRecord) {
$aRecord["id"] = "";
$aRecord["deleted"] = 0;
$aRecord["last_modified_by"] = $this->iSessionUserId;
$aRecord["last_modified_on"] = date($this->datetimeformat);
foreach ($aRecord as &$sData) {
$sData = mysql_real_escape_string($sData);
}
ksort($aRecord);
$aTemp[] = "('" . implode("', '", $aRecord) . "')";
}
$sQuery.=implode(",", $aTemp);
$this->queryClover($sQuery);
return mysql_insert_id();
} else {
return 0;
}
}
public function deleteRecords(&$aIds) {//return the number of the successfully deleted records.
$iResult = 0;
foreach ($aIds as &$iId) {
$sQuery = "UPDATE `" . $this->sTableName . "` SET `last_modified_by` ='" . mysql_real_escape_string($this->iSessionUserId) . "', `last_modified_on`='" . date($this->datetimeformat) . "', `deleted`=" . $iId . " WHERE `id`='" . $iId . "'";
if ($this->queryClover($sQuery)) {
$iResult++;
}
}
return $iResult;
}
public function checkIdentity($sRules, &$aRow) {
$aRules = explode(",", $sRules);
if (in_array($this->sSessionUserType, $aRules) !== false) {
return true;
} elseif ((in_array("self", $aRules) !== false) && (isset($aRow["email"]) ? ($aRow["email"] == $this->sSessionUserEmail) : ($aRow["user_id"] == $this->iSessionUserId))) {
return true;
}
}
public function checkPosition($sRules, &$aRow) {
$position = "";
$sQuery = "SELECT `position`
FROM bst_user_equipment_rights
WHERE `deleted`=0 AND `equipment_id`=" . $aRow['equipment_id'] . " AND user_id=$this->iSessionUserId";
$rResult = $this->queryClover($sQuery);
if ($rResult) {
while ($row = mysql_fetch_array($rResult)) {
$position = $row["position"];
}
$aRules = explode(",", $sRules);
return (in_array($position, $aRules) !== false);
} else {
return false;
}
}
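    /*
     * inLineFunction() below evaluates the small rule expressions stored in
     * $aActionAuths, for example:
     *   'checkIdentity(admin)|checkPosition(manager)'  -> admin OR manager of the equipment
     *   '!checkPosition(applicant,member,manager)'     -> not yet in the equipment's user group
     * Supported operators are "|" (or), "&" (and), "." (string concatenation) and a
     * leading "!" that negates a single call; each call name must be a method of this
     * class that accepts ($sArgument, &$aRow).
     */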
public function inLineFunction($sFunction, &$aRow) {//$aRow is the Row from table search;
$pattern = '/([\|\&\.])?(!)?([^\|\&\.]+)/';
preg_match_all($pattern, $sFunction, $aaMatches, PREG_SET_ORDER);
$pattern = '/([^\(\)]+)\(([^\(\)]+)?\)/';
$bResult;
foreach ($aaMatches as &$aMatch) {
preg_match($pattern, $aMatch[3], $aFn);
if ($aMatch[1] == "|") {
$bResult = $bResult || ($aMatch[2] == "!" ? !$this->$aFn[1]($aFn[2], $aRow) : $this->$aFn[1]($aFn[2], $aRow));
} elseif ($aMatch[1] == "&") {
$bResult = $bResult && ($aMatch[2] == "!" ? !$this->$aFn[1]($aFn[2], $aRow) : $this->$aFn[1]($aFn[2], $aRow));
} elseif ($aMatch[1] == ".") {
$bResult = $bResult . $this->$aFn[1]($aFn[2], $aRow);
} else {
$bResult = ($aMatch[2] == "!" ? !$this->$aFn[1]($aFn[2], $aRow) : $this->$aFn[1]($aFn[2], $aRow));
}
}
return $bResult;
}
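    /*
     * Update strategy used by updateRecords() below: before a row is overwritten, a copy
     * of its current values is inserted and the copy's `deleted` column is set to the
     * original row id, so it serves as a revision in the "trash can". For example,
     * updating record id 7 first inserts a copy with deleted=7 and then updates row 7
     * in place, stamping last_modified_by/last_modified_on.
     */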
public function updateRecords(&$aaData) { //return the number of the successfully updated records
$iResult = 0;
foreach ($aaData as &$aRecord) {
// insert new records;
if ($this->checkIfRecordIDExist($aRecord['id'])) {
//make a new copy of the old record;
$aOldRecord = $this->readRecord($aRecord['id'], "system", FALSE);
$aaRecords = array($aOldRecord);
$iLastId = $this->addRecords($aaRecords);
//change the deleted field of the new copy to its original ID; this will serve as the deleted one in the trash can;
$sQuery = "UPDATE `" . $this->sTableName . "` SET `deleted`=" . $aRecord['id'] . " WHERE `id`='" . $iLastId . "'";
$this->queryClover($sQuery);
//update the old record information;
$sQuery = "UPDATE `" . $this->sTableName . "` SET ";
foreach ($aRecord as $sKey => &$sValue) {
$sQuery.="`" . $sKey . "`='" . mysql_real_escape_string($sValue) . "',";
}
$sQuery.="`last_modified_by`='" . $this->iSessionUserId . "',";
$sQuery.="`last_modified_on`='" . mysql_real_escape_string(date($this->datetimeformat)) . "'";
$sQuery.=" WHERE `id`='" . $aRecord['id'] . "'";
$this->queryClover($sQuery);
$iResult++;
}
}
return $iResult;
}
public function AjaxSearch() {
/*
* Script: DataTables server-side script for PHP and MySQL
* Copyright: 2010 - <NAME>
* License: GPL v2 or BSD (3-point)
*/
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* Easy set variables
*/
/* Array of database columns which should be read and sent back to DataTables. Use a space where
* you want to insert a non-database field (for example a counter or static image)
*/
//Get the Columns of the table.
$aColumns = array();
$this->getFields($aColumns, "name", "brief");
/* Indexed column (used for fast and accurate table cardinality) */
$sIndexColumn = "id";
/* DB table to use */
$sTable = $this->sTableName;
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* If you just want to use the basic configuration for DataTables with PHP server-side, there is
* no need to edit below this line
*/
/*
* Paging
*/
$sLimit = "";
if (isset($_GET['iDisplayStart']) && $_GET['iDisplayLength'] != '-1') {
$sLimit = "LIMIT " . mysql_real_escape_string($_GET['iDisplayStart']) . ", " .
mysql_real_escape_string($_GET['iDisplayLength']);
}
/*
* Ordering
*/
if (isset($_GET['iSortCol_0'])) {
$sOrder = "ORDER BY ";
for ($i = 0; $i < intval($_GET['iSortingCols']); $i++) {
if ($_GET['bSortable_' . intval($_GET['iSortCol_' . $i])] == "true") {
$sOrder .= $aColumns[intval($_GET['iSortCol_' . $i]) - 1] . "
" . mysql_real_escape_string($_GET['sSortDir_' . $i]) . ", "; //intval( $_GET['iSortCol_'.$i])-1, the -1 is to cancel out the first actions column;
}
}
$sOrder = substr_replace($sOrder, "", -2);
if ($sOrder == "ORDER BY") {
$sOrder = "";
}
}
/*
* Filtering
* NOTE this does not match the built-in DataTables filtering which does it
* word by word on any field. It's possible to do here, but concerned about efficiency
* on very large tables, and MySQL's regex functionality is very limited
*/
$sWhere = "WHERE (`deleted`='0')";
if ($_GET['sSearch'] != "") {
$sWhere = "WHERE (`deleted`='0') AND (";
for ($i = 0; $i < count($aColumns); $i++) {
$sWhere .= $aColumns[$i] . " LIKE '%" . mysql_real_escape_string($_GET['sSearch']) . "%' OR ";
}
$sWhere = substr_replace($sWhere, "", -3);
$sWhere .= ')';
}
/* Individual column filtering */
for ($i = 0; $i < count($aColumns); $i++) {
if ($_GET['bSearchable_' . $i] == "true" && $_GET['sSearch_' . $i] != '') {
if ($sWhere == "") {
$sWhere = "WHERE ";
} else {
$sWhere .= " AND ";
}
$sWhere .= $aColumns[$i] . " LIKE '%" . mysql_real_escape_string($_GET['sSearch_' . $i]) . "%' ";
}
}
/*
* SQL queries
* Get data to display
*/
$sQuery = "
SELECT SQL_CALC_FOUND_ROWS " . str_replace(" , ", " ", implode(", ", $aColumns)) . "
FROM $sTable
$sWhere
$sOrder
$sLimit
";
$rResult = $this->queryClover($sQuery);
/* Data set length after filtering */
$sQuery = "
SELECT FOUND_ROWS()
";
$rResultFilterTotal = $this->queryClover($sQuery);
$aResultFilterTotal = mysql_fetch_array($rResultFilterTotal);
$iFilteredTotal = $aResultFilterTotal[0];
/* Total data set length */
$sQuery = "
SELECT COUNT(" . $sIndexColumn . ")
FROM $sTable
";
$rResultTotal = $this->queryClover($sQuery);
$aResultTotal = mysql_fetch_array($rResultTotal);
$iTotal = $aResultTotal[0];
/*
* Output
*/
$output = array(
"sEcho" => intval($_GET['sEcho']),
"iTotalRecords" => $iTotal,
"iTotalDisplayRecords" => $iFilteredTotal,
"aaData" => array()
);
while ($aRow = mysql_fetch_array($rResult)) {
$row = array();
$sAct = "";
/* put actions icons here for each row;
foreach ($this->aActionIcons as $sKey=>&$aActions){
$aRules=explode(",", $this->aActionAuths[$sKey]);
if(in_array($this->sSessionUserType, $aRules)){
$sAct.=$aActions;
}
elseif(in_array("self", $aRules)&&$aRow["email"]==$this->sSessionUserEmail){
$sAct.=$aActions;
}
}
$row[]=$sAct;
*/
foreach ($this->aActionIcons as $sKey => &$Action) {
if ($this->inLineFunction($this->aActionAuths[$sKey], $aRow)) {
$sAct.=$Action;
}
}
$row[] = $sAct;
for ($i = 0; $i < count($aColumns); $i++) {
if (isset($this->aaTableStructure[$aColumns[$i]]["function"])) {
$row[] = $this->inLineFunction($this->aaTableStructure[$aColumns[$i]]["function"], $aRow);
} else {
$row[] = $aRow[$aColumns[$i]];
}
}
$output['aaData'][] = $row;
}
return json_encode($output);
}
public function AjaxAdd(&$aaData) {
$iResult = $this->addRecords($aaData);
$aResult = array("changed" => $iResult);
return json_encode($aResult);
}
public function AjaxDelete(&$aIds) {
$iResult = $this->deleteRecords($aIds);
$aResult = array("changed" => $iResult);
return json_encode($aResult);
}
public function AjaxUpdate(&$aaData) {
$iResult = $this->updateRecords($aaData);
$aResult = array("changed" => $iResult);
return json_encode($aResult);
}
public function AjaxRead($id, $sType, $withlabel = false) {//read the field from a record whose id is $id according to $sType--detail, brief, detail, system;
return json_encode($this->readRecord($id, $sType, $withlabel));
}
}
class OperateUsers extends OperateTables {
function __construct() {
parent::__construct('bst_users', BasicSystemTables::$bst_users);
$this->aActionIcons["detail"] = '<img src="icons/page_white_text_width.png" name="action_close" title="Show/Hide details">';
$this->aActionIcons["edit"] = '<img src="icons/page_white_edit.png" name="action_edit" title="Edit this user">';
$this->aActionIcons["delete"] = '<img src="icons/cross.png" name="action_delete" title="Delete this user">';
$this->aActionIcons["password"] = '<img src="icons/key.png" name="action_password" title="Change the password of this user">';
$this->aActionAuths["detail"] = 'checkIdentity(user,admin)';
$this->aActionAuths["edit"] = 'checkIdentity(self,admin)';
$this->aActionAuths["delete"] = 'checkIdentity(admin)';
$this->aActionAuths["password"] = '<PASSWORD>)';
}
public function Login($sEmail, $sPassword) {//return the id of the user or zero if not logged in;
$aRecord["email"] = $sEmail;
$aRecord["password"] = <PASSWORD>56($sPassword);
$aRecord["deleted"] = "0";
$sQuery = "SELECT * FROM `" . $this->sTableName . "` WHERE ";
$aTemp = array();
foreach ($aRecord as $key => $value) {
$aTemp[] = "`" . $key . "`='" . mysql_real_escape_string($value) . "'";
}
$sQuery.=implode(" AND ", $aTemp);
$rResult = $this->queryClover($sQuery);
if (mysql_num_rows($rResult)) {
$aRow = mysql_fetch_array($rResult);
$iId = $aRow["id"];
$sIdentity = $aRow["identity"];
if ($sIdentity == "visitor") {
return "You have a visitor account. Please wait for activation by the administrator ";
} else {
$this->sSessionUserEmail = $aRow["email"];
$this->sSessionUserName = $aRow["name"];
$this->sSessionUserType = $aRow["identity"];
$this->iSessionUserId = $iId;
$_SESSION["user_id"] = $iId;
return "OK";
}
} else {
return "User information does not match. Please try again.";
}
}
function checkIfUserExist($email) {
$aEmail = array("email" => $email, "deleted" => '0');
return $this->checkIfRecordExist($aEmail);
}
public function AddUser($aUser) {
if (!$this->checkIfUserExist($aUser["email"])) {
$aaUser = array($aUser);
$this->addRecords($aaUser);
return true; //added
} else {
return false; //user already exists;
}
}
public function UpdateUser($aaUsers) {
return $this->updateRecords($aaUsers);
}
public function ReadUser($iId) {
return $this->readRecord($iId, "edit", FALSE);
}
public function DeleteUser(&$aIds) {
return $this->deleteRecords($aIds);
}
public function email2Link($index, $aRow) {
return '<a href="mailto:' . $aRow[$index] . '">' . $aRow[$index] . '</a>';
}
public function AjaxGenerateEmailList() {
$sQuery = "SELECT email
FROM bst_users WHERE deleted=0 AND identity!='visitor'";
$sList = "";
$rResult = $this->queryClover($sQuery);
if ($rResult) {
while ($aRow = mysql_fetch_array($rResult)) {
$sList.=$aRow["email"] . ";";
}
$aData = Array("changed" => 1, "emaillist" => $sList);
} else {
$aData = Array("changed" => 0);
}
return json_encode($aData);
}
}
class OperateEquipment extends OperateTables {
public function __construct() {
parent::__construct("bst_equipment", BasicSystemTables::$bst_equipment);
$this->aActionIcons["detail"] = '<img src="icons/page_white_text_width.png" name="action_close" title="Show/Hide details">';
$this->aActionIcons["edit"] = '<img src="icons/page_white_edit.png" name="action_edit" title="Edit this equipment">';
$this->aActionIcons["delete"] = '<img src="icons/cross.png" name="action_delete" title="Delete this equipment">';
$this->aActionIcons["apply"] = '<img src="icons/user_go.png" name="action_apply" title="Apply for using this equipment">';
$this->aActionIcons["reserve"] = '<img src="icons/tag_blue_add.png" name="action_reserve" title="Reserve this equipment">';
$this->aActionIcons["emaillist"] = '<img src="icons/email.png" name="action_emaillist" title="Generate email list of the user group of this equipment">';
$this->aActionAuths["detail"] = 'checkIdentity(user,admin)';
$this->aActionAuths["edit"] = 'checkIdentity(admin)|checkPosition(manager)';
$this->aActionAuths["delete"] = 'checkIdentity(admin)';
$this->aActionAuths["reserve"] = 'checkPosition(member,manager)';
$this->aActionAuths["apply"] = '!checkPosition(applicant,member,manager)';
$this->aActionAuths["emaillist"] = 'checkIdentity(admin)|checkPosition(manager)';
}
public function checkPosition($sRules, &$aRow) {
$aRules = explode(",", $sRules);
$sPosition = $this->EquipmentId2UserPosition($aRow["id"]); //the id here is the equipment id;
return in_array($sPosition, $aRules);
}
public function EquipmentId2UserPosition($iEquipmentId) {
$sQuery = "SELECT `position` FROM `bst_user_equipment_rights` WHERE `deleted`=0 AND `user_id`='" . $this->iSessionUserId . "' AND `equipment_id`='" . $iEquipmentId . "'";
$rResult = $this->queryClover($sQuery);
if ($rResult) {
while ($row = mysql_fetch_array($rResult)) {
return $row["position"];
}
}
}
public function status2Icon($index, $aRow) {
if ($aRow[$index] == "normal") {
return '<img src="icons/accept.png" title="Normal"/>';
} elseif ($aRow[$index] == "problematic") {
return '<img src="icons/bug_error.png" title="Problematic"/>';
} elseif ($aRow[$index] == "broken") {
return '<img src="icons/exclamation.png" title="Broken"/>';
}
}
public function AjaxGenerateEquipmentUserGroupEmailList($iEquipment_id) {
$sQuery = "SELECT bst_users.email
FROM bst_user_equipment_rights
LEFT JOIN bst_users ON bst_user_equipment_rights.user_id=bst_users.id
WHERE bst_users.deleted=0 AND
bst_user_equipment_rights.deleted=0 AND
bst_user_equipment_rights.position!='applicant' AND
bst_user_equipment_rights.equipment_id=$iEquipment_id";
$sList = "";
$rResult = $this->queryClover($sQuery);
if ($rResult) {
while ($aRow = mysql_fetch_array($rResult)) {
$sList.=$aRow["email"] . ";";
}
$aData = Array("changed" => 1, "emaillist" => $sList);
} else {
$aData = Array("changed" => 0);
}
return json_encode($aData);
}
}
class OperateUserEquipmentRights extends OperateTables {
public function __construct() {
parent::__construct("bst_user_equipment_rights", BasicSystemTables::$bst_user_equipment_rights);
$this->aActionIcons["change_position"] = '<img src="icons/user_edit.png" name="action_change_position" title="Change the Position of this user in this equipment\'s user group">';
$this->aActionIcons["delete"] = '<img src="icons/delete.png" name="action_delete" title="Remove this user from the user group of this equipment">';
$this->aActionAuths["change_position"] = 'checkIdentity(admin)||checkPosition(manager)';
$this->aActionAuths["delete"] = 'checkIdentity(admin)||checkPosition(manager)';
}
public function AjaxSearch() {
$aColumns = array();
$this->getFields($aColumns, "name", "brief");
$atColumns = Array();
foreach ($aColumns as &$Col) {
$atColumns[] = "bst_user_equipment_rights." . $Col;
}
/* Indexed column (used for fast and accurate table cardinality) */
$sIndexColumn = "id";
/* DB table to use */
$sTable = $this->sTableName;
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* If you just want to use the basic configuration for DataTables with PHP server-side, there is
* no need to edit below this line
*/
/*
* Paging
*/
$sLimit = "";
if (isset($_GET['iDisplayStart']) && $_GET['iDisplayLength'] != '-1') {
$sLimit = "LIMIT " . mysql_real_escape_string($_GET['iDisplayStart']) . ", " .
mysql_real_escape_string($_GET['iDisplayLength']);
}
/*
* Ordering
*/
if (isset($_GET['iSortCol_0'])) {
$sOrder = "ORDER BY ";
for ($i = 0; $i < intval($_GET['iSortingCols']); $i++) {
if ($_GET['bSortable_' . intval($_GET['iSortCol_' . $i])] == "true") {
$sOrder .= $aColumns[intval($_GET['iSortCol_' . $i]) - 1] . "
" . mysql_real_escape_string($_GET['sSortDir_' . $i]) . ", "; //intval( $_GET['iSortCol_'.$i])-1, the -1 is to cancel out the first actions column;
}
}
$sOrder = substr_replace($sOrder, "", -2);
if ($sOrder == "ORDER BY") {
$sOrder = "";
}
}
/*
* Filtering
* NOTE this does not match the built-in DataTables filtering which does it
* word by word on any field. It's possible to do here, but concerned about efficiency
* on very large tables, and MySQL's regex functionality is very limited
*/
$aEColumns = Array(
"bst_users.name",
"bst_equipment.name",
"bst_user_equipment_rights.position"
);
$sWhere = "WHERE (bst_users.deleted=0) AND (bst_equipment.deleted=0) AND (bst_user_equipment_rights.deleted=0)";
if ($_GET['sSearch'] != "") {
$sWhere = "WHERE (bst_users.deleted=0) AND (bst_equipment.deleted=0) AND (bst_user_equipment_rights.deleted=0) AND (";
for ($i = 0; $i < count($aEColumns); $i++) {
$sWhere .= $aEColumns[$i] . " LIKE '%" . mysql_real_escape_string($_GET['sSearch']) . "%' OR ";
}
$sWhere = substr_replace($sWhere, "", -3);
$sWhere .= ')';
}
/*
* SQL queries
* Get data to display
*/
$sQuery = "
SELECT SQL_CALC_FOUND_ROWS " . str_replace(" , ", " ", implode(", ", $atColumns)) . "
FROM bst_users RIGHT JOIN bst_user_equipment_rights ON bst_users.id=bst_user_equipment_rights.user_id
LEFT JOIN bst_equipment ON bst_user_equipment_rights.equipment_id=bst_equipment.id
$sWhere
$sOrder
$sLimit
";
$rResult = $this->queryClover($sQuery);
/* Data set length after filtering */
$sQuery = "
SELECT FOUND_ROWS()
";
$rResultFilterTotal = $this->queryClover($sQuery);
$aResultFilterTotal = mysql_fetch_array($rResultFilterTotal);
$iFilteredTotal = $aResultFilterTotal[0];
/* Total data set length */
$sQuery = "
SELECT COUNT(" . $sIndexColumn . ")
FROM $sTable WHERE `deleted`=0;
";
$rResultTotal = $this->queryClover($sQuery);
$aResultTotal = mysql_fetch_array($rResultTotal);
$iTotal = $aResultTotal[0];
/*
* Output
*/
$output = array(
"sEcho" => intval($_GET['sEcho']),
"iTotalRecords" => $iTotal,
"iTotalDisplayRecords" => $iFilteredTotal,
"aaData" => array()
);
while ($aRow = mysql_fetch_array($rResult)) {
$row = array();
$sAct = "";
foreach ($this->aActionIcons as $sKey => &$Action) {
if ($this->inLineFunction($this->aActionAuths[$sKey], $aRow)) {
$sAct.=$Action;
}
}
$row[] = $sAct;
for ($i = 0; $i < count($aColumns); $i++) {
if (isset($this->aaTableStructure[$aColumns[$i]]["function"])) {
$row[] = $this->inLineFunction($this->aaTableStructure[$aColumns[$i]]["function"], $aRow);
} else {
$row[] = $aRow[$aColumns[$i]];
}
}
$output['aaData'][] = $row;
}
return json_encode($output);
}
}
class OperateUserEquipmentReservations extends OperateTables {
public function __construct() {
parent::__construct("bst_user_equipment_reservations", BasicSystemTables::$bst_user_equipment_reservations);
$this->aActionIcons["delete"] = '<img src="icons/cross.png" name="action_delete_reservation" title="Delete this reservation">';
$this->aActionAuths["delete"] = 'checkIdentity(admin,self)||checkPosition(manager)';
}
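    /*
     * checkTimeConflict() returns a non-zero row count when the requested slot overlaps
     * an existing reservation for the same equipment on the same date: the new slot
     * starts inside an existing one, ends inside one, or completely covers one.
     * For example, with an existing 10-12 booking, a new 11-13 request conflicts,
     * while a 12-14 request does not.
     */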
public function checkTimeConflict($iEquipment_id, $sDate, $iFrom, $iTo) {
$sQuery = "SELECT `from`, `to`
FROM bst_user_equipment_reservations
WHERE `deleted`=0 AND `equipment_id`=$iEquipment_id AND `date`='$sDate'
AND (
(`from`<$iFrom AND `to`>$iFrom)
OR
(`from`<$iTo AND `to`>$iTo)
OR
(`from`>=$iFrom AND `to`<=$iTo)
)";
$result = $this->queryClover($sQuery);
return mysql_num_rows($result);
}
public function AjaxGetReservation($sDate, $equipment_id) {//date in YYYY-mm-dd format;
$aColumns = array();
$this->getFields($aColumns, "name", "brief");
$sQuery = "SELECT `" . str_replace(" , ", " ", implode("`, `", $aColumns)) . "`
FROM bst_user_equipment_reservations
WHERE `deleted`=0 AND `date`='$sDate' AND `equipment_id`=$equipment_id" . " ORDER BY `from`";
$output = array();
$rResult = $this->queryClover($sQuery);
while ($aRow = mysql_fetch_array($rResult)) {
$sAct = "";
foreach ($this->aActionIcons as $sKey => &$Action) {
if ($this->inLineFunction($this->aActionAuths[$sKey], $aRow)) {
$sAct.=$Action;
}
}
$row["actions"] = $sAct;
for ($i = 0; $i < count($aColumns); $i++) {
if (isset($this->aaTableStructure[$aColumns[$i]]["function"])) {
$row[$aColumns[$i]] = $this->inLineFunction($this->aaTableStructure[$aColumns[$i]]["function"], $aRow);
} else {
$row[$aColumns[$i]] = $aRow[$aColumns[$i]];
}
}
$output[] = $row;
}
return json_encode($output);
}
}
class OperateMyEquipment extends OperateEquipment {
public function __construct() {
parent::__construct();
}
public function getFields(&$aFields, $sColumn = "name", $sType = "system") {
parent::getFields($aFields, $sColumn, $sType);
if ($sType == "brief" || $sType == "detail")
$aFields[] = BasicSystemTables::$bst_user_equipment_rights["position"][$sColumn];
}
public function getAuthEquipment() {
$sQuery = "SELECT bst_equipment.id, bst_equipment.name
FROM bst_equipment RIGHT JOIN bst_user_equipment_rights ON bst_equipment.id=bst_user_equipment_rights.equipment_id
WHERE bst_equipment.deleted=0 AND bst_user_equipment_rights.deleted=0 AND bst_user_equipment_rights.position!='applicant' AND bst_user_equipment_rights.user_id=" . $this->iSessionUserId;
$rResult = $this->queryClover($sQuery);
$aEquip = Array();
if ($rResult) {
while ($row = mysql_fetch_array($rResult)) {
$aEquip[] = Array("id" => $row["id"], "name" => $row["name"]);
}
}
return $aEquip;
}
public function AjaxSearch() {
$aColumns = array();
$this->getFields($aColumns, "name", "brief");
/* Indexed column (used for fast and accurate table cardinality) */
$sIndexColumn = "id";
/* DB table to use */
$sTable = $this->sTableName;
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* If you just want to use the basic configuration for DataTables with PHP server-side, there is
* no need to edit below this line
*/
/*
* Paging
*/
$sLimit = "";
if (isset($_GET['iDisplayStart']) && $_GET['iDisplayLength'] != '-1') {
$sLimit = "LIMIT " . mysql_real_escape_string($_GET['iDisplayStart']) . ", " .
mysql_real_escape_string($_GET['iDisplayLength']);
}
/*
* Ordering
*/
if (isset($_GET['iSortCol_0'])) {
$sOrder = "ORDER BY ";
for ($i = 0; $i < intval($_GET['iSortingCols']); $i++) {
if ($_GET['bSortable_' . intval($_GET['iSortCol_' . $i])] == "true") {
$sOrder .= $aColumns[intval($_GET['iSortCol_' . $i]) - 1] . "
" . mysql_real_escape_string($_GET['sSortDir_' . $i]) . ", "; //intval( $_GET['iSortCol_'.$i])-1, the -1 is to cancel out the first actions column;
}
}
$sOrder = substr_replace($sOrder, "", -2);
if ($sOrder == "ORDER BY") {
$sOrder = "";
}
}
/*
* Filtering
* NOTE this does not match the built-in DataTables filtering which does it
* word by word on any field. It's possible to do here, but concerned about efficiency
* on very large tables, and MySQL's regex functionality is very limited
*/
$sWhere = "WHERE ($sTable.deleted=0) AND (bst_user_equipment_rights.deleted=0) AND (bst_user_equipment_rights.user_id=$this->iSessionUserId)";
if ($_GET['sSearch'] != "") {
$sWhere = "WHERE ($sTable.deleted=0) AND (bst_user_equipment_rights.deleted=0) AND (bst_user_equipment_rights.user_id=$this->iSessionUserId) AND (";
for ($i = 0; $i < count($aColumns); $i++) {
if ($aColumns[$i] == "id") {
$sWhere .= "bst_equipment." . $aColumns[$i] . " LIKE '%" . mysql_real_escape_string($_GET['sSearch']) . "%' OR ";
} else {
$sWhere .=$aColumns[$i] . " LIKE '%" . mysql_real_escape_string($_GET['sSearch']) . "%' OR ";
}
}
$sWhere = substr_replace($sWhere, "", -3);
$sWhere .= ')';
}
/* Individual column filtering */
for ($i = 0; $i < count($aColumns); $i++) {
if ($_GET['bSearchable_' . $i] == "true" && $_GET['sSearch_' . $i] != '') {
if ($sWhere == "") {
$sWhere = "WHERE ";
} else {
$sWhere .= " AND ";
}
$sWhere .= $sTable . "." . $aColumns[$i] . " LIKE '%" . mysql_real_escape_string($_GET['sSearch_' . $i]) . "%' ";
}
}
/*
* SQL queries
* Get data to display
*/
$sQuery = "
SELECT SQL_CALC_FOUND_ROWS $sTable." . str_replace(" , ", " ", implode(", ", $aColumns)) . "
FROM $sTable RIGHT JOIN bst_user_equipment_rights ON $sTable.id=bst_user_equipment_rights.equipment_id
$sWhere
$sOrder
$sLimit
";
$rResult = $this->queryClover($sQuery);
/* Data set length after filtering */
$sQuery = "
SELECT FOUND_ROWS()
";
$rResultFilterTotal = $this->queryClover($sQuery);
$aResultFilterTotal = mysql_fetch_array($rResultFilterTotal);
$iFilteredTotal = $aResultFilterTotal[0];
/* Total data set length */
$sQuery = "
SELECT COUNT(" . $sIndexColumn . ")
FROM $sTable
";
$rResultTotal = $this->queryClover($sQuery);
$aResultTotal = mysql_fetch_array($rResultTotal);
$iTotal = $aResultTotal[0];
/*
* Output
*/
$output = array(
"sEcho" => intval($_GET['sEcho']),
"iTotalRecords" => $iTotal,
"iTotalDisplayRecords" => $iFilteredTotal,
"aaData" => array()
);
while ($aRow = mysql_fetch_array($rResult)) {
$row = array();
$sAct = "";
/* put actions icons here for each row;
foreach ($this->aActionIcons as $sKey=>&$aActions){
$aRules=explode(",", $this->aActionAuths[$sKey]);
if(in_array($this->sSessionUserType, $aRules)){
$sAct.=$aActions;
}
elseif(in_array("self", $aRules)&&$aRow["email"]==$this->sSessionUserEmail){
$sAct.=$aActions;
}
}
$row[]=$sAct;
*/
foreach ($this->aActionIcons as $sKey => &$Action) {
if ($this->inLineFunction($this->aActionAuths[$sKey], $aRow)) {
$sAct.=$Action;
}
}
$row[] = $sAct;
for ($i = 0; $i < count($aColumns); $i++) {
if (isset($this->aaTableStructure[$aColumns[$i]]["function"])) {
$row[] = $this->inLineFunction($this->aaTableStructure[$aColumns[$i]]["function"], $aRow);
} else {
$row[] = $aRow[$aColumns[$i]];
}
}
$output['aaData'][] = $row;
}
return json_encode($output);
}
}
?>
<file_sep>/server_side_equipment_record_processing.php
<?php
/*
 * This file handles the POST requests sent by AJAX for equipment records.
 * Input data:
 * "action" should be one of "detail", "update", "add", "edit", "delete", "apply", or "emaillist";
 * "iId"/"id"/"sIds" identify the record(s) to operate on;
 * "keys"/"values" carry the field names and values (as JSON) for "add" and "update".
 */
include_once('_basics.php');
CheckUser();
$cOpt = new OperateEquipment();
function checkInput($sKey, $sValue) {
global $cOpt;
$sMsg = "";
$sValue = trim($sValue);
if ($sKey == "name") {
if ($sValue == "") {
$sMsg.="Please enter an equipment name.<br/>";
} else {
$aRecord = Array("name" => $sValue);
if ($cOpt->checkIfRecordExist($aRecord)) {
$sMsg.="Equipment name entered was already in use.<br/>";
$sMsg.="Please choose another name.<br/>";
}
}
} elseif ($sKey == "location" && $sValue == "") {
$sMsg.="Please enter the location of the equipment.<br/>";
}
return $sMsg;
}
if ($_POST["action"] == "detail") {
$jResult = $cOpt->AjaxRead($_POST['iId'], "detail", true);
} else if ($_POST["action"] == "update") {
$aaValues = json_decode($_POST["values"], true);
$aKeys = json_decode($_POST['keys'], true);
for ($i = 0; $i < count($aaValues); $i++) {
for ($j = 0; $j < count($aKeys); $j++) {
$aaData[$i][$aKeys[$j]] = $aaValues[$i][$j];
}
}
$jResult = $cOpt->AjaxUpdate($aaData);
} else if ($_POST["action"] == "add") {
$aaValues = json_decode($_POST["values"], true);
$aKeys = json_decode($_POST['keys'], true);
$sErrors = "";
for ($i = 0; $i < count($aaValues); $i++) {
for ($j = 0; $j < count($aKeys); $j++) {
$sErrors.=checkInput($aKeys[$j], $aaValues[$i][$j]);
$aaData[$i][$aKeys[$j]] = $aaValues[$i][$j];
}
}
if ($sErrors == "") {
$jResult = $cOpt->AjaxAdd($aaData);
} else {
$aErrors = Array("changed" => 0, "errors" => $sErrors);
$jResult = json_encode($aErrors);
}
} else if ($_POST["action"] == "edit") {
$jResult = $cOpt->AjaxRead($_POST['iId'], "edit");
} else if ($_POST["action"] == "delete") {
$aIds = explode(",", $_POST["sIds"]);
$jResult = $cOpt->AjaxDelete($aIds);
} else if ($_POST["action"] == "apply") {
$aIds = explode(",", $_POST["sIds"]);
for ($i = 0; $i < count($aIds); $i++) {
$aaValues[$i]['user_id'] = $cOpt->iSessionUserId;
$aaValues[$i]['position'] = "applicant";
$aaValues[$i]['equipment_id'] = $aIds[$i];
}
$cOptUER = new OperateUserEquipmentRights();
$jResult = $cOptUER->addRecords($aaValues);
}else if ($_POST["action"]=="emaillist"){
$jResult=$cOpt->AjaxGenerateEquipmentUserGroupEmailList($_POST["id"]);
}
echo $jResult;
?>
<file_sep>/server_side_user_login.php
<?php
include_once('_basics.php');
$cUser=new OperateUsers();
$sResult=$cUser->Login($_POST["email"], $_POST["password"]);
echo json_encode($sResult);
?>
<file_sep>/server_side_setup_processing.php
<?php
/*
* Create database and tables
* Create new administrator account
*/
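/*
 * Expected POST payload (sent by fnRegister() in setup.php):
 *   keys   - JSON array of field names: name, email, password, repeat_password, description
 *   values - JSON array with one row of the corresponding values; the password fields
 *            arrive already hashed client-side and are hashed again with sha256() for
 *            storage (see OperateUsers::Login() in _basics.php).
 * Setup only proceeds when the request comes from localhost and the bst_users table
 * does not already exist.
 */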
include_once '_basics.php';
class SetupClover extends OperateTables {
private $sErrMsg = "";
function checkLocalHost() {//whether the request comes from localhost;
$whitelist = array('localhost', '127.0.0.1');
if (!in_array($_SERVER['HTTP_HOST'], $whitelist)) {
$this->sErrMsg.='Forbidden! Please visit this page using "localhost".<br/>';
return false;
} else {
return true;
}
}
function checkCloverNonexistent() {//returns false if the Clover database already exists, true otherwise;
$sQuery = "SHOW TABLES FROM " . $this->sCloverDatabaseName;
$rResult = mysql_query($sQuery) or $this->aErrors[] = "Database Error: cannot query mySQL database with the query: " . $sQuery;
if ($rResult) {
while ($row = mysql_fetch_row($rResult)) {
if ($row[0] == "bst_users") {
$this->sErrMsg.='Forbidden! Clover already exists!<br/>';
return false;
}
}
} else {
return false;
}
return true;
}
function checkInput(&$aData) {
if (empty($aData['name'])) {
$this->sErrMsg.="Please enter your name.<br/>";
}
if (!filter_var($aData['email'], FILTER_VALIDATE_EMAIL)) {
$this->sErrMsg.="Please enter a validate email.<br/>";
}
if ($aData['password'] == "<PASSWORD>") {
$this->sErrMsg.="Please enter a password.<br/>";
}
if ($aData['password'] != $aData['repeat_password']) {
$this->sErrMsg.="Passwords you typed do not match.<br/>";
}
}
function __construct() {
$this->sTableName = "bst_users";
$this->aaTableStructure = array_merge(BasicSystemTables::$aaDefault, BasicSystemTables::$bst_users);
mysql_connect($this->sHost, $this->sUsername, $this->sPassword) or $this->aErrors[] = "Database Error: cannot connect to the MySQL database!";
//create the clover database;
$sQuery = "CREATE DATABASE IF NOT EXISTS `" . $this->sCloverDatabaseName . "` DEFAULT CHARACTER SET utf8 COLLATE utf8_unicode_ci";
$rResult = mysql_query($sQuery) or $this->aErrors[] = "Database Error: cannot query mySQL database with the query: " . $sQuery;
if ($this->checkLocalHost() && $this->checkCloverNonexistent()) {
$aaValues = json_decode($_POST["values"], true);
$aKeys = json_decode($_POST['keys'], true);
for ($i = 0; $i < count($aaValues); $i++) {
for ($j = 0; $j < count($aKeys); $j++) {
$aaData[$i][$aKeys[$j]] = $aaValues[$i][$j];
}
$this->checkInput($aaData[$i]);
if (empty($this->sErrMsg)) {
$aaData[$i]["id"] = "1";
$aaData[$i]["identity"] = "admin";
unset($aaData[$i]["repeat_password"]);
$aaData[$i]["password"] = <PASSWORD>($aaData[$i]["password"]);
}
}
}
if (empty($this->sErrMsg)) {//all user information is correct;
$this->setupCloverDatabase(); //create all tables;
$iResult = $this->addRecords($aaData); //add user;
if ($iResult) {//admin account added;
session_name("cloveriver");
session_start();
$_SESSION["user_id"] = 1; //login;
$this->sSessionUserEmail = $aaData[0]["email"];
$this->sSessionUserName = $aaData[0]["name"];
$this->sSessionUserType = $aaData[0]["identity"];
$this->iSessionUserId = 1;
$aRlt["changed"] = 1;
}
} else {//user information incorrect;
$aRlt["changed"] = 0;
$aRlt["errors"] = $this->sErrMsg;
}
echo json_encode($aRlt);
}
function setupCloverDatabase() {
mysql_select_db($this->sCloverDatabaseName) or $this->aErrors[] = "Database Error: cannot select the " . $this->sCloverDatabaseName . " database.";
foreach (BasicSystemTables::$aTables as $table) {
$sQuery = "CREATE TABLE IF NOT EXISTS `" . $table . "` (";
$aaFields = array_merge(BasicSystemTables::$aaDefault, BasicSystemTables::${$table});
foreach ($aaFields as $aField) {
$sQuery.="`" . $aField["name"] . "` " . $aField["data_type"] . ",";
}
$sQuery = substr($sQuery, 0, strlen($sQuery) - 1) . ") ENGINE = InnoDB CHARACTER SET utf8 COLLATE utf8_unicode_ci";
if (!$this->queryClover($sQuery)) {
break;
}
}
}
}
$cSetup = new SetupClover();
?>
|
dafa470f5d3cb89e9be554f173d8f464e410d26d
|
[
"Markdown",
"PHP"
] | 16
|
PHP
|
XiaoMutt/Cloveriver
|
220e8aa50e31b30407a4484018f852826b4bdf1d
|
b7141cf53413e79187207b1627ef6fbf146a35b7
|
refs/heads/main
|
<file_sep>FROM node:15-alpine3.10
# FROM node:10
# Setting workdir to /web
WORKDIR /
# Copy package.json & package-lock.json to workdir
COPY package*.json ./
RUN npm install -g copyfiles typescript
RUN npm install
# Copying everything into workdir
COPY . .
# DEBUG
RUN ls
# Building the typescript files
RUN npm run build
# Copy static files to build
COPY static ./static
COPY views ./views
# Exposing web port and database
EXPOSE 8080
# Starting script
CMD [ "npm", "run", "start" ]
# docker run -d -p 8000:8080 -v ~/images:D:/images fbb02185c2f4<file_sep>import express from 'express';
import fs from 'fs';
const app = express()
const fileList = fs.readdirSync('/images');
async function next() {
return 1
}
function base64_encode(file: string) {
// read binary data
var bitmap = fs.readFileSync(file);
// convert binary data to base64 encoded string
return Buffer.from(bitmap).toString('base64');
}
let current_image = 0
app.use(express.static(`/static/`))
app.set('views', `/views/`)
app.set('view engine', 'ejs')
app.get('/', (req: any, res: any) => {
let ci = base64_encode(`/images/${fileList[current_image]}`)
res.render('index', {i: ci, current: (current_image + 1), max: fileList.length})
})
app.get('/next', (req:any, res:any) => {
if (fileList.length - 1 <= current_image)
current_image = 0
else
current_image++
res.redirect('/');
})
const PORT = 8080
app.listen(PORT, async () => {
console.log(`Ready to go on ${PORT}`)
});
|
bda7a63cee95bf86ecddb189c92234c521826aed
|
[
"TypeScript",
"Dockerfile"
] | 2
|
Dockerfile
|
linus-jansson/binder
|
2bb8b62dcc2c7751d9de661449d4650bb19e7a79
|
527c3cc7e3ac9fd8baa460cca7ce372099a1f705
|
refs/heads/master
|
<repo_name>MJ-sfo/Starting-Rails-Guides<file_sep>/blog/app/controllers/articles_controller.rb
class ArticlesController < ApplicationController
def new
end
def create
# render plain: params[:article].inspect
@article = Article.new(params[:article])
# using Cap - referring to class Article, defined in app/models/article.rb
@article.save # returns boolean - whether the article was saved
redirect_to @article
end
end
|
7ce68bdb6c8163286496addb5b324f68090e0d14
|
[
"Ruby"
] | 1
|
Ruby
|
MJ-sfo/Starting-Rails-Guides
|
754356a922128a85a0b9f3131c45f5702cc2f378
|
0be62763242ca002ed2d5728e0e1d5d11453cc9d
|
refs/heads/master
|
<repo_name>aur-archive/xfce4-indicator-plugin-git<file_sep>/PKGBUILD
# Maintainer: <NAME> <<EMAIL>>
_pkgname=xfce4-indicator-plugin
pkgname=xfce4-indicator-plugin-git
pkgver=259.f722729
pkgrel=1
pkgdesc="Plugin to display information from applications in the Xfce4 panel"
arch=('i686' 'x86_64')
url="http://goodies.xfce.org/projects/panel-plugins/xfce4-indicator-plugin"
license=('GPL')
depends=('xfce4-panel' 'libindicator' 'hicolor-icon-theme' 'xdg-utils' 'libxfce4ui-git')
makedepends=('intltool' 'xfce4-dev-tools')
optdepends=('indicator-application-gtk2: take menus from applications and place them in the panel'
'indicator-sound-gtk2: unified sound menu')
install=$pkgname.install
source=(git://git.xfce.org/panel-plugins/xfce4-indicator-plugin.git)
md5sums=('SKIP')
pkgver(){
cd "$srcdir/$_pkgname"
echo $(git rev-list --count HEAD).$(git rev-parse --short HEAD)
}
build() {
cd "$srcdir/$_pkgname"
./autogen.sh --prefix=/usr --sysconfdir=/etc --localstatedir=/var --libexecdir=/usr/lib \
--disable-static
make
}
package() {
cd "$srcdir/$_pkgname"
make DESTDIR="$pkgdir/" install
}
|
f1f5adb46f3d6c209ef295116acc97d9240eaec0
|
[
"Shell"
] | 1
|
Shell
|
aur-archive/xfce4-indicator-plugin-git
|
8bceffa4b681c9aaa4ddb446405be32e4843df91
|
5b6127db558a5226324c231aa027f6b85a4c18d3
|
refs/heads/main
|
<file_sep>import * as crypto from 'crypto';
import { Injectable } from '@nestjs/common';
import { ConfigService } from '@nestjs/config';
import { Exception } from 'handlebars';
@Injectable()
export class Encrypter {
key = '';
algorithm = '';
ivLength = 16;
constructor(private configService: ConfigService) {
this.key = configService.get('app.key');
this.algorithm = 'aes-256-cbc';
this.ivLength = 16;
}
/**
* Create a new encryption key for the given cipher.
*
* @param string algorithm
* @return string
*/
public generateKey(algorithm?: string) {
return crypto.randomBytes(
algorithm && algorithm.toUpperCase() === 'AES-128-CBC' ? 16 : 32,
);
}
/**
* Encrypt the given value.
*
* @param mixed value
* @param bool serialize
* @return string
*
*/
public encrypt(value: any, serialize = true) {
const iv = crypto.randomBytes(this.ivLength);
// First we will encrypt the value using OpenSSL. After this is encrypted we
// will proceed to calculating a MAC for the encrypted value so that this
// value can be verified later as not having been changed by the users.
const cryptValue = this.open_encrypt(
serialize ? JSON.stringify(value) : value,
this.getKey(),
iv,
);
if (!cryptValue) {
throw new Error('Could not encrypt the data.');
}
// Once we get the encrypted value we'll go ahead and base64_encode the input
// vector and create the MAC for the encrypted value so we can then verify
// its authenticity. Then, we'll JSON the data into the "payload" array.
const encIv = this.base64_encode(iv);
const mac = this.hash(encIv, cryptValue);
const json = JSON.stringify({ iv: encIv, value: cryptValue, mac });
return this.base64_encode(json);
}
/**
* Encrypt a string without serialization.
*
* @param string value
* @return string
*/
public encryptString(value: string) {
return this.encrypt(value, false);
}
/**
* Decrypt the given value.
*
* @param string payload
* @param bool unserialize
* @return any
*/
public decrypt(payload: any, unserialize?: boolean) {
const newPayload = this.getJsonPayload(payload);
const iv = this.base64_decode(newPayload.iv, true);
// Here we will decrypt the value. If we are able to successfully decrypt it
// we will then unserialize it and return it out to the caller. If we are
// unable to decrypt this value we will throw out an exception message.
const decrypted = this.open_decrypt(newPayload.value, this.getKey(), iv);
if (!decrypted) {
throw new Error('Could not decrypt the data.');
}
return unserialize ? JSON.parse(decrypted) : decrypted;
}
/**
* Decrypt the given string without unserialization.
*
* @param string payload
* @return string
*/
public decryptString(payload: any) {
return this.decrypt(payload, false);
}
/**
* Create a MAC for the given value.
*
* @param any iv
* @param any value
* @return string
*/
protected hash(iv: any, value: any): string {
const hmac = crypto.createHmac('sha256', this.key);
hmac.update(`${iv}${value}`);
return hmac.digest('hex');
}
/**
* Get the JSON array from the given payload.
*
* @param string payload
* @return array
*
* @throws \Illuminate\Contracts\Encryption\DecryptException
*/
protected getJsonPayload(payload: string) {
const newPayload = JSON.parse(this.base64_decode(payload));
// If the payload is not valid JSON or does not have the proper keys set we will
// assume it is invalid and bail out of the routine since we will not be able
// to decrypt the given value. We'll also check the MAC for this encryption.
if (!this.validPayload(newPayload)) {
throw new Error('Decryption Error: The payload is invalid.');
}
if (!this.validMac(newPayload)) {
throw new Error('Decryption Error: The MAC is invalid.');
}
return newPayload;
}
/**
* Create a MAC for the given value.
*
* @param any iv
* @param any value
* @return string
*/
public hash_hmac(algo: any, data: any, key?: any, rawOutput?: boolean): any {
const hmac = crypto.createHmac(algo, key ? key : this.getKey());
hmac.update(data);
return rawOutput ? hmac.digest() : hmac.digest('hex');
}
/**
* Timing-attack-safe string compare
*
* @param known_string string
* @param user_string string
*/
public hash_equal(known_string: string, user_string: string): boolean {
try {
return crypto.timingSafeEqual(
Buffer.from(known_string),
Buffer.from(user_string),
);
} catch (ex) {
return false;
}
}
/**
* Base64 encode
*
* @param value
*/
public base64_encode(value: any): string {
return Buffer.from(value).toString('base64');
}
/**
* Base64 decode
*
* @param value
*/
public base64_decode(value: any, bufferOnly?: boolean): any | string {
const decoded = Buffer.from(value, 'base64');
return bufferOnly ? decoded : decoded.toString('utf8');
}
public sha1(value: string, rawOutput?: boolean): any {
const hash = crypto.createHash('sha1');
hash.update(value);
return rawOutput ? hash.digest() : hash.digest('hex');
}
/**
* Verify that the encryption payload is valid.
*
* @param mixed payload
* @return bool
*/
protected validPayload(payload: any): boolean {
return (
this.isArray(payload) &&
this.isSet(payload.iv, payload.value, payload.mac) &&
Buffer.byteLength(this.base64_decode(payload.iv, true)) === this.ivLength
);
}
/**
* Determine if the MAC for the given payload is valid.
*
* @param object payload
* @return bool
*/
protected validMac(payload: any) {
const bytes = crypto.randomBytes(this.ivLength);
const calculated = this.calculateMac(payload, bytes);
const hash = this.hash_hmac('sha256', payload.mac, bytes, true);
return crypto.timingSafeEqual(hash, calculated);
}
/**
* Calculate the hash of the given payload.
*
* @param object payload
* @param string bytes
* @return string
*/
protected calculateMac(payload: any, bytes: any) {
const hash = this.hash(payload.iv, payload.value);
return this.hash_hmac('sha256', hash, bytes, true);
}
protected getKey(): Buffer {
return Buffer.from(this.key, 'base64');
}
// algorithm = 'aes-256-cbc',
// cryptoIv = Buffer.from(crypto.randomBytes(8)).toString('hex');
public open_encrypt(value: string, key: Buffer, iv: any): string {
const cipher = crypto.createCipheriv(this.algorithm, key, iv);
let encrypted = cipher.update(value, 'utf8', 'hex');
encrypted += cipher.final('hex');
return encrypted;
}
public open_decrypt(value: string, key: Buffer, iv: any): string {
const cipher = crypto.createDecipheriv(this.algorithm, key, iv);
let decrypted = cipher.update(value, 'hex', 'utf8');
decrypted += cipher.final('utf8');
return decrypted;
}
private isArray(...mixedVar: any[]) {
if (!mixedVar.length) {
throw new Error('Valid argument is required to check');
}
const isArray = (entry: any) => Array.isArray(entry);
const isObject = (entry: any) => typeof entry === 'object';
const invalids = mixedVar.filter(mVar => !isArray(mVar) && !isObject(mVar));
return invalids.length === 0;
}
private isSet(...mixedVar: any[]): boolean {
const invalids = mixedVar.filter(
mVar =>
mVar === null ||
mVar === undefined ||
mVar === 'undefined',
);
return invalids.length === 0;
}
}
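// Illustrative usage sketch (not part of the original service): demonstrates the
// encrypt/decrypt round trip described in the comments above. Assumes an Encrypter
// instance obtained from Nest's DI container, with `app.key` configured as a
// base64-encoded 32-byte secret as required by aes-256-cbc.
export function encrypterUsageExample(encrypter: Encrypter): void {
  // Serialize and encrypt an arbitrary value, then decrypt it back.
  const token = encrypter.encrypt({ userId: 42 }); // base64 JSON payload: { iv, value, mac }
  const original = encrypter.decrypt(token, true); // -> { userId: 42 }

  // String-only variants skip JSON serialization.
  const cipherText = encrypter.encryptString('secret');
  const plainText = encrypter.decryptString(cipherText); // -> 'secret'

  console.log(original, plainText);
}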
<file_sep>import {
PrimaryGeneratedColumn,
CreateDateColumn,
UpdateDateColumn,
BeforeInsert,
BeforeUpdate,
} from 'typeorm';
export abstract class Base {
@PrimaryGeneratedColumn({
type: 'bigint',
})
id: number;
@CreateDateColumn({
name: 'created_at',
type: 'timestamp',
})
createdAt: Date;
@UpdateDateColumn({
name: 'updated_at',
type: 'timestamp',
})
updatedAt: Date;
@BeforeInsert()
beforeCreate() {
this.createdAt = new Date();
this.updatedAt = this.createdAt;
}
@BeforeUpdate()
beforeUpdate() {
this.updatedAt = new Date();
}
}
<file_sep>import { PhysiciansService } from './../../admin/physicians/physicians.service';
import { TypeOrmModule } from '@nestjs/typeorm';
import { DatabaseModule } from './../../shared/database/database.module';
import { Module } from '@nestjs/common';
import { RegistrationController } from './registration.controller';
import { Physician, PhysicianProfile } from './../../shared/database';
@Module({
imports: [
DatabaseModule,
TypeOrmModule.forFeature([
Physician,
PhysicianProfile,
])
],
controllers: [RegistrationController],
providers: [PhysiciansService]
})
export class RegistrationModule {}
<file_sep>import { PermissionsService } from './../permissions/permissions.service';
import { Controller, Patch, Post, UseGuards } from '@nestjs/common';
import { ApiTags, ApiBody, ApiBearerAuth } from '@nestjs/swagger';
import {
Crud,
CrudController,
Override,
CrudRequest,
ParsedRequest,
ParsedBody,
} from '@nestjsx/crud';
import { plainToClass } from 'class-transformer';
import { CreateRolesDto } from './dto/create-roles.dto';
import { UpdateRolesDto } from './dto/update-roles.dto';
import { Role } from '../../shared/database';
import { RolesService } from './roles.service';
import { AdminAuthGuard } from '../../auth/guards/admin.guard';
@Crud({
model: {
type: Role,
},
dto: {
create: CreateRolesDto,
update: UpdateRolesDto,
},
routes: {
exclude: ['replaceOneBase'],
deleteOneBase: {
returnDeleted: false,
},
},
params: {
id: {
field: 'id',
type: 'string',
primary: true,
},
},
query: {
alwaysPaginate: true,
exclude: ['deletedAt'],
join: {
permissions: {
persist: ['id', 'name', 'display_name'],
exclude: ['createdAt', 'updatedAt', 'tableName'],
eager: true,
},
},
},
})
@ApiTags('Roles (Admin)')
@UseGuards(AdminAuthGuard)
@ApiBearerAuth('JWT')
@Controller('admin/roles')
export class RolesController implements CrudController<Role> {
constructor(
public service: RolesService,
private permissionService: PermissionsService,
) {}
get base(): CrudController<Role> {
return this;
}
@Override()
@Post()
async createOne(
@ParsedRequest() req: CrudRequest,
@ParsedBody() dto: CreateRolesDto,
): Promise<any> {
const data: Role = plainToClass(Role, dto);
const permissionIds: number[] = dto.permissions ? dto.permissions : [];
const perms = permissionIds.length
? await this.permissionService.repository.findByIds(permissionIds)
: [];
if (perms.length) {
data.permissions = perms;
}
return this.base.createOneBase(req, data);
}
@ApiBody({
type: UpdateRolesDto,
})
@Override()
@Patch(':id')
async updateOne(
@ParsedRequest() req: CrudRequest,
@ParsedBody() dto: Partial<UpdateRolesDto>,
): Promise<any> {
const data: Role = plainToClass(Role, dto);
const permissionIds: number[] = dto.permissions ? dto.permissions : [];
data.permissions = permissionIds.length
? await this.permissionService.repository.findByIds(permissionIds)
: [];
return this.base.updateOneBase(req, data);
}
}
<file_sep>import { NestFactory } from '@nestjs/core';
import { ConfigService } from '@nestjs/config';
import { AppModule } from './app.module';
import {
ValidationPipe,
ValidationError,
UnprocessableEntityException,
} from '@nestjs/common';
import { SwaggerModule, DocumentBuilder } from '@nestjs/swagger';
import { useContainer } from 'class-validator';
import { Helpers } from './shared/helpers';
async function bootstrap() {
const app = await NestFactory.create(AppModule);
const configService = app.get(ConfigService);
const basePath = configService.get('app.base_path');
let apiDocPath = 'docs';
app.enableCors({
origin: '*',
methods: 'GET,HEAD,PUT,PATCH,POST,DELETE',
preflightContinue: false,
// optionsSuccessStatus: 204,
});
Helpers.ensureKeys(configService.get('app.decodedKey'));
if (basePath) {
app.setGlobalPrefix(basePath);
apiDocPath = `${basePath}/${apiDocPath}`;
}
app.useGlobalPipes(
new ValidationPipe({
whitelist: true,
exceptionFactory: (errors: ValidationError[]) => {
const fErrors = {};
errors.map(err => {
fErrors[err.property] = [];
for (const property in err.constraints) {
if (err.constraints.hasOwnProperty(property)) {
fErrors[err.property].push(err.constraints[property]);
}
}
});
return new UnprocessableEntityException({
message: 'Validation error occurred',
errors: fErrors,
});
},
}),
);
useContainer(app.select(AppModule), { fallbackOnErrors: true });
const port = configService.get('app.port');
const options = new DocumentBuilder()
.setTitle(`${configService.get('app.name')} API`)
.setDescription(`${configService.get('app.name')} API Documentation`)
.setVersion('1.0')
.addBearerAuth(
{ type: 'http', scheme: 'bearer', bearerFormat: 'JWT' },
'JWT',
)
.build();
const document = SwaggerModule.createDocument(app, options);
SwaggerModule.setup(apiDocPath, app, document);
await app.listen(process.env.PORT || 4000);
}
bootstrap();
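// Illustrative note (not part of the original file): with the exceptionFactory above,
// failed DTO validation yields a 422 response shaped like the following (field names
// are examples only):
//
// {
//   "message": "Validation error occurred",
//   "errors": {
//     "email": ["Email is required", "Enter a valid email address"]
//   }
// }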
<file_sep>import { Module } from '@nestjs/common';
import { ConfigService, ConfigModule } from '@nestjs/config';
import { TypeOrmModule } from '@nestjs/typeorm';
@Module({
imports: [
TypeOrmModule.forRootAsync({
imports: [ConfigModule],
useFactory: (configService: ConfigService) => ({
name: 'default',
type: 'mysql',
host: configService.get('database.host'),
port: configService.get<number>('database.port'),
username: configService.get('database.user'),
password: configService.get('database.password'),
database: configService.get('database.database'),
charset: configService.get('database.charset'),
entityPrefix: configService.get('database.prefix') || '',
autoLoadEntities: true,
logger: 'advanced-console',
logging:
configService.get('app.debug') === true
? 'all'
: configService.get('app.debug'),
legacySpatialSupport: configService.get(
'database.legacySpatialSupport',
),
synchronize: configService.get('database.synchronize'),
}),
inject: [ConfigService],
}),
],
})
export class DatabaseModule {}
<file_sep>import { PhysicianProfile } from './../../shared/database/entity/physician_profile.entity';
import { Physician } from './../../shared/database/entity/physician.entity';
import { Controller, Patch, Post, UnprocessableEntityException, UseGuards } from '@nestjs/common';
import { ApiBearerAuth, ApiBody, ApiTags } from '@nestjs/swagger';
import { AdminAuthGuard } from 'src/auth/guards/admin.guard';
import { Crud, CrudController, CrudRequest, Override, ParsedBody, ParsedRequest } from '@nestjsx/crud';
import { CreatePhysicianDto } from './dto/create-physician.dto';
import { UpdatePhysicianDto } from './dto/update-physician.dto';
import { PhysiciansService } from './physicians.service';
import { plainToClass } from 'class-transformer';
import { ValidateUniqueParam } from 'src/shared/validations';
@Crud({
model: {
type: Physician,
},
dto: {
create: CreatePhysicianDto,
update: UpdatePhysicianDto,
replace: UpdatePhysicianDto,
},
routes: {
exclude: ['replaceOneBase', 'createManyBase'],
deleteOneBase: {
returnDeleted: false,
},
},
params: {
id: {
field: 'id',
type: 'string',
primary: true,
},
},
query: {
alwaysPaginate: true,
exclude: ['password', 'deletedAt'],
join: {
profile: {
exclude: ['createdAt', 'updatedAt'],
eager: true,
},
},
},
})
@ApiTags('Physicians (Admin)')
@UseGuards(AdminAuthGuard)
@ApiBearerAuth('JWT')
@Controller('admin/physicians')
export class PhysiciansController implements CrudController<Physician> {
constructor(
public service: PhysiciansService,
) {}
get base(): CrudController<Physician> {
return this;
}
@Override()
@Post()
async createOne(
@ParsedRequest() req: CrudRequest,
@ParsedBody() dto: CreatePhysicianDto,
): Promise<any> {
const data: Physician = plainToClass(Physician, dto);
const profile: PhysicianProfile = dto.profile;
if (!profile.firstName) {
throw new UnprocessableEntityException({
message: 'Validation error occurred',
errors: {
firstName: ['First name is required'],
}
});
}
if (!profile.lastName) {
throw new UnprocessableEntityException({
message: 'Validation error occurred',
errors: {
lastName: ['Last name is required'],
}
});
}
if (!profile.phone) {
throw new UnprocessableEntityException({
message: 'Validation error occurred',
errors: {
phone: ['Phone number is required'],
}
});
}
return this.base.createOneBase(req, data);
}
@ApiBody({
type: UpdatePhysicianDto,
})
@Override()
@Patch(':id')
async updateOne(
@ParsedRequest() req: CrudRequest,
@ValidateUniqueParam({
name: 'email',
field: 'email',
entity: Physician,
mode: 'Update',
table: 'physicians',
})
uniqEmail: boolean,
@ValidateUniqueParam({
name: 'username',
field: 'username',
entity: Physician,
mode: 'Update',
table: 'physicians',
})
uniqUsername: boolean,
@ParsedBody() dto: Partial<UpdatePhysicianDto>,
): Promise<any> {
const data: Physician = plainToClass(Physician, dto);
const profile: PhysicianProfile = data.profile;
if (!profile.firstName) {
throw new UnprocessableEntityException({
message: 'Validation error occurred',
errors: {
firstName: ['First name is required'],
}
});
}
if (!profile.lastName) {
throw new UnprocessableEntityException({
message: 'Validation error occurred',
errors: {
lastName: ['Last name is required'],
}
});
}
if (!profile.phone) {
throw new UnprocessableEntityException({
message: 'Validation error occurred',
errors: {
phone: ['Phone number is required'],
}
});
}
return this.base.updateOneBase(req, data);
}
}
<file_sep>import { PasswordUpdateDto } from './../../shared/dto/password-update.dto';
/* eslint-disable @typescript-eslint/explicit-module-boundary-types */
import { UserAuthGuard } from './../../auth/guards/user.guard';
import { ApiBearerAuth, ApiNotFoundResponse, ApiOkResponse, ApiTags, ApiUnprocessableEntityResponse } from '@nestjs/swagger';
import { UsersService } from './../../admin/users/users.service';
import { UserRegistrationDto } from './dto/user-registration.dto';
import { Body, Controller, NotFoundException, Post, Req, Request, UnprocessableEntityException, UseGuards } from '@nestjs/common';
import { User, UserProfile } from './../../shared/database';
import { plainToClass } from 'class-transformer';
import { CompleteUserRegistrationDto } from './dto/complete-user-registration.dto';
import { UserRegistrationUpdateResponseDto } from './dto/user-registration-update_response.dto';
interface UserAPIResponse {
message: string,
success?: boolean,
error?:boolean,
data?: any
}
@ApiTags('Users')
@Controller('user')
export class RegistrationController {
constructor(
public service: UsersService
){}
@Post('registration')
async registerUser(
// @Request() req: Request,
@Body() dto: UserRegistrationDto,
): Promise<any> {
const data: User = plainToClass(User, dto);
const user = await this.service.repository.insert(data);
if (!user) {
throw new UnprocessableEntityException('Registration failed')
}
return {
message: 'User Registration successful',
success: true,
};
}
@UseGuards(UserAuthGuard)
@ApiBearerAuth('JWT')
@ApiUnprocessableEntityResponse({
description: 'Returns this error when operation fails'
})
@ApiNotFoundResponse({
description: 'Returns this error when user is not found'
})
@ApiOkResponse({
description: 'Returns complete User profile on successful operation',
type: UserRegistrationUpdateResponseDto
})
@Post('complete-registration')
async completeRegistration(
@Request() req: any,
@Body() dto: CompleteUserRegistrationDto,
): Promise<any> {
const user: User = <User>req.user;
if (!user) {
throw new NotFoundException('User not found')
}
try {
const profile = new UserProfile();
profile.firstName = dto.firstName;
profile.lastName = dto.lastName;
profile.biography = dto.biography;
profile.lastTreatedAilment = dto.lastTreatedAilment;
profile.phone = dto.phone;
profile.isAnonymous = dto.isAnonymous;
user.avatar = dto.avatar;
user.profile = profile;
await this.service.repository.save(user);
return {
message: 'User registration updated successfully',
success: true,
data: user
};
} catch (ex) {
throw new UnprocessableEntityException('Operation failed')
}
}
@UseGuards(UserAuthGuard)
@ApiBearerAuth('JWT')
@ApiUnprocessableEntityResponse({
description: 'Returns this error when operation fails'
})
@ApiNotFoundResponse({
description: 'Returns this error when user is not found'
})
@ApiOkResponse({
description: 'Returns complete message on successful operation',
type: 'Json response',
})
@Post('update-password')
async updateProfile(
@Request() req: any,
@Body() dto: PasswordUpdateDto,
): Promise<any> {
const user: User = <User>req.user;
if (!user) {
throw new NotFoundException('User not found')
}
try {
user.password = <PASSWORD>;
await this.service.repository.save(user);
return {
message: 'User password updated successfully',
success: true,
} as UserAPIResponse;
} catch (ex) {
throw new UnprocessableEntityException('Operation failed')
}
}
}
<file_sep>import { ConfigService, ConfigModule } from '@nestjs/config';
import { AdminUsersService } from './admin/admin-users/admin-users.service';
import { RolesService } from './admin/roles/roles.service';
import { PermissionsService } from './admin/permissions/permissions.service';
import { Encrypter } from './shared/encrypter';
import { Test, TestingModule } from '@nestjs/testing';
import { AppController } from './app.controller';
import { AppService } from './app.service';
describe('AppController', () => {
let appController: AppController;
beforeEach(async () => {
const app: TestingModule = await Test.createTestingModule({
imports: [ConfigModule],
controllers: [AppController],
providers: [AppService, Encrypter, ConfigService],
}).compile();
appController = app.get<AppController>(AppController);
});
describe('root', () => {
it('should return "Hello World!"', () => {
expect(appController.getHello()).toBe('Hello World!');
});
});
});
<file_sep>import { toBool } from './../helpers';
import { registerAs } from '@nestjs/config';
export default registerAs('database', () => ({
connection: process.env.DB_CONNECTION,
host: process.env.DB_HOST || '127.0.0.1',
port: parseInt(process.env.DB_PORT, 10) || 3306,
database: process.env.DB_DATABASE,
user: process.env.DB_USERNAME,
password: <PASSWORD>,
prefix: process.env.DB_PREFIX,
charset: 'utf8mb4',
collation: 'utf8mb4_unicode_ci',
legacySpatialSupport:
process.env.DB_LEGACY_SPATIAL !== undefined &&
process.env.DB_LEGACY_SPATIAL !== 'undefined'
? toBool(process.env.DB_LEGACY_SPATIAL)
: true,
test_database: process.env.TEST_DB_NAME,
synchronize: process.env.SYNC_DB || false,
}));
<file_sep>import { Admin } from '../../shared/database';
import { Encrypter } from '../../shared/encrypter';
import { Injectable, UnauthorizedException } from '@nestjs/common';
import { Strategy, ExtractJwt } from 'passport-jwt';
import { PassportStrategy } from '@nestjs/passport';
import { readFileSync } from 'fs';
import { AuthService } from '../auth.service';
@Injectable()
export class AdminStrategy extends PassportStrategy(Strategy, 'admin') {
constructor(private authService: AuthService, private encrypter: Encrypter) {
super({
jwtFromRequest: ExtractJwt.fromAuthHeaderAsBearerToken(),
ignoreExpiration: false,
secretOrKey: readFileSync(
`${process.cwd()}/.keys/jwt-public.key`,
).toString(),
usernameField: 'email',
passwordField: '<PASSWORD>',
});
}
// eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types
async validate(payload: any) {
const userId: number = this.encrypter.decryptString(payload.sub);
const data: any = this.encrypter.decrypt(payload.data, true);
const user: Admin = await this.authService.validateAdmin(userId, data);
if (!user) {
throw new UnauthorizedException();
}
return user;
}
}
<file_sep>import { Encrypter } from './../src/shared/encrypter';
import { Test, TestingModule } from '@nestjs/testing';
import { INestApplication } from '@nestjs/common';
import * as request from 'supertest';
import { AppModule } from './../src/app.module';
import { getConnectionManager } from 'typeorm';
afterAll(async done => {
try {
const conn = getConnectionManager().get('default') ?? null;
if (conn) {
await conn.close();
}
} catch (_) {
console.log('Database Connection not Found');
} finally {
done();
}
});
describe('AppController (e2e)', () => {
let app: INestApplication;
beforeEach(async () => {
const moduleFixture: TestingModule = await Test.createTestingModule({
imports: [AppModule],
providers: [Encrypter],
}).compile();
app = moduleFixture.createNestApplication();
await app.init();
});
it('/ (GET)', () => {
return request(app.getHttpServer())
.get('/')
.expect(200)
.expect('Hello World!');
});
});
<file_sep>import {
Injectable,
NestInterceptor,
ExecutionContext,
CallHandler,
} from '@nestjs/common';
import { Observable } from 'rxjs';
@Injectable()
export class CustomInterceptor implements NestInterceptor {
intercept(context: ExecutionContext, next: CallHandler): Observable<any> {
const req = context.switchToHttp().getRequest();
// tslint:disable-next-line: no-console
console.log(req.params.id);
return next.handle();
}
}
<file_sep>import { Repository, getRepository } from 'typeorm';
import {
registerDecorator,
ValidationOptions,
ValidatorConstraint,
ValidatorConstraintInterface,
ValidationArguments,
} from 'class-validator';
import {
createParamDecorator,
ArgumentsHost,
UnprocessableEntityException,
} from '@nestjs/common';
@ValidatorConstraint({ async: true })
export class IsUniqueConstraint implements ValidatorConstraintInterface {
async validate(value: any, args: ValidationArguments) {
const entity = args.constraints[0];
const repo: Repository<any> = getRepository(entity);
const conditions = {};
conditions[args.property] = value;
return await repo.find(conditions).then(record => {
return record.length ? false : true;
});
}
defaultMessage(args: ValidationArguments) {
// here you can provide default error message if validation failed
return 'Record already exists with ($value)';
}
}
export function IsUnique(
entity: any,
column?: string,
idColumn?: string,
idValue?: any,
validationOptions?: ValidationOptions,
) {
// tslint:disable-next-line: no-console
// tslint:disable-next-line: ban-types
return (object: any, propertyName: string) => {
registerDecorator({
name: 'isUnique',
target: object.constructor,
propertyName,
options: validationOptions,
constraints: [entity, column, idValue, idColumn],
validator: IsUniqueConstraint,
});
};
}
export interface UniqueColumn {
name: string; // Column name
field: string; // Request field name
entity: any; // Entitry to use for database operation
idField?: string; // Record ID field name
id?: string | number; // Record ID value
mode: 'Create' | 'Update';
message?: string;
table: string;
}
export const ValidateUniqueParam = createParamDecorator(
async (column: UniqueColumn, args: ArgumentsHost) => {
const request = args.switchToHttp().getRequest();
const col = Object.assign(
{
idField: 'id',
mode: 'Create',
message: `Record already exists with (${request.body[column.field]})`,
},
column,
);
if (!request.body[col.field]) {
return false;
}
const qb = getRepository(col.entity).createQueryBuilder(col.table);
if (col.mode === 'Update' && request.params.id) {
qb.where(`${col.idField} != :id`, { id: request.params.id });
}
const exists = await qb
.andWhere(`${col.field} = :col`, { col: request.body[col.field] })
.getCount();
if (exists) {
const err = {};
err[col.field] = col.message;
throw new UnprocessableEntityException({
message: 'Validation error occurred',
errors: [err],
});
}
return true;
},
);
<file_sep>/* eslint-disable @typescript-eslint/explicit-module-boundary-types */
import { MailerService } from '@nestjs-modules/mailer';
import { STRINGS } from '../shared/constants';
import { Encrypter } from '../shared/encrypter';
import { Injectable, HttpException, HttpStatus } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { JwtService } from '@nestjs/jwt';
import { Admin, User, Physician } from '../shared/database';
import { Repository, MoreThan } from 'typeorm';
import { UserLoginDto } from '../shared/dto/user-login.dto';
import * as bcrypt from 'bcrypt';
import { ConfigService } from '@nestjs/config';
import { UserPasswordResetDto } from './../shared/dto/user-password-reset.dto';
import { UrlSigner } from './../shared/url-signer';
import { route } from './../shared/helpers';
@Injectable()
export class AuthService {
constructor(
@InjectRepository(Admin)
private readonly adminRepo: Repository<Admin>,
@InjectRepository(User)
private readonly userRepo: Repository<User>,
@InjectRepository(Physician)
private readonly physicianRepo: Repository<Physician>,
private jwtService: JwtService,
private configService: ConfigService,
private encrypter: Encrypter,
private mailerService: MailerService,
private urlSigner: UrlSigner,
) {}
async adminLogin({ username, password }: UserLoginDto): Promise<Admin> {
const user = await this.adminRepo.findOne({ where: { email: username } });
if (!user) {
throw new HttpException(
STRINGS.auth.login.invalid,
HttpStatus.UNAUTHORIZED,
);
}
// console.log(user);
if (!bcrypt.compareSync(password, user.password)) {
throw new HttpException(
STRINGS.auth.login.invalid,
HttpStatus.UNAUTHORIZED,
);
}
return user;
}
async userLogin({ username, password }: UserLoginDto, userType: string): Promise<User | Physician> {
const user = userType === 'physician'
? await this.physicianRepo.findOne({ where: { email: username } })
: await this.userRepo.findOne({ where: { email: username } });
if (!user) {
throw new HttpException(
STRINGS.auth.login.invalid,
HttpStatus.UNAUTHORIZED,
);
}
// console.log(user);
if (!bcrypt.compareSync(password, user.password)) {
throw new HttpException(
STRINGS.auth.login.invalid,
HttpStatus.UNAUTHORIZED,
);
}
return user;
}
async validateAdmin(userId: any, data: any) {
const user = await this.adminRepo.findOne({
where: {
id: userId,
email: data.email,
},
});
return user;
}
async validateUser(userId: any, data: any, userType = 'admin') {
let repo = null;
if (userType === 'admin') {
repo = this.adminRepo;
}
if (userType === 'user') {
repo = this.userRepo;
}
if (userType === 'physician') {
repo = this.physicianRepo;
}
const user = await repo.findOne({
where: {
id: userId,
email: data.email,
},
});
return user;
}
async login(loginDto: any, userType: string) {
let user = null;
if (userType === 'admin') {
user = await this.adminLogin(loginDto);
}
if (userType === 'user') {
user = await this.userLogin(loginDto, 'user');
}
if (userType === 'physician') {
user = await this.userLogin(loginDto, 'physician');
}
return {
expires_in: this.configService.get('app.tokenExpiresIn'),
access_token: this.jwtService.sign(this._preparePayload(user, userType)),
};
}
getTemporarySignedUrl(user: any, userType: string): string {
const signedUrl = this.urlSigner.sign(
route().toFullUrl(`${userType}/auth/password-reset`, false, false),
{ ttl: 60 },
[user.id, this.encrypter.sha1(user.email)],
);
return signedUrl;
}
async resetPassword(dto: UserPasswordResetDto, userType: string) {
if (userType === 'admin') {
throw new HttpException('Invalid Request', HttpStatus.BAD_REQUEST);
}
// Look up the account requesting the reset (assumes UserPasswordResetDto exposes an `email` field).
const user =
userType === 'physician'
? await this.physicianRepo.findOne({ where: { email: dto.email } })
: await this.userRepo.findOne({ where: { email: dto.email } });
if (!user) {
throw new HttpException('Account not found', HttpStatus.NOT_FOUND);
}
return await this.mailerService
.sendMail({
to: {
name: null,
address: user.email,
},
from: {
name: this.configService.get('mail.fromName'),
address: this.configService.get('mail.fromEmail'),
},
subject: 'Password reset',
template: 'password_reset',
context: {
buttonUrl: this.getTemporarySignedUrl(user, userType),
appName: this.configService.get('app.name'),
supportEmail: this.configService.get('app.supportEmail'),
},
})
.then(success => {
return {
message: 'A password reset link has been sent to your e-mail.',
statusCode: 200,
status: 'success',
};
})
.catch(err => {
// console.log(err);
throw new HttpException(
'Password reset not successful, unable to send the reset e-mail.',
500,
);
});
}
private _preparePayload(user: any, userType?: string): any {
const { id, email, firstName, lastName } = user;
let _data: any = {};
if (userType === 'user') {
_data = user.profile ? { firstName: user.profile.firstName, lastName: user.profile.lastName, email } : {email};
}
if (userType === 'physician') {
_data = user.profile ? { firstName: user.profile.firstName, lastName: user.profile.lastName, email } : {email};
}
if (userType === 'admin') {
_data = { firstName, lastName, email };
}
const data = this.encrypter.encrypt(_data);
const encUserId = this.encrypter.encryptString(id.toString());
return { data, sub: encUserId };
}
}
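// Illustrative usage sketch (not part of the original service): how a login endpoint
// is expected to call AuthService.login(). The returned access_token carries the
// encrypted { data, sub } payload built by _preparePayload(), which the corresponding
// passport strategy (e.g. AdminStrategy.validate()) later decrypts. The credentials
// below are placeholders.
export async function authLoginExample(authService: AuthService): Promise<void> {
  const result = await authService.login(
    { username: 'admin@example.com', password: 'change-me' },
    'admin',
  );
  console.log(result.expires_in, result.access_token);
}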
<file_sep>import { ApiProperty } from '@nestjs/swagger';
import { IsNotEmpty, MaxLength } from 'class-validator';
import {
Entity,
Column,
JoinColumn,
BeforeInsert,
PrimaryGeneratedColumn,
CreateDateColumn,
UpdateDateColumn,
DeleteDateColumn,
BeforeUpdate,
BeforeRemove,
OneToOne,
} from 'typeorm';
import { Physician } from './physician.entity';
@Entity('physician_profiles')
export class PhysicianProfile {
@PrimaryGeneratedColumn({
type: 'bigint',
})
id?: number;
@OneToOne(
() => Physician,
physician => physician.id,
{
eager: false,
onDelete: 'CASCADE',
onUpdate: 'NO ACTION',
},
)
@JoinColumn({ name: 'physician_id' })
physician: Physician | number;
@ApiProperty({ description: 'First name'})
@IsNotEmpty({message: 'First name is required'})
@Column({ length: 100, name: 'first_name' })
firstName: string;
@ApiProperty({ description: 'Last name'})
@IsNotEmpty({message: 'Last name is required'})
@Column({ length: 100, name: 'last_name' })
lastName: string;
@ApiProperty({ description: 'Phone number'})
@IsNotEmpty({message: 'Phone number is required'})
@MaxLength(50, {message: 'Phone number is too long'})
@Column({ length: 50 })
phone: string;
@ApiProperty({ description: 'Physician\'s specialty, e.g DERMATOLOGY, OBSTETRICS AND GYNECOLOGY etc'})
@Column({ type: 'text', nullable: true })
specialty?: string;
@ApiProperty({ description: 'Employer can be Private Practice or State/Federal Government'})
@Column({ type: 'text', nullable: true })
employer?: string;
@ApiProperty({ description: 'Current hospital of practice'})
@Column({ type: 'text', nullable: true })
hospital?: string;
@ApiProperty({ description: 'Tell us about yourself'})
@Column({ type: 'text', nullable: true })
biography?: string;
@ApiProperty({ description: 'Are you licensed?'})
@Column({ type: 'tinyint', default: 0 })
licensed?: number;
@ApiProperty({ description: 'License issuance date'})
@Column({ name: 'license_issue_date', type: 'date', nullable: true })
licenseIssueDate: Date;
@ApiProperty({ description: 'License expiry date'})
@Column({ name: 'validity_date', type: 'date', nullable: true })
validityDate: Date;
@ApiProperty({ description: '(Internal use) Specify if license has been verified'})
@Column({ name: 'license_verified', type: 'tinyint', default: 0 })
licenseVerified: number;
@Column({ name: 'verified_by', nullable: true })
verifiedBy: string;
@ApiProperty({ description: 'Default public post mode'})
@Column({ name: 'is_anonymous', type: 'tinyint', default: 0 })
isAnonymous?: number;
@CreateDateColumn({
name: 'created_at',
type: 'timestamp',
})
createdAt?: Date;
@UpdateDateColumn({
name: 'updated_at',
type: 'timestamp',
})
updatedAt?: Date;
@DeleteDateColumn({
name: 'deleted_at',
type: 'timestamp',
})
deletedAt?: Date;
@BeforeInsert()
beforeCreate(): null {
this.createdAt = new Date();
this.updatedAt = this.createdAt;
return null;
}
@BeforeUpdate()
beforeUpdate(): null {
this.updatedAt = new Date();
return null;
}
@BeforeRemove()
beforeDestroy(): null {
this.deletedAt = new Date();
return;
}
}
<file_sep>/* eslint-disable @typescript-eslint/explicit-module-boundary-types */
import { PhysicianAuthGuard } from './../../auth/guards/physician.guard';
import { ApiBearerAuth, ApiNotFoundResponse, ApiOkResponse, ApiTags, ApiUnprocessableEntityResponse } from '@nestjs/swagger';
import { PhysiciansService } from './../../admin/physicians/physicians.service';
import { PhysicianRegistrationDto } from './dto/physician-registration.dto';
import { Body, Controller, NotFoundException, Post, Request, UnprocessableEntityException, UseGuards } from '@nestjs/common';
import { Physician, PhysicianProfile } from './../../shared/database';
import { plainToClass } from 'class-transformer';
import { PasswordUpdateDto } from './../../shared/dto/password-update.dto';
import { CompletePhysicianRegistrationDto } from './dto/complete-physician-registration.dto';
import { PhysicianRegistrationUpdateResponseDto } from './dto/physician-registration-update_response.dto';
interface PhysicianAPIResponse {
message: string,
success?: boolean,
error?:boolean,
data?: any
}
@ApiTags('Physicians')
@Controller('physician')
export class RegistrationController {
constructor(
public service: PhysiciansService
){}
@Post('registration')
async registerPhysician(
@Body() dto: PhysicianRegistrationDto,
): Promise<any> {
const data: Physician = plainToClass(Physician, dto);
const user = await this.service.repository.insert(data);
if (!user) {
throw new UnprocessableEntityException('Registration failed')
}
return {
message: 'Physician Registration successful',
success: true,
};
}
@UseGuards(PhysicianAuthGuard)
@ApiBearerAuth('JWT')
@ApiUnprocessableEntityResponse({
description: 'Returns this error when operation fails'
})
@ApiNotFoundResponse({
description: 'Returns this error when user is not found'
})
@ApiOkResponse({
description: 'Returns complete Physician profile on successful operation',
type: PhysicianRegistrationUpdateResponseDto
})
@Post('complete-registration')
async completeRegistration(
@Request() req: any,
@Body() dto: CompletePhysicianRegistrationDto,
): Promise<any> {
const user: Physician = <Physician>req.user;
if (!user) {
throw new NotFoundException('Physician not found')
}
try {
const profile = new PhysicianProfile();
profile.firstName = dto.firstName;
profile.lastName = dto.lastName;
profile.biography = dto.biography;
profile.phone = dto.phone;
profile.licensed = dto.licensed;
profile.specialty = dto.specialty;
profile.employer = dto.employer;
profile.hospital = dto.hospital;
profile.licenseIssueDate = dto.licenseIssueDate;
profile.validityDate = dto.validityDate;
profile.isAnonymous = dto.isAnonymous;
user.avatar = dto.avatar;
user.profile = profile;
await this.service.repository.save(user);
return {
message: 'Physician registration updated successfully',
success: true,
data: user
};
} catch (ex) {
throw new UnprocessableEntityException('Operation failed')
}
}
@UseGuards(PhysicianAuthGuard)
@ApiBearerAuth('JWT')
@ApiUnprocessableEntityResponse({
description: 'Returns this error when operation fails'
})
@ApiNotFoundResponse({
description: 'Returns this error when user is not found'
})
@ApiOkResponse({
description: 'Returns complete message on successful operation',
type: 'Json response',
})
@Post('update-password')
async updateProfile(
@Request() req: any,
@Body() dto: PasswordUpdateDto,
): Promise<any> {
const user: Physician = <Physician>req.user;
if (!user) {
throw new NotFoundException('Physician not found')
}
try {
user.password = <PASSWORD>;
await this.service.repository.save(user);
return {
message: 'Physician password updated successfully',
success: true,
} as PhysicianAPIResponse;
} catch (ex) {
throw new UnprocessableEntityException('Operation failed')
}
}
}
<file_sep>import { Module } from '@nestjs/common';
import { TypeOrmModule } from '@nestjs/typeorm';
import { AdminUsersService } from './admin-users.service';
import { Permission, Role, Admin, DatabaseModule } from '../../shared/database';
import { AdminUsersController } from './admin-users.controller';
import { RolesService } from '../roles/roles.service';
@Module({
imports: [
DatabaseModule,
TypeOrmModule.forFeature([Admin, Role, Permission]),
],
providers: [AdminUsersService, RolesService],
controllers: [AdminUsersController],
})
export class AdminUsersModule {}
<file_sep>
<p align="left">Confyde API built with Nest - a progressive <a href="http://nodejs.org" target="blank">Node.js</a> framework for building efficient and scalable server-side applications.</p>
## Description
[Nest](https://github.com/nestjs/nest) framework TypeScript starter repository.
## Installation
```bash
$ npm install
```
## Configure the App
* Create the database for the application, copy `.env.example` to `.env`
or run
```
$ cp .env.example .env
```
* Update the following sections to reflect your database setup
```
DB_HOST=127.0.0.1
DB_PORT=3306
DB_DATABASE=confyde
DB_USERNAME=dbuser
DB_PASSWORD=<PASSWORD>
and
TYPEORM_HOST = 127.0.0.1
TYPEORM_PORT = 3306
TYPEORM_DATABASE = confyde
TYPEORM_USERNAME = dbuser
TYPEORM_PASSWORD = <PASSWORD>
```
## Running the app
```bash
# development
$ npm run start
# watch mode
$ npm run start:dev
# production mode
$ npm run start:prod
```
## Preview Open API documentation in Swagger
Visit `http://localhost:4000/api/docs`
## Test
```bash
# unit tests
$ npm run test
# e2e tests
$ npm run test:e2e
# test coverage
$ npm run test:cov
```
<file_sep>import { ApiProperty } from '@nestjs/swagger';
import {
IsEmail,
IsNotEmpty,
MaxLength,
} from 'class-validator';
import { IsEqual, IsUnique } from '../../../shared/validations';
import { User } from '../../../shared/database';
export class UserRegistrationDto {
@ApiProperty({ description: 'Email address'})
@IsUnique(User)
@MaxLength(100, { message: 'Email too Long' })
@IsNotEmpty({ message: 'Email is required' })
@IsEmail({}, { message: 'Enter a valid email address' })
email: string;
@ApiProperty({ description: 'Username'})
@IsUnique(User)
@MaxLength(20, { message: 'Username too Long' })
@IsNotEmpty({ message: 'Username is required' })
username: string;
@ApiProperty({ description: 'A Secure password'})
@IsNotEmpty({ message: 'Password is required' })
password: string;
@ApiProperty({ description: 'Confirm Password'})
@IsEqual('password', { message: 'Passwords do not match' })
@IsNotEmpty({ message: 'Password Confirmation is required' })
passwordConfirmation: string;
}
<file_sep>import { ApiProperty, PartialType } from '@nestjs/swagger';
import { IsEmail, IsNotEmpty } from 'class-validator';
import { CreateUserDto } from './create-user.dto';
export class UpdateUserDto extends PartialType(CreateUserDto) {
@ApiProperty()
@IsNotEmpty({ message: 'Email is required' })
@IsEmail({}, { message: 'Enter a valid email address' })
email: string;
}
<file_sep>import { Module } from '@nestjs/common';
import { AppController } from './app.controller';
import { AppService } from './app.service';
import { PassportModule } from '@nestjs/passport';
import { DatabaseModule } from './shared/database/database.module';
import { ConfigModule, ConfigService } from '@nestjs/config';
import { MailerModule } from '@nestjs-modules/mailer';
import { HandlebarsAdapter } from '@nestjs-modules/mailer/dist/adapters/handlebars.adapter';
import { AdminModule } from './admin/admin.module';
import { AuthModule } from './auth/auth.module';
// Import Configuration files
import databaseConfig from './shared/config/database.config';
import appConfig from './shared/config/app.config';
import mailConfig from './shared/config/mail.config';
import * as handlebars from 'handlebars';
import { Encrypter } from './shared/encrypter';
import { FileUploaderModule } from './shared/file-uploader/file-uploader.module';
import { UserModule } from './user/user.module';
import { PhysicianModule } from './physician/physician.module';
const helpers = (configService: ConfigService) => ({
config(value: string) {
const context = {
value: configService.get(value),
};
const template = handlebars.compile('{{ value }}');
const compiled = template(context);
return compiled;
},
});
@Module({
imports: [
ConfigModule.forRoot({
isGlobal: true,
load: [databaseConfig, appConfig, mailConfig],
}),
MailerModule.forRootAsync({
useFactory: (configService: ConfigService) => ({
transport: {
host: configService.get('mail.host'),
port: configService.get('mail.port'),
secure: false, // configService.get('mail.secure'), // upgrade later with STARTTLS
// requireTLS: true,
auth: {
user: configService.get('mail.username'),
pass: configService.get('mail.password'),
},
tls: {
ciphers: 'SSLv3',
rejectUnauthorized: false,
},
// debug: true,
// logger: true,
},
defaults: {
from: `"${configService.get('mail.from_name')}" <${configService.get(
'mail.from_email',
)}>`,
},
preview: true,
template: {
dir: `${process.cwd()}/templates/email/`,
adapter: new HandlebarsAdapter(helpers(configService)),
options: {
strict: true,
},
},
}),
inject: [ConfigService],
}),
PassportModule,
DatabaseModule,
AdminModule,
AuthModule,
FileUploaderModule,
UserModule,
PhysicianModule,
],
controllers: [AppController],
providers: [AppService, Encrypter],
exports: [Encrypter],
})
export class AppModule {}
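// Illustrative note (not part of the original file): the `config` helper registered
// with the HandlebarsAdapter above can be used inside the e-mail templates under
// templates/email/, e.g. `{{config "app.name"}}`, which renders the value returned
// by ConfigService for that key.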
<file_sep>import { ApiProperty } from '@nestjs/swagger';
import { IsNotEmpty, IsString } from 'class-validator';
export class GeneratePermissionsDto {
@ApiProperty({
description: 'Name of Entity or Database table',
})
@IsNotEmpty({ message: 'Entity/Table name is required' })
entity: string;
@ApiProperty({
type: [String] || String,
})
@IsString({ each: true, message: 'Valid Crud Action(s) is required' })
@IsNotEmpty({ message: 'Crud Action is required' })
crud: [string] | string;
}
<file_sep>import {
Entity,
Column,
ManyToMany,
JoinTable,
PrimaryGeneratedColumn,
CreateDateColumn,
UpdateDateColumn,
BeforeInsert,
BeforeUpdate,
} from 'typeorm';
import { Permission } from './permission.entity';
@Entity('roles')
export class Role {
@PrimaryGeneratedColumn({
type: 'bigint',
})
id: number;
@Column({ length: 100, unique: true })
name: string;
@Column({ length: 100, name: 'display_name' })
displayName: string;
@ManyToMany(() => Permission, { eager: true, cascade: true })
@JoinTable({
name: 'role_permissions', // table name for the junction table of this relation
joinColumn: {
name: 'role_id',
},
inverseJoinColumn: {
name: 'permission_id',
},
})
permissions: Permission[];
@CreateDateColumn({
name: 'created_at',
type: 'timestamp',
})
createdAt: Date;
@UpdateDateColumn({
name: 'updated_at',
type: 'timestamp',
})
updatedAt: Date;
@BeforeInsert()
beforeCreate(): null {
this.createdAt = new Date();
this.updatedAt = this.createdAt;
return null;
}
@BeforeUpdate()
beforeUpdate(): null {
this.updatedAt = new Date();
return null;
}
}
<file_sep>import { ApiProperty } from '@nestjs/swagger';
import {
Entity,
Column,
JoinColumn,
BeforeInsert,
PrimaryGeneratedColumn,
CreateDateColumn,
UpdateDateColumn,
DeleteDateColumn,
BeforeUpdate,
BeforeRemove,
OneToOne,
} from 'typeorm';
import { User } from './user.entity';
@Entity('user_profiles')
export class UserProfile {
@PrimaryGeneratedColumn({
type: 'bigint',
})
id?: number;
@OneToOne(
() => User,
user => user.id,
{
eager: false,
onDelete: 'CASCADE',
onUpdate: 'NO ACTION'
},
)
@JoinColumn({ name: 'user_id' })
user: User | number;
@ApiProperty()
@Column({ length: 100, name: 'first_name' })
firstName: string;
@ApiProperty()
@Column({ length: 100, name: 'last_name' })
lastName: string;
@ApiProperty()
@Column({ length: 50 })
phone: string;
@ApiProperty()
@Column({ type: 'text', nullable: true })
biography?: string;
@ApiProperty()
@Column({ name: 'last_treated_ailment', type: 'text', nullable: true })
lastTreatedAilment?: string;
@ApiProperty()
@Column({ name: 'is_anonymous', type: 'tinyint', default: 0 })
isAnonymous?: number;
@CreateDateColumn({
name: 'created_at',
type: 'timestamp',
})
createdAt?: Date;
@UpdateDateColumn({
name: 'updated_at',
type: 'timestamp',
})
updatedAt?: Date;
@DeleteDateColumn({
name: 'deleted_at',
type: 'timestamp',
})
deletedAt?: Date;
@BeforeInsert()
beforeCreate(): null {
this.createdAt = new Date();
this.updatedAt = this.createdAt;
return null;
}
@BeforeUpdate()
beforeUpdate(): null {
this.updatedAt = new Date();
return null;
}
@BeforeRemove()
beforeDestroy(): null {
this.deletedAt = new Date();
return;
}
}
<file_sep>import { ApiProperty } from '@nestjs/swagger';
import {
IsEmail,
IsNotEmpty,
IsNumber,
MaxLength,
IsOptional,
Allow,
} from 'class-validator';
import { IsEqual, IsUnique } from '../../../shared/validations';
import { Admin } from '../../../shared/database';
export class CreateAdminDto {
@ApiProperty()
@MaxLength(50, { message: 'Firstname too Long' })
@IsNotEmpty({ message: 'Firstname is required' })
firstName: string;
@ApiProperty()
@MaxLength(50, { message: 'Lastname too Long' })
@IsNotEmpty({ message: 'Lastname is required' })
lastName: string;
@ApiProperty()
@IsUnique(Admin)
@MaxLength(50, { message: 'Email too Long' })
@IsNotEmpty({ message: 'Email is required' })
@IsEmail({}, { message: 'Enter a valid email address' })
email: string;
@ApiProperty()
@IsNotEmpty({ message: 'Password is required' })
password: string;
@ApiProperty()
@IsEqual('password', { message: 'Passwords do not match' })
@IsNotEmpty({ message: 'Password Confirmation is required' })
passwordConfirmation: string;
@ApiProperty()
@IsOptional()
avatar: string;
@ApiProperty({ type: Number })
@IsOptional()
@IsNumber({}, { message: 'Invalid Role' })
role: number;
@ApiProperty({ type: [Number] })
@IsOptional()
@Allow()
roles: number[];
@ApiProperty({ type: Number })
@IsNotEmpty({ message: 'Status is required' })
status: number;
}
<file_sep>import slug from 'limax';
import { readFileSync, existsSync, mkdirSync, writeFileSync } from 'fs';
import * as crypto from 'crypto';
import { format as FormatUrl } from 'url';
import { ConnectionOptions } from 'typeorm';
const titleCase = (str: string) => {
return str
.toLowerCase()
.split(' ')
.map(word => {
return word.charAt(0).toUpperCase() + word.slice(1);
})
.join(' ');
};
const generateKeys = (path: string, key: string) => {
const { publicKey, privateKey } = crypto.generateKeyPairSync('rsa', {
modulusLength: 4096,
publicKeyEncoding: {
type: 'spki',
format: 'pem',
},
privateKeyEncoding: {
type: 'pkcs8',
format: 'pem',
cipher: 'aes-256-cbc',
passphrase: key,
},
});
if (privateKey && publicKey) {
writeFileSync(`${path}/jwt-private.key`, privateKey);
writeFileSync(`${path}/jwt-public.key`, publicKey);
}
};
const ensureKeys = (appKey?: string, force?: boolean) => {
const path = `${process.cwd()}/.keys`;
if (!existsSync(path)) {
mkdirSync(path);
}
if (
(!existsSync(`${path}/jwt-private.key`) &&
!existsSync(`${path}/jwt-public.key`)) ||
force
) {
generateKeys(path, appKey);
}
};
const getKey = (keyType: string): string => {
ensureKeys();
if (keyType === 'private') {
return readFileSync(`${process.cwd()}/.keys/jwt-private.key`).toString();
}
if (keyType === 'public') {
return readFileSync(`${process.cwd()}/.keys/jwt-public.key`).toString();
}
return null;
};
// const encryptData = (value: string, key: Buffer): string => {
// const cipher = crypto.createCipheriv(algorithm, key, cryptoIv);
// let encrypted = cipher.update(value, 'utf8', 'hex');
// encrypted += cipher.final('hex');
// return encrypted;
// };
// const decryptData = (value: string, key: Buffer): string => {
// const cipher = crypto.createDecipheriv(algorithm, key, cryptoIv);
// let decrypted = cipher.update(value, 'hex', 'utf8');
// decrypted += cipher.final('utf8');
// return decrypted;
// };
export const Helpers = {
titleCase,
getKey,
ensureKeys,
};
export const slugify = slug;
export const cleanLicensePlate = (value: string): string => {
return value
.toUpperCase()
.split(' ')
.join('')
.replace(' ', '')
.replace('-', '')
.replace('_', '');
};
/**
* Return a timestamp in seconds, optionally adding extra time to it.
*
* @param {Number} ttl Extra time to add, in minutes (converted to seconds internally)
* @return {Number} A timestamp in seconds
*/
export const now = (ttl?: number): number => {
if (ttl === undefined || ttl === null) {
ttl = 0;
} else {
ttl = ttl * 60;
}
return Math.floor(Date.now() / 1000) + ttl;
};
export const toBool = (value: any): boolean => {
// Coerce to string first so numeric values (e.g. 1) do not crash on toLowerCase().
const v = String(value).toLowerCase();
return /true/i.test(v) || v === '1' || v === 'yes';
};
export const route = () => {
const root = (frontend?: boolean): string => {
return FormatUrl(
new URL(
!frontend
? [process.env.APP_URL, process.env.APP_BASE_PATH].join('/')
: process.env.APP_FRONTEND_URL,
),
);
};
const toFullUrl = (path: string, frontend?: boolean, relative?: boolean) => {
const urlChunks = [];
if (!relative) urlChunks.push(root(frontend));
urlChunks.push(path);
return FormatUrl(new URL(urlChunks.join('/')));
};
return { root, toFullUrl };
};
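// Illustrative usage sketch (not part of the original file): building an absolute API
// URL with route(). The example assumes APP_URL=https://api.example.com and
// APP_BASE_PATH=api are set in the environment.
export function routeUsageExample(): string {
  // -> "https://api.example.com/api/physician/auth/password-reset"
  return route().toFullUrl('physician/auth/password-reset', false, false);
}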
// export const TestDBConnectionInfo: ConnectionOptions = {
// type: 'sqlite',
// database: `${process.cwd()}/data/e2e-tests.sqlite`,
// logging: true,
// };
<file_sep>import {
Entity,
Column,
ManyToOne,
JoinColumn,
ManyToMany,
JoinTable,
BeforeInsert,
PrimaryGeneratedColumn,
CreateDateColumn,
UpdateDateColumn,
DeleteDateColumn,
BeforeUpdate,
BeforeRemove,
} from 'typeorm';
import * as bcrypt from 'bcrypt';
import { Role } from './role.entity';
@Entity('admins')
export class Admin {
@PrimaryGeneratedColumn({
type: 'bigint',
})
id?: number;
@Column({ length: 100, name: 'first_name' })
firstName: string;
@Column({ length: 100, name: 'last_name' })
lastName: string;
@Column({ length: 100, unique: true })
email: string;
@Column({ type: 'text', nullable: true })
avatar?: string;
@Column({ length: 191 })
password: string;
@Column({ type: 'tinyint', default: 1 })
status: number;
@ManyToOne(() => Role, { eager: false, cascade: true })
@JoinColumn({ name: 'role_id' })
role: Role | number;
@ManyToMany(() => Role, { eager: true, cascade: true })
@JoinTable({
name: 'admin_roles', // table name for the junction table of this relation
joinColumn: {
name: 'admin_id',
},
inverseJoinColumn: {
name: 'role_id',
},
})
roles: Role[] | number[];
@CreateDateColumn({
name: 'created_at',
type: 'timestamp',
})
createdAt?: Date;
@UpdateDateColumn({
name: 'updated_at',
type: 'timestamp',
})
updatedAt?: Date;
@DeleteDateColumn({
name: 'deleted_at',
type: 'timestamp',
})
deletedAt?: Date;
@BeforeInsert()
async beforeCreate() {
this.createdAt = new Date();
this.updatedAt = this.createdAt;
await this.hashPassword();
}
@BeforeUpdate()
async beforeUpdate() {
this.updatedAt = new Date();
await this.hashPassword();
}
@BeforeRemove()
beforeDestroy() {
this.deletedAt = new Date();
}
// @BeforeInsert()
async hashPassword() {
if (this.password) {
this.password = await bcrypt.hash(this.password, 10);
}
}
}
<file_sep>import {
Entity,
PrimaryGeneratedColumn,
Column,
CreateDateColumn,
UpdateDateColumn,
BeforeInsert,
BeforeUpdate,
} from 'typeorm';
@Entity('permissions')
export class Permission {
@PrimaryGeneratedColumn({
type: 'bigint',
})
id: number;
@Column({ length: 100, unique: true })
name: string;
@Column({ length: 100, name: 'display_name' })
displayName: string;
@Column({ length: 100, name: 'group_name', nullable: true })
groupName: string;
@CreateDateColumn({
name: 'created_at',
type: 'timestamp',
})
createdAt: Date;
@UpdateDateColumn({
name: 'updated_at',
type: 'timestamp',
})
updatedAt: Date;
@BeforeInsert()
beforeCreate(): null {
this.createdAt = new Date();
this.updatedAt = this.createdAt;
return;
}
@BeforeUpdate()
beforeUpdate(): null {
this.updatedAt = new Date();
return;
}
}
<file_sep>import { Controller, Patch, Post, UnprocessableEntityException, UseGuards } from '@nestjs/common';
import { ApiBearerAuth, ApiBody, ApiTags } from '@nestjs/swagger';
import { Crud, CrudController, CrudRequest, Override, ParsedBody, ParsedRequest } from '@nestjsx/crud';
import { plainToClass } from 'class-transformer';
import { AdminAuthGuard } from 'src/auth/guards/admin.guard';
import { ValidateUniqueParam } from 'src/shared/validations';
import { User, UserProfile } from './../../shared/database';
import { CreateUserDto } from './dto/create-user.dto';
import { UpdateUserDto } from './dto/update-user.dto';
import { UsersService } from './users.service';
@Crud({
model: {
type: User,
},
dto: {
create: CreateUserDto,
update: UpdateUserDto,
replace: UpdateUserDto,
},
routes: {
exclude: ['replaceOneBase', 'createManyBase'],
deleteOneBase: {
returnDeleted: false,
},
},
params: {
id: {
field: 'id',
type: 'string',
primary: true,
},
},
query: {
alwaysPaginate: true,
exclude: ['password', 'deletedAt'],
join: {
profile: {
// persist: ['id', 'name', 'display_name'],
exclude: ['createdAt', 'updatedAt'],
eager: true,
},
      medicalHistory: {
// persist: ['id', 'name', 'display_name'],
exclude: ['createdAt', 'updatedAt'],
eager: true,
},
},
},
})
@ApiTags('Users (Admin)')
@UseGuards(AdminAuthGuard)
@ApiBearerAuth('JWT')
@Controller('admin/users')
export class UsersController implements CrudController<User> {
constructor(
public service: UsersService,
) {}
get base(): CrudController<User> {
return this;
}
@Override()
@Post()
async createOne(
@ParsedRequest() req: CrudRequest,
@ParsedBody() dto: CreateUserDto,
): Promise<any> {
const data: User = plainToClass(User, dto);
const profile: UserProfile = dto.profile;
    if (!profile.firstName) {
      throw new UnprocessableEntityException({
        message: 'Validation error occurred',
        errors: {
          firstName: ['First name is required'],
        },
      });
    }
    if (!profile.lastName) {
      throw new UnprocessableEntityException({
        message: 'Validation error occurred',
        errors: {
          lastName: ['Last name is required'],
        },
      });
    }
    if (!profile.phone) {
      throw new UnprocessableEntityException({
        message: 'Validation error occurred',
        errors: {
          phone: ['Phone number is required'],
        },
      });
    }
return this.base.createOneBase(req, data);
}
@ApiBody({
type: UpdateUserDto,
})
@Override()
  @Patch(':id')
async updateOne(
@ParsedRequest() req: CrudRequest,
@ValidateUniqueParam({
name: 'email',
field: 'email',
entity: User,
mode: 'Update',
table: 'users',
})
uniqEmail: boolean,
@ParsedBody() dto: Partial<UpdateUserDto>,
): Promise<any> {
const data: User = plainToClass(User, dto);
const profile: UserProfile = data.profile;
    if (!profile.firstName) {
      throw new UnprocessableEntityException({
        message: 'Validation error occurred',
        errors: {
          firstName: ['First name is required'],
        },
      });
    }
    if (!profile.lastName) {
      throw new UnprocessableEntityException({
        message: 'Validation error occurred',
        errors: {
          lastName: ['Last name is required'],
        },
      });
    }
    if (!profile.phone) {
      throw new UnprocessableEntityException({
        message: 'Validation error occurred',
        errors: {
          phone: ['Phone number is required'],
        },
      });
    }
return this.base.updateOneBase(req, data);
}
}
<file_sep>import { ApiProperty, PartialType } from '@nestjs/swagger';
import { IsNotEmpty } from 'class-validator';
import { CreateRolesDto } from './create-roles.dto';
export class UpdateRolesDto extends PartialType(CreateRolesDto) {
@ApiProperty()
@IsNotEmpty({ message: 'Name is required' })
name: string;
@ApiProperty()
@IsNotEmpty({ message: 'Display name is required' })
displayName: string;
}
<file_sep>import {
ValidationArguments,
registerDecorator,
ValidationOptions,
} from 'class-validator';
// @ValidatorConstraint({ async: false })
// export class IsEqualTo implements ValidatorConstraintInterface {
// validate(text: string, args: ValidationArguments) {
// // tslint:disable-next-line: no-console
// console.log('EqualTo', text);
// return false;
// return text.length > 1 && text.length < 10; // for async validations you must return a Promise<boolean> here
// }
// defaultMessage(args: ValidationArguments) {
// return 'Text ($value) is too short or too long!';
// }
// }
// export function IsEqual(fieldToCompare: string, validationOptions?: ValidationOptions) {
// // tslint:disable-next-line: ban-types
// return (object: Object, propertyName: string) => {
// registerDecorator({
// name: 'equalTo',
// target: object.constructor,
// propertyName,
// options: validationOptions,
// constraints: [fieldToCompare],
// validator: IsEqualTo,
// });
// };
// }
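// Property decorator: validation passes only when the decorated property strictly equals the named sibling property.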
export const IsEqual = (
property: string,
validationOptions?: ValidationOptions,
) => {
return (object: any, propertyName: string): any => {
registerDecorator({
name: 'isEqual',
target: object.constructor,
propertyName,
constraints: [property],
options: validationOptions,
validator: {
validate(value: any, args: ValidationArguments) {
const [relatedPropertyName] = args.constraints;
const relatedValue = (args.object as any)[relatedPropertyName];
return typeof value === typeof relatedValue && value === relatedValue;
},
},
});
};
};
<file_sep>import { ApiProperty, PartialType } from '@nestjs/swagger';
import { IsEmail, IsNotEmpty } from 'class-validator';
import { CreatePhysicianDto } from './create-physician.dto';
export class UpdatePhysicianDto extends PartialType(CreatePhysicianDto) {
@ApiProperty()
@IsNotEmpty({ message: 'Email is required' })
@IsEmail({}, { message: 'Enter a valid email address' })
email: string;
@ApiProperty()
@IsNotEmpty({ message: 'Username is required' })
username: string;
}
<file_sep>import { ApiProperty } from '@nestjs/swagger';
import {
IsNotEmpty,
IsOptional,
} from 'class-validator';
export class CompleteUserRegistrationDto {
@ApiProperty({ description: 'First name'})
@IsNotEmpty({ message: 'First name is required' })
firstName: string;
@ApiProperty({ description: 'Last name'})
@IsNotEmpty({ message: 'Last name is required' })
lastName: string;
@ApiProperty({ description: 'Phone number'})
@IsNotEmpty({ message: 'Phone is required' })
phone: string;
@ApiProperty({ description: 'User Introduction', required: false})
@IsOptional()
biography: string;
@ApiProperty({ description: 'Stay Anonymous', type: Number})
  @IsNotEmpty({ message: 'Anonymity preference is required' })
isAnonymous: number;
@ApiProperty({ description: 'Last ailment treated', required: false})
@IsOptional()
lastTreatedAilment: string;
@ApiProperty({ description: 'Profile Picture/Avatar', required: false})
@IsOptional()
avatar?: string;
}
<file_sep>import { Module } from '@nestjs/common';
import { RegistrationModule } from './registration/registration.module';
@Module({
imports: [RegistrationModule]
})
export class UserModule {}
<file_sep>APP_NAME=Confyde
APP_ENV=local
APP_KEY=<KEY>
APP_DEBUG=true
APP_URL=http://localhost:4000
APP_BASE_PATH=api
APP_FRONTEND_URL=http://localhost:3000
PORT=4000
# DATABASE
DB_CONNECTION=mysql
DB_HOST=127.0.0.1
DB_PORT=3306
DB_DATABASE=confyde
DB_USERNAME=dbuser
DB_PASSWORD=<PASSWORD>
DB_PREFIX=
DB_LEGACY_SPATIAL=false # Set to true if using MySQL version below 8
TEST_DB_NAME=confyde_test
# DevelMail
MAIL_DRIVER=smtp
MAIL_HOST=smtp.develmail.com
MAIL_PORT=587
MAIL_USERNAME=
MAIL_PASSWORD=
MAIL_ENCRYPTION=ssl
MAIL_FROM_NAME="Confyde"
MAIL_FROM_EMAIL=<EMAIL>
MAIL_DEFAULT_FOOTER=Confyde Health Support Services, Africa
# DATABASE
TYPEORM_CONNECTION = mysql
TYPEORM_HOST = 127.0.0.1
TYPEORM_PORT = 3306
TYPEORM_DATABASE = confyde
TYPEORM_USERNAME = dbuser
TYPEORM_PASSWORD = <PASSWORD>
TYPEORM_ENTITY_PREFIX =
TYPEORM_SYNCHRONIZE = false
TYPEORM_LOGGING = true
TYPEORM_ENTITIES = ./dist/shared/database/entity/**/*.js
TYPEORM_MIGRATIONS = ./dist/shared/database/migrations/*.js
TYPEORM_ENTITIES_DIR = ./src/shared/database/entity
TYPEORM_MIGRATIONS_DIR = ./src/shared/database/migrations
<file_sep>import {
Entity,
Column,
JoinColumn,
BeforeInsert,
PrimaryGeneratedColumn,
CreateDateColumn,
UpdateDateColumn,
DeleteDateColumn,
BeforeUpdate,
BeforeRemove,
OneToOne,
} from 'typeorm';
import { User } from './user.entity';
@Entity('user_medical_data')
export class UserMedicalData {
@PrimaryGeneratedColumn({
type: 'bigint',
})
id?: number;
@OneToOne(
() => User,
user => user.id,
{
eager: false,
onDelete: 'CASCADE',
onUpdate: 'NO ACTION',
},
)
@JoinColumn({ name: 'user_id' })
user: User | number;
@Column({ length: 50, name: 'blood_group' })
bloodGroup: string;
@Column({ length: 50, name: 'genotype' })
genotype: string;
@Column({ name: 'is_allergic', type: 'tinyint', default: 0 })
isAllergic?: number;
@Column({ type: 'text', nullable: true })
  allergy?: string;
@CreateDateColumn({
name: 'created_at',
type: 'timestamp',
})
createdAt?: Date;
@UpdateDateColumn({
name: 'updated_at',
type: 'timestamp',
})
updatedAt?: Date;
@DeleteDateColumn({
name: 'deleted_at',
type: 'timestamp',
})
deletedAt?: Date;
@BeforeInsert()
beforeCreate(): null {
this.createdAt = new Date();
this.updatedAt = this.createdAt;
return null;
}
@BeforeUpdate()
beforeUpdate(): null {
this.updatedAt = new Date();
return null;
}
@BeforeRemove()
beforeDestroy(): null {
this.deletedAt = new Date();
return;
}
}
<file_sep>import { Injectable, HttpException, HttpStatus } from '@nestjs/common';
import { route, now } from './helpers';
import { Url, parse, format as FormatUrl } from 'url';
import { Encrypter } from './encrypter';
interface ISignerOptions {
ttl?: number;
method?: string;
expire?: number;
}
@Injectable()
export class UrlSigner {
ENC_CHARS = {
'+': '-',
'/': '_',
'=': '',
};
DEC_CHARS = {
'-': '+',
_: '/',
// '.': '=',
};
sigKey = 'sig';
expKey = 'expires';
expiry = 3600;
constructor(public encrypter: Encrypter) {}
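  // Hash the canonical URL: drop any existing signature param and sort the query string before HMAC-ing.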
public hash(url: URL) {
url.searchParams.delete(this.sigKey);
url.searchParams.sort();
const hmac: string = this.encrypter.hash_hmac('sha256', FormatUrl(url));
return hmac;
}
public sign(url: string, options?: ISignerOptions, params?: any) {
const ttl: number = (options && options.ttl) ?? null;
const expire: number = ttl && now(ttl);
    if (params && params.length) {
url += `/${params.join('/')}`;
}
const uri = new URL(url);
uri.searchParams.delete(this.expKey);
if (expire) uri.searchParams.append(this.expKey, expire.toString());
uri.searchParams.set(this.sigKey, this.hash(uri));
return FormatUrl(uri);
}
public verify(url: string, options?: ISignerOptions) {
if (!this.signatureNotExpired(url)) {
throw new HttpException(
'The requested Url has expired',
HttpStatus.BAD_REQUEST,
);
}
return this.validSignature(url);
}
public validSignature(url: string) {
const uri = new URL(url);
const signature = uri.searchParams.get(this.sigKey);
uri.searchParams.sort();
return this.encrypter.hash_equal(this.hash(uri), signature);
}
public signatureNotExpired(url: string) {
const uri = new URL(url);
let expire: any = uri.searchParams.get(this.expKey);
expire = expire ? parseInt(uri.searchParams.get(this.expKey), 10) : null;
return expire ? expire > Date.now() / 1000 : true;
}
/**
* encode base64 string url safe
* @param {String} base64 - base64 encoded string
* @return {String} url-safe-base64 encoded
*/
private encodeUrlSafe(base64: string): string {
return base64.replace(/[+/=]/g, m => this.ENC_CHARS[m]);
}
/**
* decode url-safe-base64 string to base64
* @param {String} safe - url-safe-base64 string
* @return {String} base64 encoded
*/
private decodeUrlSafe(safe: string): string {
    return safe.replace(/[-_]/g, m => this.DEC_CHARS[m]);
}
}
<file_sep>import { ApiProperty, PartialType } from '@nestjs/swagger';
import { IsNotEmpty } from 'class-validator';
import { CreatePermissionsDto } from './create-permissions.dto';
export class UpdatePermissionsDto extends PartialType(CreatePermissionsDto) {
@ApiProperty()
@IsNotEmpty({ message: 'Name is required' })
name: string;
@ApiProperty()
@IsNotEmpty({ message: 'Display name is required' })
displayName: string;
}
<file_sep>import { PhysicianAuthController } from './user-auths/physician-auth.controller';
import { UserAuthController } from './user-auths/user-auth.controller';
import { PhysicianStrategy } from './strategies/physician.strategy';
import { UserStrategy } from './strategies/user.strategy';
import { Module } from '@nestjs/common';
import { AuthService } from './auth.service';
import { DatabaseModule, Admin, User, Physician } from '../shared/database';
import { TypeOrmModule } from '@nestjs/typeorm';
import { PassportModule } from '@nestjs/passport';
import { JwtModule } from '@nestjs/jwt';
import { readFileSync } from 'fs';
import { AuthController } from './auth.controller';
import { ConfigModule, ConfigService } from '@nestjs/config';
import { Helpers } from '../shared/helpers';
import { Encrypter } from '../shared/encrypter';
import { AdminStrategy } from './strategies/admin.strategy';
import { AdminAuthController } from './user-auths/admin-auth.controller';
import { UrlSigner } from './../shared/url-signer';
@Module({
imports: [
DatabaseModule,
TypeOrmModule.forFeature([Admin, User, Physician]),
PassportModule.register({ defaultStrategy: 'customer' }),
JwtModule.registerAsync({
imports: [ConfigModule],
useFactory: async (configService: ConfigService) => {
const appKey = configService.get('app.key');
Helpers.ensureKeys(appKey);
return {
privateKey: {
key: readFileSync(
`${process.cwd()}/.keys/jwt-private.key`,
).toString(),
passphrase: appKey,
},
signOptions: { expiresIn: '1w', algorithm: 'RS256' },
};
},
inject: [ConfigService],
}),
// JwtModule.register({
// privateKey: readFileSync(`${process.cwd()}/.keys/jwt-private.key`).toString(),
// signOptions: { expiresIn: '60s', algorithm: 'RS256' },
// }),
],
providers: [AuthService, AdminStrategy, UserStrategy, PhysicianStrategy, Encrypter, UrlSigner],
controllers: [AuthController, AdminAuthController, UserAuthController, PhysicianAuthController],
})
export class AuthModule {}
<file_sep>import { Helpers } from '../../shared/helpers';
import { GeneratePermissionsDto } from './dto/generate-permissions.dto';
import {
Controller,
Patch,
Post,
UseInterceptors,
Body,
UseGuards,
} from '@nestjs/common';
import { ApiTags, ApiBody, ApiOperation, ApiBearerAuth } from '@nestjs/swagger';
import {
Crud,
CrudController,
Override,
CrudRequest,
ParsedRequest,
ParsedBody,
CrudRequestInterceptor,
CreateManyDto,
} from '@nestjsx/crud';
import { plainToClass } from 'class-transformer';
import { CreatePermissionsDto } from './dto/create-permissions.dto';
import { UpdatePermissionsDto } from './dto/update-permissions.dto';
import { Permission } from '../../shared/database';
import { PermissionsService } from './permissions.service';
import { AdminAuthGuard } from '../../auth/guards/admin.guard';
@Crud({
model: {
type: Permission,
},
dto: {
create: CreatePermissionsDto,
update: UpdatePermissionsDto,
},
routes: {
exclude: [
'replaceOneBase',
// 'createManyBase',
],
deleteOneBase: {
returnDeleted: false,
},
},
params: {
id: {
field: 'id',
type: 'string',
primary: true,
},
},
query: {
alwaysPaginate: true,
exclude: ['password', 'deletedAt'],
},
})
@ApiTags('Permissions (Admin)')
@UseGuards(AdminAuthGuard)
@ApiBearerAuth('JWT')
@Controller('admin/permissions')
export class PermissionsController implements CrudController<Permission> {
constructor(public service: PermissionsService) {}
get base(): CrudController<Permission> {
return this;
}
@ApiBody({
type: UpdatePermissionsDto,
})
@Override()
@Patch(':id')
async updateOne(
@ParsedRequest() req: CrudRequest,
@ParsedBody() dto: Partial<UpdatePermissionsDto>,
): Promise<any> {
const data: Permission = plainToClass(Permission, dto);
return this.base.updateOneBase(req, data);
}
@ApiBody({ type: GeneratePermissionsDto })
@ApiOperation({
summary:
'Generate CRUD Operation permissions (Browse Read Edit Add Delete)',
})
@UseInterceptors(CrudRequestInterceptor)
@Post('generate')
async generatePermissions(
@ParsedRequest() req: CrudRequest,
@Body() dto: GeneratePermissionsDto,
): Promise<any> {
const permissions = [];
const actions: string[] | string =
typeof dto.crud === 'string' && dto.crud === 'all'
? ['browse', 'read', 'edit', 'add', 'delete']
: dto.crud;
if (typeof actions === 'object') {
actions.slice(0).map<string>(action => {
const table = dto.entity.toLowerCase();
const name = `${action}-${table}`;
const displayName = Helpers.titleCase(`${action} ${table}`);
permissions.push({
name,
displayName,
});
return action;
});
}
const manyEntity: CreateManyDto = {
bulk: plainToClass(Permission, permissions),
};
return this.base.createManyBase(req, manyEntity);
}
}
<file_sep>import { Controller, Get, Query } from '@nestjs/common';
import { AppService } from './app.service';
import { Encrypter } from './shared/encrypter';
import { replaceInFile } from 'replace-in-file';
import { Helpers } from './shared/helpers';
@Controller()
export class AppController {
constructor(
private readonly appService: AppService,
private encrypter: Encrypter,
) {}
@Get()
getHello(): string {
return this.appService.getHello();
}
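  // Rotates the application key: generates a new base64 APP_KEY, rewrites it in .env and regenerates the JWT key pair (pass output=true to only return the key without persisting it).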
@Get('/keys')
async newAppKeys(
@Query('pass') pass: string,
@Query('output') output?: boolean,
): Promise<string> {
const akRegex = new RegExp(/APP_KEY?\w.*/);
if (pass !== '<PASSWORD>') {
return 'Wrong Password, please try again';
}
const newAppKey = this.encrypter.base64_encode(
this.encrypter.generateKey('AES-128-CBC').toString('hex'),
);
    if ((output as unknown as string) === 'true') {
return `New Application Key: ${newAppKey}, please keep it safe.`;
}
// const portRegex = new RegExp(/(PORT)=[0-9]{0,}/);
const options = {
files: [process.cwd() + '/.env'],
};
try {
await replaceInFile({
from: akRegex,
to: `APP_KEY=${newAppKey}`,
...options,
});
// tslint:disable-next-line: no-console
// console.log('Replacement results:', results);
// tslint:disable-next-line: no-console
// console.log('Generating new Public and Private Keys');
Helpers.ensureKeys(newAppKey, true);
// this.configService. // .reload();
// tslint:disable-next-line: no-console
// console.log('New Public and Private Keys Generated');
} catch (error) {
// tslint:disable-next-line: no-console
// console.error('Error occurred:', error);
}
return 'New App Keys Generated';
}
}
<file_sep>import { ApiProperty } from '@nestjs/swagger';
import { IsNotEmpty, IsEmail } from 'class-validator';
export class UserPasswordResetDto {
@ApiProperty({ description: 'Registered Email address' })
@IsNotEmpty({ message: 'Provide your registered email address' })
@IsEmail({ allow_ip_domain: false })
readonly email: string;
}
<file_sep>import { AdminAuthGuard } from '../../auth/guards/admin.guard';
import { Controller, Patch, UseGuards } from '@nestjs/common';
import { ApiTags, ApiBody, ApiBearerAuth } from '@nestjs/swagger';
import {
Crud,
CrudController,
Override,
CrudRequest,
ParsedRequest,
ParsedBody,
} from '@nestjsx/crud';
import { plainToClass } from 'class-transformer';
import { AdminUsersService } from './admin-users.service';
import { Admin } from '../../shared/database';
import { CreateAdminDto } from './dto/create-admin.dto';
import { UpdateAdminDto } from './dto/update-admin.dto';
import { RolesService } from '../roles/roles.service';
@Crud({
model: {
type: Admin,
},
dto: {
create: CreateAdminDto,
update: UpdateAdminDto,
replace: UpdateAdminDto,
},
routes: {
exclude: ['replaceOneBase', 'createManyBase'],
deleteOneBase: {
returnDeleted: false,
},
},
params: {
id: {
field: 'id',
type: 'string',
primary: true,
},
},
query: {
alwaysPaginate: true,
exclude: ['password', 'deletedAt'],
join: {
role: {
persist: ['id', 'name', 'display_name'],
exclude: ['createdAt', 'updatedAt'],
eager: true,
},
roles: {
persist: ['id', 'name', 'display_name'],
exclude: ['createdAt', 'updatedAt'],
eager: true,
},
},
},
})
@ApiTags('Admin Users (Admin)')
@UseGuards(AdminAuthGuard)
@ApiBearerAuth('JWT')
@Controller('admin/admin-users')
export class AdminUsersController implements CrudController<Admin> {
constructor(
public service: AdminUsersService,
private rolesService: RolesService,
) {}
get base(): CrudController<Admin> {
return this;
}
@ApiBody({
type: UpdateAdminDto,
})
@Override()
  @Patch(':id')
async updateOne(
@ParsedRequest() req: CrudRequest,
@ParsedBody() dto: Partial<UpdateAdminDto>,
): Promise<any> {
const data: Admin = plainToClass(Admin, dto);
const roleIds: number[] = dto.roles || [];
const role: number = dto.role;
data.role =
role > 0 ? await this.rolesService.repository.findOne(role) : null;
data.roles = roleIds.length
? await this.rolesService.repository.findByIds(roleIds)
: [];
return this.base.updateOneBase(req, data);
}
}
<file_sep>import { ApiProperty, PartialType } from '@nestjs/swagger';
import { IsEmail, IsNotEmpty } from 'class-validator';
import { CreateAdminDto } from './create-admin.dto';
export class UpdateAdminDto extends PartialType(CreateAdminDto) {
@ApiProperty()
@IsNotEmpty({ message: 'Email is required' })
@IsEmail({}, { message: 'Enter a valid email address' })
email: string;
}
<file_sep>import { Injectable } from '@nestjs/common';
import { Repository } from 'typeorm';
import { InjectRepository } from '@nestjs/typeorm';
import { TypeOrmCrudService } from '@nestjsx/crud-typeorm';
import { Admin } from '../../shared/database';
@Injectable()
export class AdminUsersService extends TypeOrmCrudService<Admin> {
constructor(
@InjectRepository(Admin)
private readonly repository: Repository<Admin>,
) {
super(repository);
}
}
<file_sep>import { Controller, Body, Post, UseGuards, Get, Req } from '@nestjs/common';
import { AuthService } from '../auth.service';
import {
ApiBody,
ApiTags,
ApiOkResponse,
ApiUnauthorizedResponse,
ApiBearerAuth,
} from '@nestjs/swagger';
import { UserLoginDto } from '../../shared/dto/user-login.dto';
import { STRINGS } from '../../shared/constants';
import { UserAuthGuard } from '../guards/user.guard';
import { User } from '../../shared/database';
@ApiTags('Users')
@Controller('user/auth')
export class UserAuthController {
constructor(private readonly authService: AuthService) {}
@ApiBody({
type: UserLoginDto,
})
@ApiOkResponse({
description: 'Returns JSON Object with access-token and expiry',
})
@ApiUnauthorizedResponse({
description: `Return JSON Object with {statusCode: 401, message: ${STRINGS.auth.login.invalid}}`,
})
@Post('login')
async login(@Body() userLoginDto: UserLoginDto): Promise<any> {
return await this.authService.login(userLoginDto, 'user');
}
@UseGuards(UserAuthGuard)
@ApiBearerAuth('JWT')
@Get('profile')
async profile(@Req() req: any): Promise<User> {
const user = <User>req.user;
delete user.password;
return user;
}
}
<file_sep>import { ApiProperty } from '@nestjs/swagger';
import { IsNotEmpty } from 'class-validator';
import { IsEqual } from '../validations';
export class PasswordUpdateDto {
@ApiProperty()
@IsNotEmpty({ message: 'Password is required' })
password: string;
@ApiProperty()
@IsNotEmpty({ message: 'Password Confirmation is required' })
  @IsEqual('password', { message: 'Passwords do not match' })
passwordConfirmation: string;
}
<file_sep>import { Injectable } from '@nestjs/common';
import { Repository } from 'typeorm';
import { InjectRepository } from '@nestjs/typeorm';
import { TypeOrmCrudService } from '@nestjsx/crud-typeorm';
import { Permission } from '../../shared/database';
@Injectable()
export class PermissionsService extends TypeOrmCrudService<Permission> {
constructor(
@InjectRepository(Permission)
public readonly repository: Repository<Permission>,
) {
super(repository);
}
}
<file_sep>import { UsersService } from './../../admin/users/users.service';
import { TypeOrmModule } from '@nestjs/typeorm';
import { DatabaseModule } from './../../shared/database/database.module';
import { Module } from '@nestjs/common';
import { RegistrationController } from './registration.controller';
import { User, UserProfile } from './../../shared/database';
@Module({
imports: [
DatabaseModule,
TypeOrmModule.forFeature([
User,
UserProfile,
])
],
controllers: [RegistrationController],
providers: [UsersService]
})
export class RegistrationModule {}
<file_sep>import {
Controller,
Req,
Body,
UploadedFile,
UseInterceptors,
Post,
} from '@nestjs/common';
import { ApiTags, ApiBody, ApiConsumes } from '@nestjs/swagger';
import { FileUploadDto } from './dto/fileupload.dto';
// import { ApiImplicitFormData } from '../helpers';
import { FileUploadingUtils } from '../file_utils';
import moveFile from 'move-file';
const uploadPath = './uploads/media';
@ApiTags('Media Uploads')
@Controller()
export class FileUploaderController {
@ApiBody({
type: FileUploadDto,
})
@UseInterceptors(
FileUploadingUtils.singleImageUploader('file', null, './uploads/tmp'),
)
@ApiConsumes('multipart/form-data')
@Post('system/media-upload')
createOne(@Body() dto: FileUploadDto, @UploadedFile() file: any) {
const newPath = `${uploadPath}/${dto.entityType.toLowerCase()}/${dto.fileType.toLowerCase()}/${
file.filename
}`;
// tslint:disable-next-line: no-console
// console.log(dto, file);
if (file.path) {
moveFile(file.path, newPath);
}
return {
message: 'File Uploaded Successfully',
path: newPath.replace('./', ''),
};
}
}
<file_sep>import { ApiProperty } from '@nestjs/swagger';
import {
IsEmail,
IsNotEmpty,
MaxLength,
IsOptional,
} from 'class-validator';
import { IsEqual, IsUnique } from '../../../shared/validations';
import { User, UserProfile } from '../../../shared/database';
export class CreateUserDto {
@ApiProperty({ description: 'Email address'})
@IsUnique(User)
@MaxLength(100, { message: 'Email too Long' })
@IsNotEmpty({ message: 'Email is required' })
@IsEmail({}, { message: 'Enter a valid email address' })
email: string;
@ApiProperty({ description: 'Username'})
@IsUnique(User)
@MaxLength(20, { message: 'Username too Long' })
@IsNotEmpty({ message: 'Username is required' })
username: string;
@ApiProperty({ description: 'A Secure password'})
@IsNotEmpty({ message: 'Password is required' })
password: string;
@ApiProperty({ description: 'Confirm Password'})
  @IsEqual('password', { message: 'Passwords do not match' })
@IsNotEmpty({ message: 'Password Confirmation is required' })
passwordConfirmation: string;
@ApiProperty({ description: 'Profile picture (optional)'})
@IsOptional()
avatar: string;
@ApiProperty({description: 'Account status (optional)', type: Number })
@IsNotEmpty({ message: 'Status is required' })
status: number;
@ApiProperty({ description: 'User Profile Data', type: UserProfile })
@IsNotEmpty({ message: 'User Profile is required'})
profile: UserProfile;
}
<file_sep>export { IsUnique, ValidateUniqueParam } from './custom_validations';
export { IsEqual } from './equalto_validation';
<file_sep>import { ApiProperty } from '@nestjs/swagger';
import { IsNotEmpty, MaxLength } from 'class-validator';
export class FileUploadDto {
@ApiProperty({
description:
'Entity types are customer, admin, event, attendant, operator, client',
})
@IsNotEmpty({ message: 'Entity type is required' })
entityType: string;
// @ApiProperty({ description: 'Entity ID can be the record ID'})
// @IsNotEmpty({ message: 'Entity Name is required' })
// @MaxLength(50, { message: 'User ID too Long' })
// entityId: string;
@ApiProperty()
@IsNotEmpty({ message: 'File type is required' })
fileType: string;
@ApiProperty({
type: 'file',
format: 'binary',
description: 'File is required',
})
file: string;
}
<file_sep>import { Controller, Body, Post, UseGuards, Get, Req } from '@nestjs/common';
import { AuthService } from '../auth.service';
import {
ApiBody,
ApiTags,
ApiOkResponse,
ApiUnauthorizedResponse,
ApiBearerAuth,
} from '@nestjs/swagger';
import { UserLoginDto } from '../../shared/dto/user-login.dto';
import { STRINGS } from '../../shared/constants';
import { AdminAuthGuard } from '../guards/admin.guard';
import { Admin } from '../../shared/database';
@ApiTags('Admin - Authentication')
@Controller('admin/auth')
export class AdminAuthController {
constructor(private readonly authService: AuthService) {}
@ApiBody({
type: UserLoginDto,
})
@ApiOkResponse({
description: 'Returns JSON Object with access-token and expiry',
})
@ApiUnauthorizedResponse({
description: `Return JSON Object with {statusCode: 401, message: ${STRINGS.auth.login.invalid}}`,
})
@Post('login')
async login(@Body() userLoginDto: UserLoginDto): Promise<any> {
return await this.authService.login(userLoginDto, 'admin');
}
@UseGuards(AdminAuthGuard)
@ApiBearerAuth('JWT')
@Get('profile')
async profile(@Req() req: any): Promise<any> {
const user = <Admin>req.user;
delete user.password;
return user;
}
}
<file_sep>import { ApiProperty } from '@nestjs/swagger';
import {
IsNotEmpty,
IsOptional,
} from 'class-validator';
export class CompletePhysicianRegistrationDto {
@ApiProperty({ description: 'First name'})
@IsNotEmpty({ message: 'First name is required' })
firstName: string;
@ApiProperty({ description: 'Last name'})
@IsNotEmpty({ message: 'Last name is required' })
lastName: string;
@ApiProperty({ description: 'Phone number'})
@IsNotEmpty({ message: 'Phone is required' })
phone: string;
@ApiProperty({ description: 'Physician\'s specialty, e.g DERMATOLOGY, OBSTETRICS AND GYNECOLOGY etc'})
@IsOptional()
specialty?: string;
@ApiProperty({ description: 'Employer can be Private Practise or State/Federal Government'})
@IsOptional()
employer?: string;
@ApiProperty({ description: 'Current Hospital of practise'})
@IsOptional()
hospital?: string;
@ApiProperty({ description: 'Physician licensed?'})
@IsOptional()
licensed?: number;
@ApiProperty({ description: 'License issuance date'})
@IsOptional()
licenseIssueDate: Date;
@ApiProperty({ description: 'License expiry date'})
@IsOptional()
validityDate: Date;
@ApiProperty({ description: 'Physician Introduction', required: false})
@IsOptional()
biography: string;
@ApiProperty({ description: 'Stay Anonymous', type: Number})
  @IsNotEmpty({ message: 'Anonymity preference is required' })
isAnonymous: number;
@ApiProperty({ description: 'Profile Picture/Avatar', required: false})
@IsOptional()
avatar?: string;
}
<file_sep>import { registerAs } from '@nestjs/config';
export default registerAs('mail', () => ({
driver: process.env.MAIL_DRIVER || 'smtp',
host: process.env.MAIL_HOST || 'local',
  port: parseInt(process.env.MAIL_PORT, 10) || 25,
username: process.env.MAIL_USERNAME,
password: <PASSWORD>,
secure: process.env.MAIL_ENCRYPTION === 'ssl',
fromName: process.env.MAIL_FROM_NAME,
fromEmail: process.env.MAIL_FROM_EMAIL,
footer:
process.env.MAIL_DEFAULT_FOOTER ||
`Copyright ${new Date().getFullYear().toString()} ${process.env.APP_NAME}`,
}));
<file_sep>import { Encrypter } from '../shared/encrypter';
import { Controller } from '@nestjs/common';
import { AuthService } from './auth.service';
import { ConfigService } from '@nestjs/config';
// @ApiTags('Admin Authentication Module')
@Controller('auth')
export class AuthController {
// constructor() {}
// @ApiBody({
// type: UserLoginDto,
// })
// @Post('admin/login')
// async login(@Body() userLoginDto: UserLoginDto) {
// return await this.authService.login(userLoginDto, 'admin');
// }
// @UseGuards(AdminAuthGuard)
// @ApiBearerAuth('JWT')
// @Get('test')
// async test() {
// const encoded = this.encrypter.encrypt({ name: 'George' });
// return {
// encoded,
// decoded: this.encrypter.decrypt(encoded, true),
// }
// }
}
<file_sep>import { ApiProperty } from '@nestjs/swagger';
import { User } from '../../../shared/database';
export class UserRegistrationUpdateResponseDto {
@ApiProperty({ description: 'Status', type: Boolean})
success: boolean;
@ApiProperty({ description: 'Message', type: String})
message: string;
@ApiProperty({ description: 'Data', type: User})
data: User;
}
<file_sep>import { registerAs } from '@nestjs/config';
import { toBool } from '../helpers';
const decodeAppKey = (key: string) => {
if (key === undefined || key === '' || key === null) {
return null;
}
return Buffer.from(key, 'base64');
};
export default registerAs('app', () => ({
name: process.env.APP_NAME,
environment: process.env.APP_ENV || 'local',
  port: parseInt(process.env.PORT, 10) || 5000,
decodedKey: decodeAppKey(process.env.APP_KEY),
key: process.env.APP_KEY,
debug: toBool(process.env.APP_DEBUG),
url: process.env.APP_URL,
base_path: process.env.APP_BASE_PATH,
tokenExpiresIn: 60 * 60 * 24,
frontendUrl: process.env.APP_FRONTEND_URL,
logo:
process.env.APP_LOGO ||
'https://via.placeholder.com/150x50/CCCCCC/000000?text=EZPark.NG',
supportEmail: '<EMAIL>',
}));
<file_sep>import { ApiProperty } from '@nestjs/swagger';
import { Physician } from '../../../shared/database';
export class PhysicianRegistrationUpdateResponseDto {
@ApiProperty({ description: 'Status', type: Boolean})
success: boolean;
@ApiProperty({ description: 'Message', type: String})
message: string;
@ApiProperty({ description: 'Data', type: Physician})
data: Physician;
}
<file_sep>import { Injectable } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { TypeOrmCrudService } from '@nestjsx/crud-typeorm';
import { Repository } from 'typeorm';
import { Physician } from './../../shared/database';
@Injectable()
export class PhysiciansService extends TypeOrmCrudService<Physician> {
constructor(
@InjectRepository(Physician)
public readonly repository: Repository<Physician>,
) {
super(repository);
}
}
<file_sep>import { Module } from '@nestjs/common';
import { ConfigService, ConfigModule } from '@nestjs/config';
import { TypeOrmModule } from '@nestjs/typeorm';
@Module({
imports: [
TypeOrmModule.forRootAsync({
imports: [ConfigModule],
useFactory: (configService: ConfigService) => ({
name: 'test',
type: 'mysql',
host: configService.get('database.host'),
port: configService.get<number>('database.port'),
username: configService.get('database.user'),
password: configService.get('database.password'),
database: configService.get('database.database'),
charset: configService.get('database.charset'),
entityPrefix: configService.get('database.prefix') || '',
autoLoadEntities: true,
legacySpatialSupport: configService.get(
'database.legacySpatialSupport',
),
}),
inject: [ConfigService],
}),
],
})
export class TestDatabaseModule {}
/*
name: 'test',
type: 'sqlite',
database: `${process.cwd()}/data/e2e-tests.sqlite`,
autoLoadEntities: true,
*/
// entities: ['./**/*.entity.js'],
<file_sep>export const DATABASE_CONNECTION = 'DATABASE_CONNECTION';
export const USER_REPOSITORY = 'USER_REPOSITORY';
export enum ServiceTypesEnum {
PARKING = 'PARKING',
VALUE_ADDED = 'VALUE_ADDED',
}
export enum AttendantTypesEnum {
VALET = 'VALET',
PARKING = 'PARKING',
SERVICE = 'SERVICE',
GENERAL = 'GENERAL',
}
export enum ClientTypesEnum {
BUSINESS = 'BUSINESS',
EVENT_PLANNER = 'EVENT_PLANNER',
OTHERS = 'OTHERS',
}
export const STRINGS = {
auth: {
login: {
invalid: 'Authentication failed, invalid credentials',
},
password: {
reset:
'Password reset link sent to the email address if you are a registered user',
},
},
};
<file_sep>export { Permission } from './entity/permission.entity';
export { Role } from './entity/role.entity';
export { DatabaseModule } from './database.module';
export { TestDatabaseModule } from './test-database.module';
export { Admin } from './entity/admins.entity';
export { PhysicianProfile } from './entity/physician_profile.entity';
export { Physician } from './entity/physician.entity';
export { UserMedicalData } from './entity/user_medical_data.entity';
export { User } from './entity/user.entity';
export { UserProfile } from './entity/user_profile.entity';
<file_sep>import { ApiProperty } from '@nestjs/swagger';
import { IsNotEmpty, MaxLength, IsOptional } from 'class-validator';
import { IsUnique } from '../../../shared/validations';
import { Permission } from '../../../shared/database';
export class CreatePermissionsDto {
@ApiProperty()
@IsUnique(Permission)
@MaxLength(50, { message: 'Name too Long' })
@IsNotEmpty({ message: 'Name is required' })
name: string;
@ApiProperty()
@MaxLength(50, { message: 'Display name too Long' })
@IsNotEmpty({ message: 'Display name is required' })
displayName: string;
@ApiProperty({ description: 'Group name' })
@IsOptional()
groupName: string;
}
<file_sep>import { TypeOrmModule } from '@nestjs/typeorm';
import { DatabaseModule } from './../../shared/database/database.module';
import { Module } from '@nestjs/common';
import { PhysiciansController } from './physicians.controller';
import { PhysiciansService } from './physicians.service';
import { Physician, PhysicianProfile } from './../../shared/database';
import { PhysiciansProfileService } from './physicians-profile.service';
@Module({
imports: [
DatabaseModule,
TypeOrmModule.forFeature(
[
Physician, PhysicianProfile
]
)
],
controllers: [PhysiciansController],
providers: [PhysiciansService, PhysiciansProfileService]
})
export class PhysiciansModule {}
<file_sep>import { ApiProperty } from '@nestjs/swagger';
import { IsNotEmpty, MaxLength, IsOptional } from 'class-validator';
import { IsUnique } from '../../../shared/validations';
import { Role } from '../../../shared/database';
export class CreateRolesDto {
@ApiProperty()
@IsUnique(Role)
@MaxLength(50, { message: 'Name too Long' })
@IsNotEmpty({ message: 'Name is required' })
name: string;
@ApiProperty()
@MaxLength(50, { message: 'Display name too Long' })
@IsNotEmpty({ message: 'Display name is required' })
displayName: string;
@ApiProperty({ type: [Number] })
@IsOptional()
permissions: number[];
}
<file_sep>import { Module } from '@nestjs/common';
import { AdminUsersModule } from './admin-users/admin-users.module';
import { RolesModule } from './roles/roles.module';
import { PermissionsModule } from './permissions/permissions.module';
import { AuthModule } from './../auth/auth.module';
import { PhysiciansModule } from './physicians/physicians.module';
import { UsersModule } from './users/users.module';
@Module({
imports: [AdminUsersModule, RolesModule, PermissionsModule, AuthModule, PhysiciansModule, UsersModule],
providers: [],
})
export class AdminModule {}
<file_sep>import {
Entity,
Column,
BeforeInsert,
PrimaryGeneratedColumn,
CreateDateColumn,
UpdateDateColumn,
DeleteDateColumn,
BeforeUpdate,
BeforeRemove,
OneToOne,
} from 'typeorm';
import * as bcrypt from 'bcrypt';
import { UserProfile } from './user_profile.entity';
import { UserMedicalData } from './user_medical_data.entity';
import { ApiProperty } from '@nestjs/swagger';
@Entity('users')
export class User {
@ApiProperty()
@PrimaryGeneratedColumn({
type: 'bigint',
})
id?: number;
@ApiProperty()
@Column({ length: 100, unique: true })
email: string;
@ApiProperty()
@Column({ length: 50 })
username: string;
@ApiProperty()
@Column({ length: 191 })
password: string;
@ApiProperty()
@Column({ length: 191, nullable: true })
avatar?: string;
@ApiProperty()
@Column({ type: 'tinyint', default: 1 })
status: number;
@CreateDateColumn({
name: 'created_at',
type: 'timestamp',
})
createdAt?: Date;
@UpdateDateColumn({
name: 'updated_at',
type: 'timestamp',
})
updatedAt?: Date;
@DeleteDateColumn({
name: 'deleted_at',
type: 'timestamp',
})
deletedAt?: Date;
@OneToOne(
() => UserProfile,
profile => profile.user,
{
eager: true,
cascade: true,
},
)
profile: UserProfile;
@OneToOne(
() => UserMedicalData,
medic => medic.user,
{
eager: true,
cascade: true,
},
)
medicalHistory: UserMedicalData;
@BeforeInsert()
async beforeCreate(): Promise<null> {
this.createdAt = new Date();
this.updatedAt = this.createdAt;
await this.hashPassword();
return;
}
@BeforeUpdate()
async beforeUpdate(): Promise<null> {
this.updatedAt = new Date();
await this.hashPassword();
return;
}
@BeforeRemove()
beforeDestroy(): null {
this.deletedAt = new Date();
return;
}
// @BeforeInsert()
async hashPassword(): Promise<null> {
if (this.password) {
this.password = await bcrypt.hash(this.password, 10);
}
return;
}
}
|
2f33a3fab9b05830f44692b5dc7745faf168b380
|
[
"Markdown",
"TypeScript",
"Shell"
] | 70
|
TypeScript
|
folajubril/nest-postgres-Multi-entity
|
94c7b5dcbd75ecdfe0ae708eaa7b78e6eb41aa01
|
2935e4f5caaac5c5c274c59e6f6e81bee87ee3ff
|
refs/heads/master
|
<file_sep>from sklearn.base import BaseEstimator, TransformerMixin
from sklearn.preprocessing import StandardScaler
from nltk.corpus import stopwords
from nltk.stem import WordNetLemmatizer
import nltk
import re
import numpy as np
import pandas
from nltk.tokenize import word_tokenize
from collections import defaultdict
from nltk.corpus import wordnet as wn
from nltk import pos_tag
from nltk.corpus import stopwords
from nltk.stem import WordNetLemmatizer
class WordCountExtractor(BaseEstimator, TransformerMixin):
def __init__(self):
pass
def word_count(self,data):
data =data.str.split().str.len()
data = pandas.DataFrame(data)
return data
def transform(self, df, y=None):
data = self.word_count(df)
return data
def fit(self, df, y=None):
return self
class CapitalWordCountExtractor(BaseEstimator, TransformerMixin):
def __init__(self):
pass
def title_capital_word_count(self, data):
cap_count = []
for row in data:
count = 0
for letter in row.split(" "):
if(letter.isupper()):
count+=1
cap_count.append(count)
return pandas.DataFrame(cap_count)
def transform(self, df, y=None):
return self.title_capital_word_count(df)
def fit(self, df, y=None):
return self
class WordLematization(BaseEstimator, TransformerMixin):
def __init__(self):
self.word_lemmatizer = WordNetLemmatizer()
def tokenize(self,data):
data = [entry.lower() for entry in data]
data= [word_tokenize(entry) for entry in data]
return data
def lemmatizer(self,data):
stop_words = stopwords.words('english')
train = []
for row in data:
filter_sentence = ''
sentence = row
sentence = re.sub(r'[^\w\s]', '', sentence)
words = nltk.word_tokenize(sentence)
words = [w for w in words if not w in stop_words]
for word in words:
filter_sentence = filter_sentence + ' ' + str(self.word_lemmatizer.lemmatize(word))
train.append(str(filter_sentence))
return train
def lemmatizerNew(self,data_final):
tag_map = defaultdict(lambda : wn.NOUN)
tag_map['J'] = wn.ADJ
tag_map['V'] = wn.VERB
tag_map['R'] = wn.ADV
train = []
for entry in data_final:
Final_words = []
word_Lemmatized = WordNetLemmatizer()
for word, tag in pos_tag(entry):
if word not in stopwords.words('english') and word.isalpha():
word_Final = word_Lemmatized.lemmatize(word,tag_map[tag[0]])
Final_words.append(word_Final)
train.append(str(Final_words))
return train
def transform(self, df, y=None):
#df = self.tokenize(df)
#return self.lemmatizerNew(df)
return self.lemmatizer(df)
def fit(self, df, y=None):
return self <file_sep>import pickle
from .detect import Detect
import pandas
class Prediction:
    def __init__(self):
        # No per-instance state is needed; the trained pipeline is loaded from disk in predict().
        pass
def predict(self,text , title):
df = pandas.DataFrame({
'text' :[text],
'title' : [title]})
log = self.load_file('fake_news_model.sav')
result=log.predict(df)
return result[0]
def load_file(self,filename):
fake_news_model = open(filename, 'rb')
p = pickle.load(fake_news_model)
return p
<file_sep>import pandas as pd
import pickle as pk
from sklearn.model_selection import train_test_split
import logging
from sklearn.metrics import confusion_matrix,classification_report
from .classifier import *
class Detect:
def __init__(self, data):
self.data =data
self.X_train, self.X_test, self.y_train, self.y_test = self.get_train_data(self.data)
def train(self,model):
pipe = get_models()[model]
a = pipe.fit(self.X_train , self.y_train)
self.save_file(pipe ,"fake_news_model.sav")
return pipe
def get_train_data(self,dataset):
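        # Drop rows with missing values, keep the full frame as X (the pipelines select columns themselves)
        # and hold out 10% of the data for evaluation with a fixed random_state for reproducibility.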
dataset.dropna(inplace=True)
X= dataset
y = dataset['label']
X_train, X_test, y_train, y_test = train_test_split(X,y, test_size=.10, random_state=0)
return X_train,X_test,y_train,y_test
def compare(self):
models = get_models()
for name , model in models.items():
model.fit(self.X_train , self.y_train)
self.save_file( models , "compare_model.sav")
return models
def getAccuracy(self,m):
return round(m.score(self.X_test, self.y_test), 3) * 100
def getConfmatrix(self,m):
pred = m.predict(self.X_test)
return confusion_matrix(self.y_test,pred)
def getReport(self,m):
pred = m.predict(self.X_test)
return classification_report(self.y_test,pred)
def save_file(self,lr,filename):
pk.dump(lr,open(filename , 'wb'))<file_sep>import pickle
import pandas
from .detect import Detect
from .prediction import Prediction
import logging
from sklearn.pipeline import Pipeline
class fake_news_detection:
def __init__(self,filename):
self.filename = filename
self.data = self.loadModel()
self.tr = Detect(self.data)
def loadModel(self):
        try:
            data = pandas.read_csv(self.filename)
        except FileNotFoundError:
            logging.error("file not found: %s", self.filename)
            exit(1)
return data
def train(self, model = "LR"):
self.tr.train(model)
def predict(self,text,title):
return Prediction().predict(text , title)
def compare(self,force = False):
if(not force):
try :
m = pickle.load(open("compare_model.sav" , "rb"))
except IOError:
logging.warning("file not found traning all model")
m= self.tr.compare()
else:
m= self.tr.compare()
data= {'accuracy':[self.tr.getAccuracy(model) for _ , model in m.items()],
'cmatrix':[self.tr.getConfmatrix(model) for _ , model in m.items()],
'creport':[self.tr.getReport(model) for _, model in m.items()]}
return data<file_sep>from sklearn.linear_model import LogisticRegression
from .newfeatures import *
from sklearn.preprocessing import FunctionTransformer
from sklearn.ensemble import RandomForestClassifier
from sklearn.pipeline import FeatureUnion, Pipeline
from sklearn.svm import LinearSVC
from sklearn.feature_extraction.text import TfidfTransformer, TfidfVectorizer
from sklearn.feature_extraction.text import CountVectorizer
# Column selectors used by the FunctionTransformer steps below.
def text(X):
    return X.text
def title(X):
    return X.title
def get_models():
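    # One pipeline per classifier: the text and title columns are each lemmatised and TF-IDF vectorised,
    # joined with word-count features (and, for titles, a capitalised-word count) via FeatureUnion.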
models = {"LR" : ('LR',LogisticRegression()) ,"SVM" : ('SVM',LinearSVC()) ,"RF" : ("RF" , RandomForestClassifier())}
m = {}
pipe_text = Pipeline([('col_text', FunctionTransformer(text, validate=False))])
pipe_title = Pipeline([('col_title', FunctionTransformer(title, validate=False))])
feature_text = FeatureUnion([
("vectorizer" , Pipeline([("word_lemetization", WordLematization()),('tfidf', TfidfVectorizer(max_features=2000))])),
('ave', WordCountExtractor())
])
feature_title = FeatureUnion([
("vectorizer" , Pipeline([("word_lemetization", WordLematization()),('tfidf', TfidfVectorizer(max_features=2000))])),
('ave', WordCountExtractor()) ,
("capital_word_count" , CapitalWordCountExtractor())
])
pipe1 = Pipeline(pipe_text.steps + [("newfeat1" , feature_text)])
pipe2 = Pipeline(pipe_title.steps + [("newfeat2" , feature_title)])
vectorizer = FeatureUnion([('text', pipe1), ('title', pipe2)])
for name , model in models.items() :
m[name] = Pipeline([('feats' , vectorizer)
, model
])
return m
|
44ffc6e9b50dc07d05ff4f9dc6fc6f88654d6f71
|
[
"Python"
] | 5
|
Python
|
marvel18/FakeNewsDetection
|
f315dacc4dd03ffff85970985db4f584ed2eaa3c
|
f1fa189e203287e8a87aa53cfb42320b4602b50b
|
refs/heads/master
|
<file_sep>package edu.cnm.deepdive.njb.audioexample;
import android.media.AudioAttributes;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaPlayer;
import android.media.MediaRecorder;
import android.media.MediaScannerConnection;
import android.net.Uri;
import android.os.Environment;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.CompoundButton;
import android.widget.SeekBar;
import android.widget.TextView;
import android.widget.ToggleButton;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Date;
public class MainActivity extends AppCompatActivity
implements View.OnClickListener, CompoundButton.OnCheckedChangeListener,
SeekBar.OnSeekBarChangeListener {
private static final int DEFAULT_RECORD_TIME = 0;
private static final int SAMPLE_RATE = 44100; // Set as appropriate
private static final int RECORD_BUFFER_MULTIPLIER = 4;
private static final int READ_BUFFER_SIZE = 4096;
private static final int NUM_CHANNELS = 2; // Change this to 1 for mono.
private static final int BITS_PER_SAMPLE_PER_CHANNEL = 16;
private static final int AUDIO_ENCODING_FORMAT = 1; // Corresponds to PCM.
private static final int[] AUDIO_FORMAT_CHANNELS = {
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.CHANNEL_IN_STEREO
};
private ToggleButton toggleRecord;
private SeekBar recordTime;
private TextView recordTimeText;
private Button playback;
private Button erase;
private File file = null;
private boolean recording = false;
private Recorder recorder = null;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
toggleRecord = findViewById(R.id.toggle_record);
recordTime = findViewById(R.id.record_time);
recordTimeText = findViewById(R.id.record_time_text);
playback = findViewById(R.id.playback);
erase = findViewById(R.id.erase);
toggleRecord.setOnCheckedChangeListener(this);
recordTime.setOnSeekBarChangeListener(this);
playback.setOnClickListener(this);
erase.setOnClickListener(this);
recordTime.setProgress(DEFAULT_RECORD_TIME);
}
@Override
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
if (isChecked) {
startRecording();
} else {
stopRecording();
}
}
@Override
public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
recordTimeText.setText(getString(R.string.record_time_text, progress));
}
@Override
public void onStartTrackingTouch(SeekBar seekBar) {}
@Override
public void onStopTrackingTouch(SeekBar seekBar) {}
@Override
public void onClick(View v) {
switch (v.getId()) {
case R.id.playback:
play();
break;
case R.id.erase:
erase();
break;
default:
// Do nothing
}
}
private void startRecording() {
int secondsToRecord = recordTime.getProgress();
enableControls(false);
recording = true;
new Recorder().start();
if (secondsToRecord > 0) {
scheduleStopRecording(secondsToRecord);
}
}
private void stopRecording() {
recording = false;
toggleRecord.setChecked(false);
}
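  // Runs a background thread that waits (in slices of at most 100 ms) until the requested seconds elapse
  // or recording is stopped manually, then stops the recorder on the UI thread.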
private void scheduleStopRecording(final int secondsToRecord) {
new Thread(new Runnable() {
@Override
public void run() {
long startTime = System.currentTimeMillis();
long stopTime = startTime + secondsToRecord * 1000L;
while (stopTime > startTime && recording) {
try {
Thread.sleep(Math.min(stopTime - startTime, 100));
} catch (InterruptedException e) {
// Do nothing
}
startTime = System.currentTimeMillis();
}
if (recording) {
runOnUiThread(new Runnable() {
@Override
public void run() {
stopRecording();
}
});
}
}
}).start();
}
private void play() {
class Listener implements MediaPlayer.OnPreparedListener, MediaPlayer.OnCompletionListener {
@Override
public void onPrepared(MediaPlayer player) {
enableControls(false);
player.start();
}
@Override
public void onCompletion(MediaPlayer player) {
player.release();
enableControls(true);
}
}
if (file != null && file.exists()) {
MediaPlayer player = null;
try {
Uri location = Uri.fromFile(file);
Listener listener = new Listener();
AudioAttributes attributes = new AudioAttributes.Builder()
.setContentType(AudioAttributes.CONTENT_TYPE_UNKNOWN)
.setFlags(AudioAttributes.FLAG_AUDIBILITY_ENFORCED)
.build();
player = new MediaPlayer();
player.setDataSource(this, location);
player.setAudioAttributes(attributes);
player.setVolume(1, 1);
player.setOnPreparedListener(listener);
player.setOnCompletionListener(listener);
player.prepareAsync();
} catch (IOException e) {
if (player != null) {
player.release();
}
}
}
}
private void erase() {
enableControls(false);
if (file != null && file.exists()) {
file.delete();
file = null;
}
}
private void enableControls(boolean enabled) {
playback.setEnabled(enabled);
erase.setEnabled(enabled);
}
private class Recorder extends Thread {
@Override
public void run() {
try {
File internal = getFilesDir();
File external = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_MUSIC);
File rawFile = new File(internal, getString(R.string.raw_filename_format, new Date()));
// If you don't want wavFile to be public, then use internal in the next line.
File wavFile = new File(external, getString(R.string.wav_filename_format, new Date()));
recordRawAudio(rawFile);
writeWavFile(rawFile, wavFile);
MediaScannerConnection.scanFile(
MainActivity.this, new String[] {wavFile.toString()}, null, null);
file = wavFile;
rawFile.delete();
runOnUiThread(new Runnable() {
@Override
public void run() {
enableControls(true);
}
});
} catch (IOException e) {
throw new RuntimeException(e);
}
}
private void recordRawAudio(File rawFile) throws IOException {
AudioRecord record = null;
try (
FileOutputStream os = new FileOutputStream(rawFile);
BufferedOutputStream output = new BufferedOutputStream(os);
) {
short[] readBuffer = new short[READ_BUFFER_SIZE];
byte[] writeBuffer = new byte[READ_BUFFER_SIZE * 2];
record = new AudioRecord(MediaRecorder.AudioSource.MIC, SAMPLE_RATE,
AUDIO_FORMAT_CHANNELS[NUM_CHANNELS - 1], AudioFormat.ENCODING_PCM_16BIT,
RECORD_BUFFER_MULTIPLIER * AudioRecord.getMinBufferSize(SAMPLE_RATE,
AudioFormat.CHANNEL_IN_STEREO, AudioFormat.ENCODING_PCM_16BIT));
while (record.getState() != AudioRecord.STATE_INITIALIZED) {}
record.startRecording();
int readLength = 0;
while (recording || readLength > 0) {
if (!recording) {
record.stop();
}
readLength = record.read(readBuffer, 0, readBuffer.length);
if (readLength > 0) {
shortArrayToLEByteArray(readBuffer, 0, readLength, writeBuffer, 0);
output.write(writeBuffer, 0, 2 * readLength);
}
}
output.flush();
}
}
private void writeWavFile(File rawFile, File wavFile) throws IOException {
try (
FileInputStream is = new FileInputStream(rawFile);
BufferedInputStream input = new BufferedInputStream(is);
FileOutputStream os = new FileOutputStream(wavFile);
BufferedOutputStream output = new BufferedOutputStream(os);
) {
byte[] xferBuffer = new byte[READ_BUFFER_SIZE * 2];
writeWavHeader(output,
is.getChannel().size(), // Number of bytes in raw data
AUDIO_ENCODING_FORMAT, // = 1 for PCM
NUM_CHANNELS, // Number of channels
SAMPLE_RATE, // Samples per second
BITS_PER_SAMPLE_PER_CHANNEL
);
while (true) {
int readLength = input.read(xferBuffer, 0, Math.min(input.available(), xferBuffer.length));
if (readLength <= 0) {
break;
} else {
output.write(xferBuffer, 0, readLength);
}
}
output.flush();
}
}
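    // Packs 16-bit PCM samples into little-endian byte pairs, as expected by the raw capture file and the WAV data chunk.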
private void shortArrayToLEByteArray(short[] input, int readOffset, int readLength,
byte[] output, int writeOffset) {
for (int i = readOffset, j = writeOffset; i < readOffset + readLength; i++, j += 2) {
output[j] = (byte) (input[i] & 0xff);
output[j + 1] = (byte) ((input[i] >> 8) & 0xff);
}
}
private void writeWavHeader(OutputStream output, long rawDataLength, int format,
int channels, int sampleRate, int bitsPerSamplePerChannel) throws IOException {
long allDataLength = rawDataLength + 36;
short bytesPerSample = (short) (channels * bitsPerSamplePerChannel / 8);
int byteRate = sampleRate * bytesPerSample;
byte[] header = {
'R', 'I', 'F', 'F', // [0, 4)
(byte) (allDataLength & 0xff), // [4, 8)
(byte) ((allDataLength >> 8) & 0xff),
(byte) ((allDataLength >> 16) & 0xff),
(byte) ((allDataLength >> 24) & 0xff),
'W', 'A', 'V', 'E', // [8, 12)
'f', 'm', 't', ' ', // [12, 16)
16, 0, 0, 0, // [16, 20)
(byte) (format & 0xff), // [20, 22)
(byte) ((format >> 8) & 0xff),
(byte) (channels & 0xff), // [22, 24)
(byte) ((channels >> 8) & 0xff),
(byte) (sampleRate & 0xff), // [24, 28)
(byte) ((sampleRate >> 8) & 0xff),
(byte) ((sampleRate >> 16) & 0xff),
(byte) ((sampleRate >> 24) & 0xff),
(byte) (byteRate & 0xff), // [28, 32)
(byte) ((byteRate >> 8) & 0xff),
(byte) ((byteRate >> 16) & 0xff),
(byte) ((byteRate >> 24) & 0xff),
(byte) (bytesPerSample & 0xff), // [32, 34)
(byte) ((bytesPerSample >> 8) & 0xff),
(byte) (bitsPerSamplePerChannel & 0xff), // [34, 36)
(byte) ((bitsPerSamplePerChannel >> 8) & 0xff),
'd', 'a', 't', 'a', // [36, 40)
(byte) (rawDataLength & 0xff), // [40, 44)
(byte) ((rawDataLength >> 8) & 0xff),
(byte) ((rawDataLength >> 16) & 0xff),
(byte) ((rawDataLength >> 24) & 0xff)
};
output.write(header);
}
}
}
|
90a1cb03cb6e3f35df114efa44babae9eadce8ff
|
[
"Java"
] | 1
|
Java
|
nick-bennett/AudioExample
|
db2d8813cf8fcfb17bc13820a0a73fc6139f771f
|
73ecd36302fb0001aa7f8f64e5d4693f94b378aa
|
refs/heads/master
|
<repo_name>Qman-joe/test<file_sep>/src/app/from/from.component.ts
/* tslint:disable: member-ordering forin */
import { Component, OnInit } from '@angular/core';
import { FormControl, FormGroup, Validators } from '@angular/forms';
import { forbiddenNameValidator } from '../shared/forbidden-name.directive';
import { identityRevealedValidator } from '../shared/identity-revealed.directive';
import { UniqueAlterEgoValidator } from '../shared/alter-ego.directive';
@Component({
selector: 'app-from',
templateUrl: './from.component.html',
styleUrls: ['./from.component.scss'],
})
export class FromComponent implements OnInit {
selects = ['Option 1', 'Option 2', 'Option 3'];
hero = { Lname: '', Fname: '', textInput: 'Testing...', select: this.selects[0]};
testForm: FormGroup;
ngOnInit(): void {
this.testForm = new FormGroup({
'Lname': new FormControl(this.hero.Lname, [
Validators.required,
Validators.minLength(3),
forbiddenNameValidator(/bob/i)
]),
'Fname': new FormControl(this.hero.Fname, [
Validators.required,
Validators.minLength(3),
forbiddenNameValidator(/bob/i)
]),
'textInput': new FormControl(this.hero.textInput, {
asyncValidators: [this.alterEgoValidator.validate.bind(this.alterEgoValidator)],
updateOn: 'blur'
}),
'select': new FormControl(this.hero.select, Validators.required),
}, { validators: identityRevealedValidator }); // <-- custom validator added at the FormGroup level
}
get Lname() { return this.testForm.get('Lname'); }
get Fname() { return this.testForm.get('Fname'); }
get select() { return this.testForm.get('select'); }
get textInput() { return this.testForm.get('textInput'); }
constructor(private alterEgoValidator: UniqueAlterEgoValidator) { }
}
/*
Copyright Google LLC. All Rights Reserved.
Use of this source code is governed by an MIT-style license that
can be found in the LICENSE file at http://angular.io/license
*/<file_sep>/README.md
# angular-n3dral-bpfean
[Edit on StackBlitz ⚡️](https://stackblitz.com/edit/angular-n3dral-bpfean)<file_sep>/src/assets/js/script.js
var animation = bodymovin.loadAnimation({
container: document.getElementById('test'),
renderer: 'svg',
loop: true,
autoplay: false,
path: 'data.json'
})
|
3ed6315c978976304ab9a51bf78cdb04e17582ab
|
[
"Markdown",
"TypeScript",
"JavaScript"
] | 3
|
TypeScript
|
Qman-joe/test
|
4e781d24bef73c8dd1ede9f66b9375302d9ca009
|
06a25625c147af7815d37c59a9cb95014393def4
|
refs/heads/master
|
<repo_name>dylanlott/jjdl<file_sep>/README.md
# jjdl
> a super simple programming language in Go
- take a file as input (*.jjdl file extension)
- tokenize the file
- parse the tokens
- operate on the parsed tokens
- output the operations
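A minimal sketch of what a `.jjdl` program might look like, mirroring the `src` constant in `main.go` (the switch there also lists `+=`, `-=` and `?` as intended operators):

```
x = 3;
y = 2;
! x;
! y;
```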
<file_sep>/go.mod
module github.com/jjdl
<file_sep>/main.go
package main
import (
"errors"
"fmt"
"regexp"
"strconv"
"strings"
"text/scanner"
)
const src = `
x = 3;
y = 2;
! x;
! y;
`
// Variable is string
type Variable string
// Statement is the struct that holds statement information
type Statement struct {
variable string
operator string
argument interface{}
}
// Lexer is the main struct that is worked on
type Lexer struct {
variables map[Variable]interface{}
statements []Statement
current *Statement
next interface{}
position scanner.Position
}
func (l *Lexer) clearCurrent() {
l.current = &Statement{}
}
func main() {
BuildStatements()
}
func (l *Lexer) mapVariable(key string, arg interface{}) {
l.variables[Variable(key)] = arg
return
}
func readVariable(token []byte) (string, error) {
b, err := regexp.Match("[a-zA-Z][a-zA-Z0-9]*", token)
if err != nil {
return "", errors.New("Error reading regex")
}
if b {
return string(token), nil
}
return "", errors.New("Invalid token name")
}
func readInteger(token []byte) (int, error) {
matches, err := regexp.Match("[0-9]+", token)
if err != nil {
return 0, err
}
if matches {
i, err := strconv.Atoi(string(token))
return i, err
}
return 0, errors.New("Invalid token")
}
// Statements returns all statements on the Lexer. If the lexer hasn't been run yet
// this won't be populated
func (l *Lexer) Statements() []Statement {
return l.statements
}
// RunStatements executes, in order, the statements in the lexer struct
func (l *Lexer) RunStatements() {
for _, statement := range l.statements {
fmt.Println(statement)
}
}
// BuildStatements lexes the input and builds statements
// from it.
func BuildStatements() {
var s scanner.Scanner
s.Init(strings.NewReader(src))
s.Filename = "jjdl"
l := &Lexer{}
l.variables = make(map[Variable]interface{})
item := &Statement{}
l.current = item
for tok := s.Scan(); tok != scanner.EOF; tok = s.Scan() {
l.next = s.Peek()
l.position = s.Pos()
switch s.TokenText() {
case "?":
item.operator = "IFZERO"
case "=":
item.operator = "ASSIGNMENT"
case "+=":
item.operator = "ADD"
case "-=":
item.operator = "SUB"
case "!":
item.operator = "PRINT"
case ";":
l.mapVariable(item.variable, item.argument)
l.statements = append(l.statements, *item)
item.operator = ""
item.variable = ""
item.argument = nil
l.clearCurrent()
default:
char, _ := readVariable([]byte(s.TokenText()))
if char != "" {
item.variable = char
}
i, err := readInteger([]byte(s.TokenText()))
if err == nil {
item.argument = i
}
}
}
fmt.Printf("%+v\n", l)
l.RunStatements()
}
|
128ac9d03a469608d9e0e3d34ecdcb09739a4c25
|
[
"Markdown",
"Go Module",
"Go"
] | 3
|
Markdown
|
dylanlott/jjdl
|
268035320dae13cdae486cfd10a139daac97d23f
|
7d65cc46e7507061dccab2fd7d23395a1468d47a
|
refs/heads/master
|
<file_sep>// Second partial project
// <NAME>
// <NAME>
#include <GL/glew.h>
#include <GL/freeglut.h>
#include <iostream>
using namespace std;
// Window size
GLint width = 800;
GLint height = 600;
static int window;
static int menu_id;
static int submenu_id;
static int value = 0;
int r = 0, g = 0, b = 0;
int slices = 10, stacks = 10;
void menu(int num) {
if (num == 0) {
glutDestroyWindow(window);
exit(0);
}
else {
value = num;
}
glutPostRedisplay();
}
void createMenu(void) {
//Esfera, Cono, Cubo, Caja, Toroide, Tetrahedro, Octahedro, Dodecahedro, Icosahedro, Tetera
submenu_id = glutCreateMenu(menu);
glutAddMenuEntry("Esfera", 2);
glutAddMenuEntry("Cono", 3);
glutAddMenuEntry("Cubo", 4);
glutAddMenuEntry("Caja", 5);
glutAddMenuEntry("Toroide", 6);
glutAddMenuEntry("Tetrahedro", 7);
glutAddMenuEntry("Octahedro", 8);
glutAddMenuEntry("Dodecahedro", 9);
glutAddMenuEntry("Icosahedro", 10);
glutAddMenuEntry("Tetera", 11);
menu_id = glutCreateMenu(menu);
glutAddMenuEntry("Limpiar último", 1);
glutAddSubMenu("Dibujar figura", submenu_id);
glutAddMenuEntry("Salir", 0); glutAttachMenu(GLUT_LEFT_BUTTON);
}
void display(void) {
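// value is set by the menu callback: 2 draws a sphere, 3 a cone, 4 a torus and 5 a teapot
// (the last two do not match the "Cubo"/"Caja" menu labels yet).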
glClear(GL_COLOR_BUFFER_BIT);
if (value == 1) {
return; //glutPostRedisplay();
}
else if (value == 2) {
glPushMatrix();
glColor3d(1.0, 0.0, 0.0);
glutWireSphere(0.5, 50, 50);
glPopMatrix();
}
else if (value == 3) {
glPushMatrix();
glColor3d(0.0, 1.0, 0.0);
glRotated(65, -1.0, 0.0, 0.0);
glutWireCone(0.5, 1.0, 50, 50);
glPopMatrix();
}
else if (value == 4) {
glPushMatrix();
glColor3d(0.0, 0.0, 1.0);
glutWireTorus(0.3, 0.6, 100, 100);
glPopMatrix();
}
else if (value == 5) {
glPushMatrix();
glColor3d(1.0, 0.0, 1.0);
glutSolidTeapot(0.5);
glPopMatrix();
}
glFlush();
}
// Function invoked whenever the window is resized
void resize(GLint w, GLint h)
{
width = w;
height = h;
}
// Function invoked whenever the scene has to be drawn
void render()
{
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);// Clear the buffers with the active color set by glClearColor
/*
glViewport(0, 0, width, height);// Se fija el rea de dibujo en la ventana (viewport)
// Se dibuja un tringulo con el color activo definido por glColor
glPolygonMode(GL_FRONT_AND_BACK, GL_LINE);
glBegin(GL_TRIANGLES);
glVertex3f(-0.5f, -0.5f, 0.f);
glVertex3f(0.5f, -0.5f, 0.f);
glVertex3f(0.f, 0.5f, 0.f);
glEnd();
*/
glutSwapBuffers();// Swap buffers
}
// Function invoked on every mouse click event (right button, on release)
void mouse(int button, int state, int x, int y)
{
if (button == GLUT_RIGHT_BUTTON && state == GLUT_UP)
{
cout << "GLUT_RIGHT & GLUT_UP, x= " << x << ", y= " << y << endl;
}
}
// Function invoked on every mouse movement and dragging event
void motionMouse(int x, int y)
{
//cout << "x= " << x << ", y= " << y << endl;
}
int main(GLint argc, GLchar **argv)
{
// 1. Create a window and an OpenGL context using GLUT
glutInit(&argc, argv);// Initialize GLUT
glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGBA | GLUT_DEPTH); // Initialize buffers
glutInitWindowSize(width, height); // Set the window size
glutCreateWindow("Proyecto de segundo parcial"); // Create the window
glDrawArrays(1, 1, 10); // (stray call: no vertex data has been supplied at this point)
glutMouseFunc(mouse); // The mouse function is invoked on every mouse click (right button, on release)
glutPassiveMotionFunc(motionMouse); // The motionMouse function is invoked on every mouse movement
createMenu(); // attach the menu, shown on left click
// 1.2 Register the callback functions for event handling
glutReshapeFunc(resize);// The resize function is invoked whenever the window is resized
glutDisplayFunc(render);// The render function is invoked whenever the scene has to be drawn
// 2. Resolve the correct OpenGL API function pointers
GLenum err = glewInit();
if (GLEW_OK != err)
{
fprintf(stderr, "GLEW Error");
return false;
}
// 3. Initialize the OpenGL context state
glClearColor(0.f, 0.f, 0.f, 0.f);// Set the RGBA color (black) used to clear the initialized buffers
glColor3f(1.f, 1.f, 1.f);// Set the RGB color (white) used to draw the scene
// 4. Start the event listening loop
glutMainLoop();
return 0;
}
<file_sep>// HelloWorld OpenGL using GLUT
#include <GL/glew.h>
#include <GL/freeglut.h>
#include <iostream>
using namespace std;
// Window size
GLint width = 800;
GLint height = 600;
// Function invoked whenever the window is resized
void resize(GLint w, GLint h)
{
width = w;
height = h;
}
// Function invoked whenever the scene has to be drawn
void render()
{
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);// Clear the buffers with the active color set by glClearColor
glViewport(0, 0, width, height);// Set the drawing area within the window (viewport)
// Draw a triangle with the active color set by glColor
glPolygonMode(GL_FRONT_AND_BACK, GL_LINE);
glBegin(GL_TRIANGLES);
glVertex3f(-0.5f, -0.5f, 0.f);
glVertex3f(0.5f, -0.5f, 0.f);
glVertex3f(0.f, 0.5f, 0.f);
glEnd();
glutSwapBuffers();// Swap buffers
}
int main(GLint argc, GLchar **argv)
{
// 1. Create a window and an OpenGL context using GLUT
glutInit(&argc, argv);// Initialize GLUT
glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGBA | GLUT_DEPTH); // Initialize buffers
glutInitWindowSize(width, height); // Set the window size
glutCreateWindow("Hello World OpenGL using GLUT"); // Create the window
// 1.2 Register the callback functions for event handling
glutReshapeFunc(resize);// The resize function is invoked whenever the window is resized
glutDisplayFunc(render);// The render function is invoked whenever the scene has to be drawn
// 2. Resolve the correct OpenGL API function pointers
GLenum err = glewInit();
if (GLEW_OK != err)
{
fprintf(stderr, "GLEW Error");
return false;
}
// 3. Initialize the OpenGL context state
glClearColor(0.f, 0.f, 0.f, 0.f);// Set the RGBA color (black) used to clear the initialized buffers
glColor3f(1.f, 1.f, 1.f);// Set the RGB color (white) used to draw the scene
// 4. Start the event listening loop
glutMainLoop();
return 0;
}
<file_sep>Things to do
- create an array to add the objects
- set the objects with the position clicked on the screen
- remove objects from the array
- add the transformations for the objects
- Scale
- Rotate
- make the transformations scene
- select any object to move and scale
- add screens and camera movement
Set the resources for
- Cylinder and Disc
- Point, line, triangle and Quad
- Pyramid
|
6bad8242f10fd6e06329ecd90882f782877a2f17
|
[
"Markdown",
"C++"
] | 3
|
C++
|
diegozr1/graficasComputacionales2parcial
|
705256e5ef046bf87f58591d05e94678bbe70e6b
|
1862eb99970fdf750864b72c2a0027c8cb5f47a1
|
refs/heads/main
|
<repo_name>Atharva222002/LMS<file_sep>/public/js/fetch.js
var pieButton = document.getElementById('pie');
var abcd = document.getElementById('abcd')
pieButton.addEventListener('click', () => {
abcd.style.display = 'block';
var open, closed, validated, rejected;
fetch('/api/data')
.then(
function (response) {
if (response.status !== 200) {
console.log('Looks like there was a problem. Status Code: ' +
response.status);
return;
}
response.json().then(function (data) {
//console.log(data);
open = data.open;
closed = data.closed;
validated = data.validated;
rejected = data.rejected;
dataFromBackend = data;
console.log(open, closed, validated, rejected)
});
}
)
.catch(function (err) {
console.log('Fetch Error :-S', err);
});
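// Wait a fixed 4 s for the fetch above to populate open/validated/rejected/closed before building the chart.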
setTimeout(() => {
abcd.style.display = 'none';
const data = [open, validated, rejected, closed]
var chartDiv = $("#barChart");
var myChart = new Chart(chartDiv, {
type: 'pie',
data: {
labels: ["Open", "Validated", "Rejected", "Closed"],
datasets: [
{
data: data,
backgroundColor: [
"#FF6384",
"#4BC0C0",
"#FFCE56",
"#36A2EB"
]
}]
},
options: {
title: {
display: true,
text: 'Pie Chart'
},
responsive: true,
maintainAspectRatio: false,
}
});
}, 4000);
})
var temp = document.getElementById("temp");
temp.addEventListener('click', () => {
var labels = []
var No_of_Leads = [];
abcd.style.display = 'block';
fetch('/api/barData')
.then(
function (response) {
if (response.status !== 200) {
console.log('Looks like there was a problem. Status Code: ' +
response.status);
return;
}
response.json().then(function (data) {
var i = 0;
data.forEach(element => {
if(i%2==0){
labels.push(element)
}else{
No_of_Leads.push(element)
}
i++;
});
});
}
)
.catch(function (err) {
console.log('Fetch Error :-S', err);
});
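// Same fixed 4 s wait for the /api/barData response to fill labels and No_of_Leads before drawing the line chart.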
setTimeout(() => {
var ctx = $("#line-chartcanvas");
abcd.style.display = 'none';
var data = {
labels:labels,
datasets: [
{
label: "Segment of Lead",
data: No_of_Leads,
backgroundColor: "blue",
borderColor: "lightblue",
fill: false,
lineTension: 0,
radius: 5
},
]
};
var options = {
responsive: true,
title: {
display: true,
position: "top",
text: "Line Graph",
fontSize: 18,
fontColor: "#111"
},
legend: {
display: true,
position: "bottom",
labels: {
fontColor: "#333",
fontSize: 16
}
}
};
var chart = new Chart(ctx, {
type: "line",
data: data,
options: options
});
}, 4000);
})
<file_sep>/public/js/index2.js
var socket = io();
var element = document.getElementById('send');
var receive = document.getElementById('receive');
var notification = document.getElementById("notification");
element.addEventListener('click', function(data) {
var leadobj = {name:document.getElementById("name").value,value:document.getElementById("value").value,
segment:document.getElementById("segment").value,to:document.getElementById("to").value,from:document.getElementById("from").value};
socket.emit('LeadSent',leadobj);
});
socket.on('LeadReceived', function() {
console.log("Lead received")
document.getElementById("alert").innerHTML=Swal.fire("New lead is assigned for you!")
});<file_sep>/README.md
**Link to the website - https://corpcomp-lms.herokuapp.com/**
**Solution to problem statement 1 of CorpComp**
**Lead Management System** is a website (a platform) for the employees of **<NAME>**. This platform will be used to forward and receive leads among the various OpCos of the company. This website includes -
A **Login and Registration** page which can be used only by the employees of the company.
An **Index page** which contains multiple options like Download Csv, Analytics Dashboard, Add Lead, View received leads, View sent leads and Logout.
The **Analytics Dashboard** contains graphs and visualizations to more precisely showcase the things like total number of leads, status of leads, $-value of lead etc.
The **Download Csv** will download the record of leads in a csv format and this file will contain 3 columns which would be - Status of the lead, lead sent from, lead sent to.
The **Add Lead** option will redirect us to a form wherein the user will have to enter all the necessary details about the lead he is submitting.
The **View leads** option displays the list of leads which the logged in user has sent to any user of another OpCo.
The **Received leads** page will display all the Leads which the logged in user has received till date from any other person belonging to any OpCo of the company. Here, the logged in user has options to Accept or Reject a Lead. Once accepted or rejected, the lead has to be closed.
Technologies used: **Frontend** - EJS, CSS, JavaScript
**Backend** - Node.js, Express
**Database** - MongoDB Atlas
<file_sep>/middleware/index.js
const opco1 = require("../models/opco1");
const opco2 = require("../models/opco2");
const opco3 = require("../models/opco3");
var middlewareObj = {};
// LOGIN MIDDLEWARE
middlewareObj.isLoggedIn = function(req, res, next){
if (req.isAuthenticated()) {
return next();
}
res.redirect("/login");
}
middlewareObj.authComp = function(OpCo){
return(req,res,next)=>{
if(req.user.OpCp!==OpCo){
res.status(401)
return res.send("Not allowed")
}
next()
}
}
module.exports = middlewareObj;<file_sep>/models/opco1.js
var mongoose = require("mongoose");
passportLocalMongoose = require("passport-local-mongoose");
var opco1Schema = mongoose.Schema({
name:String,
OpCo:String,
password:String,
username:String
});
var bcryptjs = require('bcryptjs')
opco1Schema.pre('save', function (next) {
this.password = bcryptjs.hashSync(this.password, 10);
next();
});
opco1Schema.statics.login = async function(username, password) {
const user = await this.findOne({ "username" : username });
if (user) {
const auth = await bcryptjs.compareSync(password , user.password)
if (auth) {
return user;
}
throw Error('Incorrect password');
}
throw Error('Incorrect email');
};
opco1Schema.plugin(passportLocalMongoose);
module.exports = mongoose.model("OpCo1", opco1Schema);
<file_sep>/public/js/doughnut.js
$(function(){
//get the doughnut chart canvas
var ctx1 = $("#doughnut-chartcanvas-1");
//doughnut chart data
var data2 = {
labels: ["OpCo1","OpCo2","OpCo3"],
datasets: [
{
label: "TeamB Score",
data: [40, 34, 26],
backgroundColor: [
"#FAEBD7",
"#DCDCDC",
"#E9967A",
],
borderColor: [
"#E9DAC6",
"#CBCBCB",
"#D88569",
"#E4CDA2",
"#89BC21"
],
borderWidth: [1, 1, 1, 1, 1]
}
]
};
var options = {
responsive: true,
title: {
display: true,
position: "top",
text: "Profit per OpCo",
fontSize: 18,
fontColor: "#111"
},
legend: {
display: true,
position: "bottom",
labels: {
fontColor: "#333",
fontSize: 16
}
}
};
var chart2 = new Chart(ctx1, {
type: "doughnut",
data: data2,
options: options
});
});
<file_sep>/models/lead.js
var mongoose = require("mongoose");
passportLocalMongoose = require("passport-local-mongoose");
const mongooseToCsv = require('mongoose-to-csv');
var leadSchema = mongoose.Schema({
name:String,
time:String,
status:Array,
curstatus:String,
value:Number,
details:String,
segment:String,
Submitted_By:String,
Submitted_To:String,
});
leadSchema.plugin(passportLocalMongoose);
leadSchema.plugin(mongooseToCsv, {
headers: 'Submitted_By Submitted_To status',
constraints: {
'Submitted_By': 'Submitted_By',
'Submitted_To': 'Submitted_To',
'status': 'curstatus'
}
});
module.exports = mongoose.model("Lead",leadSchema)
<file_sep>/middleware/checkLogin.js
var jwt = require('jsonwebtoken');
var requireAuth = (req, res, next) => {
var token = localStorage.getItem("tocken");
// check that a token exists in localStorage (note: the JWT signature is not actually verified here)
if (token) {
next();
} else {
res.send('You Need To Login first for this .')
}
};
module.exports = requireAuth ;<file_sep>/app.js
var express = require("express"),
app = express(),
http = require('http').Server(app),
io = require('socket.io')(http),
mongoose = require("mongoose"),
bodyParser = require("body-parser"),
methodOverride = require("method-override"),
passport = require("passport"),
bcryptjs = require('bcryptjs'),
LocalStrategy = require("passport-local"),
// seedDB = require("./seeds"),
OpCo1 = require("./models/opco1");
OpCo2 = require("./models/opco2");
OpCo3 = require("./models/opco3");
Lead = require("./models/lead");
checkLogin= require("./middleware/checkLogin")
const mongodb = require("mongodb").MongoClient;
const fastcsv = require("fast-csv");
const objectstocsv = require('objects-to-csv')
const fs = require("fs");
const mongooseToCsv = require('mongoose-to-csv');
var tokenGneration = require('./modules/generateToken');
if (typeof localStorage === "undefined" || localStorage === null) {
var LocalStorage = require("node-localstorage").LocalStorage;
localStorage = new LocalStorage('./scratch');
}
const company1=require("./models/opco1");
const opco1 = require("./models/opco1");
const { all } = require("./routes/indexRoutes");
var indexRoutes = require("./routes/indexRoutes");
const lead = require("./models/lead");
app.use( express.static( "public" ) );
app.set("view engine", "ejs");
app.use(bodyParser.urlencoded({extended:true}));
app.use(methodOverride("_method"));
mongoose.connect('mongodb://user1:psw1@lms-shard-00-00.hlrp5.mongodb.net:27017,lms-shard-00-01.hlrp5.mongodb.net:27017,lms-shard-00-02.hlrp5.mongodb.net:27017/myFirstDatabase?ssl=true&replicaSet=atlas-tgi1cr-shard-0&authSource=admin&retryWrites=true&w=majority', { useNewUrlParser: true, useUnifiedTopology: true });
app.use(require("express-session")({
secret:"Once again, Rusty wins cutest dog",
resave:false,
saveUninitialized:false
}));
app.use(passport.initialize());
app.use(passport.session());
//passport.use(new LocalStrategy(User.authenticate()));
// passport.serializeUser(User.serializeUser());
// passport.deserializeUser(User.deserializeUser());
app.use(express.static(__dirname + "public"));
app.use("/",indexRoutes);
// // SIGN UP LOGIC
app.post("/register",async function(req,res){
const {name,OpCo,username,password} = req.body;
var model=opco1
var findVar={
opcom1:"OpCo1",
opcom2:"OpCo2",
opcom3:"OpCo3"
}
var findOpCo={
opcom1:OpCo1,
opcom2:OpCo2,
opcom3:OpCo3
}
for (var key in findVar) {
if (findVar[key]===OpCo) {
model=findOpCo[key]
break
}
}
try {
const user = await model.create({ "name" : name, "OpCo" : OpCo , "username" : username, "password" : <PASSWORD>});
var token = tokenGneration(user._id);
localStorage.setItem('EMAIL' , username);
res.cookie('jwt', token, { httpOnly: true });
res.redirect("/home");
}
catch(err) {
console.log(err)
res.render("msg",{msg:"Invalid Data"})
}
// var newUser = new model({name:name,OpCo:OpCo,username:username,password:psw});
// model.register(newUser, req.body.password, function(err,user){
// if (err) {
// console.log(err.message)
// return res.render("register");
// }
// console.log(model)
// passport.authenticate("local")(req,res,function(){
// res.redirect("/home");
// })
// });
});
// HANDLING LOGIN LOGIC
app.post('/login',async (req, res, next) => {
var model=opco1
var findVar={
opcom1:"opco1",
opcom2:"opco2",
opcom3:"opco3"
}
var findOpCo={
opcom1:OpCo1,
opcom2:OpCo2,
opcom3:OpCo3
}
for (var key in findVar) {
var username=req.body.username
if (username.includes(findVar[key])) {
model=findOpCo[key]
break}}
try {
const user = await model.login(username, req.body.password);
var token = tokenGneration(user._id);
localStorage.setItem('EMAIL' , username);
res.cookie('jwt', token, { httpOnly: true });
//res.status(200).json({ user: user._id });
res.redirect("/home")
} catch (err) {
//res.status(400).json({msg:"Invalid credentials"});
res.render("msg",{msg:"Invalid credentials"})
}
});
var getUser
app.get("/home",checkLogin,function(req,res){
getUser=localStorage.getItem("EMAIL")
res.render("home")
})
// app.post("/form",checkLogin, function(req,res){
// const {name,value,segment,to} = req.body;
// var newLead = new Lead({name:name,time:new Date(),status:"open",value:value,segment:segment,Submitted_By:localStorage.getItem("EMAIL"),Submitted_To:to});
// newLead.save()
// console.log("Lead Generated")
// res.redirect(`/home`)
// });
app.get("/leads",checkLogin,async function(req,res){
var allLeads= await lead.find({Submitted_To:localStorage.getItem("EMAIL")})
console.log(allLeads)
res.render("leads",{allLeads:allLeads});
});
app.get("/leadSent",checkLogin,async function(req,res){
var allLeads= await lead.find({Submitted_By:localStorage.getItem("EMAIL")})
console.log(allLeads,localStorage.getItem("EMAIL"))
res.render("leadSent",{allLeads:allLeads});
});
app.get("/form",checkLogin,function(req,res){
res.render("form",{username:localStorage.getItem("EMAIL")});
});
app.get("/home/csv",checkLogin,async function(req,res){
const query = lead.find({Submitted_By:localStorage.getItem("EMAIL")},{_id:0,Submitted_By:1,Submitted_To:1,curstatus:1})
let list = await query.lean().exec();
const query1 = lead.find({Submitted_To:localStorage.getItem("EMAIL")},{_id:0,Submitted_By:1,Submitted_To:1,curstatus:1})
let list1 = await query1.lean().exec();
var list2=list.concat(list1)
const csv = new objectstocsv(list2);
// Save to file:
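// A fixed 4 s delay before the CSV is written to disk, streamed to the client, and the temp file deleted.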
setTimeout(async()=>{
await csv.toDisk('./details.csv');
res.download("./details.csv", () => {
fs.unlinkSync("./details.csv")
})
},4000);
});
app.post("/form",(req,res)=>{
console.log(req.body)
var newLead = new Lead({name:req.body.name,status:{label:"open",time:new Date()},curstatus:"open",value:req.body.dollar,details:req.body.value,segment:req.body.segment,Submitted_By:req.body.from,Submitted_To:req.body.to});
newLead.save()
console.log("Lead Generated")
res.render("msg",{msg:"Lead send successfully"})
});
app.get('/chart' ,(req,res)=>{
res.render('chart1');
})
app.get("/api/data" ,async (req ,res)=>{
console.log("Api called");
var open = await lead.find({curstatus:"open"})
var closed = await lead.find({curstatus:"close"})
var validated = await lead.find({curstatus:"validated"})
var rejected = await lead.find({curstatus:"rejected"})
if(open.length !== undefined && closed.length !== undefined &&validated.length !== undefined && rejected.length !== undefined ){
res.send({open : open.length , closed : closed.length , validated : validated.length , rejected : rejected.length});
}
})
app.get("/leads/open",checkLogin,async function(req,res){
var allLeads= await lead.find({Submitted_To:localStorage.getItem("EMAIL")})
var openLeads=allLeads.filter(x => x.curstatus==="open")
res.render("leads",{allLeads:openLeads});
});
app.get("/leads/closed",checkLogin,async function(req,res){
var allLeads= await lead.find({Submitted_To:localStorage.getItem("EMAIL")})
var closeLeads=allLeads.filter(x => x.curstatus==="close")
res.render("leads",{allLeads:closeLeads});
});
app.get("/leads/validated",checkLogin,async function(req,res){
var allLeads= await lead.find({Submitted_To:localStorage.getItem("EMAIL")})
var validatedLeads=allLeads.filter(x => x.curstatus==="validated")
res.render("leads",{allLeads:validatedLeads});
});
app.get("/leads/rejected",checkLogin,async function(req,res){
var allLeads= await lead.find({Submitted_To:localStorage.getItem("EMAIL")})
var rejectedLeads=allLeads.filter(x => x.curstatus==="rejected")
res.render("leads",{allLeads:rejectedLeads});
});
app.post("/leads/:id/reject",checkLogin, function(req, res){
Lead.findById(req.params.id, function(err,user){
if (err) {
res.redirect("/leads");
} else {
user.status.push({label:"rejected",time:new Date()})
user.curstatus="rejected"
user.save()
res.redirect("/leads");
}
})
});
app.post("/leads/:id/accept",checkLogin, function(req, res){
Lead.findById(req.params.id, function(err,user){
if (err) {
res.redirect("/leads");
} else {
user.status.push({label:"validated",time:new Date()})
user.curstatus="validated"
user.save()
res.redirect("/leads");
}
})
});
app.post("/leads/:id/close",checkLogin, function(req, res){
Lead.findById(req.params.id, function(err,user){
if (err) {
res.redirect("/leads");
} else {
user.status.push({label:"close",time:new Date()})
user.curstatus="close"
user.save()
res.redirect("/leads");
}
})
});
app.get("/api/data" ,async (req ,res)=>{
console.log("Api called");
var open = await lead.find({curstatus:"open"})
var closed = await lead.find({curstatus:"close"})
var validated = await lead.find({curstatus:"validated"})
var rejected = await lead.find({curstatus:"rejected"})
if(open.length !== undefined && closed.length !== undefined &&validated.length !== undefined && rejected.length !== undefined ){
res.send({open : open.length , closed : closed.length , validated : validated.length , rejected : rejected.length});
}
})
app.get('/api/barData', async (req,res)=>{
var allRecords = [];
var allSegments = await lead.find({},{segment : 1});
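// For each segment, count its leads; the callbacks below run asynchronously, so the response is only sent after the fixed 2.5 s wait further down.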
allSegments.forEach((s)=>{
var LeadsinSegment = lead.find({segment : s.segment}, (error,data)=>{
if(error)
console.log(error)
console.log(data.length + " " + s.segment);
allRecords.push( s.segment )
allRecords.push( data.length );
})
})
setTimeout(()=>{
console.log(allRecords.length);
res.send(allRecords)
},2500)
})
app.get('/chart1' ,(req,res)=>{
res.render('chart1');
})
app.get('/logout',(req,res)=>{
localStorage.removeItem("EMAIL")
localStorage.removeItem("tocken")
res.redirect("/")
})
http.listen(process.env.PORT || 80, function(){
console.log("Express server listening on port %d in %s mode", this.address().port, app.settings.env);
})
<file_sep>/routes/indexRoutes.js
var express = require("express"),
router = express.Router(),
passport = require("passport"),
OpCo1 = require("../models/opco1");
OpCo2 = require("../models/opco2");
OpCo3 = require("../models/opco3");
router.get("/register", function(req,res){
res.render("register");
});
router.get("/index", function(req,res){
res.render("index");
});
router.get("/login", function(req, res){
res.render("login");
})
router.get("/", function(req, res){
res.render("login");
})
// LOGOUT ROUTE
router.get("/logout", function(req, res){
req.logout();
res.redirect("/login");
})
module.exports = router;
|
a12a33b242fadf18342c0a32d1b69df1fd0c3a81
|
[
"JavaScript",
"Markdown"
] | 10
|
JavaScript
|
Atharva222002/LMS
|
aa8491c94832fe051e8eb1dc0218f183eef321fd
|
3f0734ae6ee163329d94914c53939367478259d8
|
refs/heads/master
|
<repo_name>Ryzee119/SDL_audio_tests<file_sep>/Makefile
XBE_TITLE = sdl_audio
GEN_XISO = $(XBE_TITLE).iso
SRCS = $(CURDIR)/main.c
NXDK_SDL = y
new_all: copy_resources all
include $(NXDK_DIR)/Makefile
copy_resources:
@mkdir -p $(OUTPUT_DIR)
@cp $(CURDIR)/assets/500hz48000.wav $(OUTPUT_DIR)/500hz48000.wav<file_sep>/main.c
#include <SDL.h>
#include <hal/debug.h>
#include <hal/xbox.h>
#include <hal/video.h>
#define SCREEN_WIDTH 640
#define SCREEN_HEIGHT 480
#define SOUND_FILE "D:\\500hz48000.wav"
#define printf(fmt, ...) debugPrint(fmt, __VA_ARGS__)
int main(void)
{
// Setup Xbox video output
XVideoSetMode(SCREEN_WIDTH, SCREEN_HEIGHT, 32, REFRESH_DEFAULT);
// Initialize SDL
SDL_SetHint(SDL_HINT_JOYSTICK_ALLOW_BACKGROUND_EVENTS, "1");
if (SDL_Init(SDL_INIT_AUDIO | SDL_INIT_GAMECONTROLLER) < 0)
{
printf("SDL could not be initialized!\n"
"SDL_Error: %s\n",
SDL_GetError());
return 0;
}
// Load .WAV sound
SDL_AudioSpec wavFileSpec, obtainedSpec;
Uint32 wavLength;
Uint8 *wavBuffer;
if (!SDL_LoadWAV(SOUND_FILE, &wavFileSpec, &wavBuffer, &wavLength))
{
printf("Sound '%s' could not be loaded!\n"
"SDL_Error: %s\n",
SOUND_FILE, SDL_GetError());
return 0;
}
printf("File Audio Format:\n");
printf("Loaded %s\n", SOUND_FILE);
printf("Frequency: %uHz\n", wavFileSpec.freq);
printf("Channels: %u\n", wavFileSpec.channels);
printf("Samples: %u\n", wavFileSpec.samples);
printf("------------------\n\n");
// Open audio device
SDL_AudioDeviceID dev = SDL_OpenAudioDevice(NULL,
0, //0 = playback
&wavFileSpec,
&obtainedSpec,
0);
printf("Obtained Audio Spec:\n");
printf("Frequency: %uHz\n", obtainedSpec.freq);
printf("Channels: %u\n", obtainedSpec.channels);
printf("Samples: %u\n\n", obtainedSpec.samples);
if (!dev)
{
printf("Audio device could not be opened!\n"
"SDL_Error: %s\n",
SDL_GetError());
SDL_FreeWAV(wavBuffer);
return 0;
}
if (SDL_QueueAudio(dev, wavBuffer, wavLength) < 0)
{
printf("Audio could not be queued!\n"
"SDL_Error: %s\n",
SDL_GetError());
SDL_CloseAudioDevice(dev);
SDL_FreeWAV(wavBuffer);
return 0;
}
// Play audio
SDL_PauseAudioDevice(dev, 0);
// Event loop
SDL_JoyButtonEvent *jb_button;
SDL_JoystickOpen(0);
int quit = 0;
while (!quit)
{
//Replay the audio file indefinitely
if (SDL_GetQueuedAudioSize(dev) == 0)
{
SDL_QueueAudio(dev, wavBuffer, wavLength);
}
static SDL_Event e;
while (SDL_PollEvent(&e))
{
if (e.type == SDL_JOYBUTTONDOWN)
{
jb_button = (SDL_JoyButtonEvent *)&e;
if (jb_button->button == 0) //A
{
if (SDL_GetAudioDeviceStatus(dev) == SDL_AUDIO_PLAYING)
{
SDL_PauseAudioDevice(dev, 1);
printf("Audio paused\n");
}
else if (SDL_GetAudioDeviceStatus(dev) == SDL_AUDIO_PAUSED)
{
SDL_PauseAudioDevice(dev, 0);
printf("Audio resumed\n");
}
}
if (jb_button->button == 1) //B
{
printf("Exiting\n");
quit = 1;
}
}
}
}
SDL_CloseAudioDevice(dev);
SDL_FreeWAV(wavBuffer);
SDL_Quit();
return 0;
}
<file_sep>/README.md
# SDL_audio_tests
SDL_audio tests for https://github.com/XboxDev/nxdk
To build, setup nxdk then do this:
```
export NXDK_DIR=/path/to/nxdk
git clone https://github.com/Ryzee119/SDL_audio_tests.git
cd SDL_audio_tests
make
```
Test tone generated with `ffmpeg -f lavfi -i "sine=frequency=500:duration=5:sample_rate=48000" 500hz48000.wav`
|
45e193779d4e31f5727c8b2c3436090b0b6731d0
|
[
"Markdown",
"C",
"Makefile"
] | 3
|
Makefile
|
Ryzee119/SDL_audio_tests
|
bb1b45c2887c6fa441ad10399677c038e231fb16
|
a30f9228c113d647daab4fe57729efb15fbe4b74
|
refs/heads/main
|
<repo_name>saefullohmaslul/crack-bcrypt-password<file_sep>/crack-bcrypt/sample database dump/db.sql
--
-- Dumping data for table `users`
--
INSERT INTO `users` (`id`, `name`, `photo`, `email`, `password`, `id_privileges`, `created_at`, `updated_at`, `status`) VALUES
(1, 'Super Admin', 'uploads/1/2021-01/sadmin.jpg', '<EMAIL>', <PASSWORD>', 1, '2021-01-01 00:59:23', '2021-01-12 04:11:18', NULL),
(5, 'Admin', 'uploads/1/2021-01/admin.jpg', '<EMAIL>', '$2a$10$jx/qQe.pDuX2h0i9QVyzSOQUQ.P.hDdQQoeC1x1pVUNoQw5A/jaUi', 4, '2021-01-06 01:51:30', '2021-01-07 02:25:31', NULL),
(6, 'User_1', 'uploads/1/2021-01/user1.jpg', '<EMAIL>', <PASSWORD>', 3, '2021-01-07 02:01:30', NULL, NULL),
(7, 'User_2', 'uploads/1/2021-01/user2.jpg', '<EMAIL>', '$2a$10$I/yNzTkgBSHfleHPMa3kw.Cl7LwASHdE/Gxw/pXoTeYHyGMGtKSjW', 3, '2021-01-07 02:03:40', NULL, NULL),
(14, 'User_3', 'uploads/1/2021-01/user3.jpg', '<EMAIL>', <PASSWORD>', 5, '2021-01-08 08:58:52', '2021-01-12 04:11:37', NULL);
-- --------------------------------------------------------<file_sep>/README.md
# crack-hash-bcrypt
## Disclaimer
>All the information provided on this tutorial is for educational purposes only. The information on this tutorial should only be used to enhance the security for your computer systems and not for causing malicious or damaging attacks.
>
>You should not misuse this information to gain unauthorized access into computer systems. Also be aware, performing hack attempts on computers that you do not own, without written permission from owners, is illegal.
>
>PT Dymar Jaya Indonesia will not be responsible for any direct or indirect damage caused due to the usage of the information provided on this tutorial.
---
>Semua informasi yang diberikan pada tutorial ini bertujuan untuk edukasi. Informasi pada tutorial ini sebaiknya hanya digunakan untuk meningkatkan keamanan sistem komputer Anda dan bukan untuk menyebabkan serangan jahat atau merusak.
>
>Anda tidak boleh menyalahgunakan informasi ini untuk mendapatkan akses tidak sah ke sistem komputer. Ketahuilah bahwa, melakukan upaya peretasan pada komputer yang bukan milik Anda, tanpa izin tertulis dari pemilik, adalah ilegal.
>
>PT Dymar Jaya Indonesia tidak akan bertanggung jawab atas kerusakan langsung atau tidak langsung yang disebabkan karena penggunaan informasi yang diberikan pada tutorial ini.
<br>
## Description
Crack bcrypt with a dictionary-attack approach. These days, reasonably accurate password dictionaries are easy to obtain from the internet or to generate with Artificial Intelligence (AI) techniques.
<br>
## Usage Examples
### For the Java version, go to the `crack-bcrypt\java-bcrypt` folder.
Generate a bcrypt hash from a password.
```
Usage: java bcrypttest <Data>
crack-bcrypt\java-bcrypt>java bcrypttest p@ssw0rd
data: p@ssw0rd
prefix: 2a
log round: 10
salt: 4OreeNQ2jLnjD5wxmkiXru
hash: MjuDuFpLGUmAIDjcJ13y4f7gxlwEjZC
bcrypt: $2a$10$4OreeNQ2jLnjD5wxmkiXruMjuDuFpLGUmAIDjcJ13y4f7gxlwEjZC
```
Brute-force a bcrypt password using a dictionary list.
```
Usage: java bfda <Dictionary File Name> <Bcrypt Hash>
crack-bcrypt\java-bcrypt>java bfda dictionary.txt $2a$10$4OreeNQ2jLnjD5wxmkiXruMjuDuFpLGUmAIDjcJ13y4f7gxlwEjZC
p@ssw0rd
```
Find which hashes in a list of bcrypt passwords match a given clear-text password.
```
Usage: java sdbm <Bcrypt Database File Name> <Clear Password>
crack-bcrypt\java-bcrypt>java sdbm db.txt P@ssw0rd
$2a$10$gCOjiyIlrHDr.tYAvB1kdu32RiEl9LqKJh7mkIJEIMdjGC7SWM7nK
$2a$10$498jf/EdrFirxtGGdsfOZOEze5eATdJRBojDP6UgIMTQBEb.yWtaa
$2a$10$I/yNzTkgBSHfleHPMa3kw.Cl7LwASHdE/Gxw/pXoTeYHyGMGtKSjW
```
Match dictionary words against a list of bcrypt password hashes.
```
Usage: java mnm <Bcrypt Database File Name> <Dictionary File Name>
crack-bcrypt\java-bcrypt>java mnm db.txt dictionary.txt
$2a$10$ApnuWv1YaPVOWMccxk8nUeVbpOXyk6g1HG67KpozOb0AH11nPJqa2:password
$2a$10$YpDyZ3qGVBFuFAe/XQObeu5XIGRLeDa4VmREGlBkIxfledUd4GQUi:test
$2a$10$g<PASSWORD>yI<PASSWORD>r.t<PASSWORD>E<PASSWORD>nK:P<PASSWORD>
$<PASSWORD>r<PASSWORD>fOZOEze5eATd<PASSWORD>j<PASSWORD>a:P<PASSWORD>
$<PASSWORD>/y<PASSWORD>3kw.Cl<PASSWORD>dE/Gxw/<PASSWORD>Y<PASSWORD>jW:P<PASSWORD>
$<PASSWORD>:Secret<PASSWORD>
$<PASSWORD>.P.h<PASSWORD>C1x1pVUNoQw5A/jaUi:ra<PASSWORD>
```
### For the Golang version, go to the `crack-bcrypt\golang-bcrypt` folder.
Generate a bcrypt hash from a password.
```
Usage: bcrypttest.exe <data>
crack-bcrypt\golang-bcrypt\bcrypttest>bcrypttest.exe P@ssw0rd
data: P@ssw0rd
prefix: 2a
log rounds: 10
salt: Yp1tDRQN.8GWov8cRst1MO
hash: gtB8y35MUgPwERmuk5c5VSg8JErGMQq
bcrypt: $2a$10$Yp1tDRQN.8GWov8cRst1MOgtB8y35MUgPwERmuk5c5VSg8JErGMQq
```
Brute-force a bcrypt password using a dictionary list.
```
Usage: bfda.exe <dictionary bcrypt file name> <bcrypt hash>
crack-bcrypt\golang-bcrypt\bfda>bfda.exe dictionary $2a$10$Yp1tDRQN.8GWov8cRst1MOgtB8y35MUgPwERmuk5c5VSg8JErGMQq
P@ssw0rd
```
Find which hashes in a list of bcrypt passwords match a given clear-text password.
```
Usage: sdbm.exe <bcrypt database file name> <clear password>
crack-bcrypt\golang-bcrypt\sdbm>sdbm.exe db P@ssw0rd
$2a$10$gCOjiyIlrHDr.tYAvB1k<PASSWORD>9LqKJh7mkIJEIMdjGC7SWM7nK
$2a$10$498jf/EdrFirxtGGdsfOZOEze5eATdJRBojDP6UgIMTQBEb.yWtaa
$2a$10$I/yNzTkgBSHfleHPMa3kw.Cl7LwASHdE/Gxw/pXoTeYHyGMGtKSjW
```
Match dictionary words against a list of bcrypt password hashes.
```
Usage: mnm.exe <bcrypt database file name> <dictionary file name>
crack-bcrypt\golang-bcrypt\mnm>mnm.exe db dictionary
$2a$10$FMniiwyewiiQDcPZz26X9eOIchXktWO8kBciEEqmx1qRuOA2m7WRO:SecretPassword
$2a$10$jx/qQe.pDuX2h0i9QVyzSOQUQ.P.hDdQQoeC1x1pVUNoQw5A/jaUi:rahas<PASSWORD>3
$2a$10$gCOjiyIlrHDr.tYAvB1kdu32RiEl9LqKJh7mkIJEIMdjGC7SWM7nK:P@ssw0rd
$2a$10$ApnuWv1YaPVOWMccxk8nUeVbpOXyk6g1HG67KpozOb0AH11nPJqa2:password
$2a$10$498jf/EdrFirxtGGdsfOZOEze5eATdJRBojDP6UgIMTQBEb.yWtaa:P@ssw0rd
$2a$10$I/yNzTkgBSHfleHPMa3kw.Cl7LwASHdE/Gxw/pXoTeYHyGMGtKSjW:P@ssw0rd
$2a$10$YpDyZ3qGVBFuFAe/XQObeu5XIGRLeDa4VmREGlBkIxfledUd4GQUi:test
```
## Solution
It is strongly recommended to encrypt passwords that will be stored in the database with symmetric-key cryptography, for example the AES algorithm. Then protect that symmetric key using certified, dedicated cryptographic hardware, i.e. an HSM (Hardware Security Module) such as the [payShield 10K HSM](https://dymarjaya.co.id/product/payshield-10000/). This is done to help secure the protection of company data and other valuable assets.
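Purely as an illustration (not one of this repository's tools), here is a minimal Go sketch of wrapping a bcrypt hash with AES-256-GCM before storing it; the `encryptHash` helper is hypothetical, and in a real deployment the 32-byte key would be kept inside the HSM rather than generated in application memory as done here:

```go
package main

import (
	"crypto/aes"
	"crypto/cipher"
	"crypto/rand"
	"encoding/base64"
	"fmt"
)

// encryptHash seals a bcrypt hash with AES-256-GCM.
// The random nonce is prepended to the ciphertext so everything fits in one column.
func encryptHash(key []byte, bcryptHash string) (string, error) {
	block, err := aes.NewCipher(key) // key must be 32 bytes for AES-256
	if err != nil {
		return "", err
	}
	gcm, err := cipher.NewGCM(block)
	if err != nil {
		return "", err
	}
	nonce := make([]byte, gcm.NonceSize())
	if _, err := rand.Read(nonce); err != nil {
		return "", err
	}
	sealed := gcm.Seal(nonce, nonce, []byte(bcryptHash), nil)
	return base64.StdEncoding.EncodeToString(sealed), nil
}

func main() {
	// Illustration only: in practice this key lives inside the HSM, never in app memory.
	key := make([]byte, 32)
	if _, err := rand.Read(key); err != nil {
		panic(err)
	}
	ct, err := encryptHash(key, "$2a$10$4OreeNQ2jLnjD5wxmkiXruMjuDuFpLGUmAIDjcJ13y4f7gxlwEjZC")
	if err != nil {
		panic(err)
	}
	fmt.Println(ct)
}
```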
For information on how to integrate an application with an HSM, see the following [repository](https://github.com/dymarjaya/payshield-hsm-api). For further information, contact [Dymar](https://www.dymarjaya.co.id).
<file_sep>/crack-bcrypt/golang-bcrypt/bcrypttest/bcrypttest.go
// PT Dymar Jaya Indonesia
// Bcrypt test
// required: go get golang.org/x/crypto/bcrypt
package main
import (
"flag"
"fmt"
"os"
"golang.org/x/crypto/bcrypt"
)
func HashPassword(password string) (string, error) {
//cost factor. min:4, max:31
bytes, err := bcrypt.GenerateFromPassword([]byte(password), 10)
return string(bytes), err
}
func CheckPasswordHash(password, hash string) bool {
err := bcrypt.CompareHashAndPassword([]byte(hash), []byte(password))
return err == nil
}
func main() {
flag.Usage = func() {
fmt.Printf("Usage: %s <data>\n", os.Args[0])
flag.PrintDefaults()
}
flag.Parse()
if flag.NArg() != 1 {
flag.Usage()
os.Exit(1)
}
data := os.Args[1]
hash, _ := HashPassword(data) // ignore error for the sake of simplicity
fmt.Println("data: ",data )
fmt.Println("prefix:", string(hash)[1:1+2])
fmt.Println("log rounds: ", string(hash)[4:4+2])
fmt.Println("salt: ", string(hash)[7:7+22])
fmt.Println("hash: ", string(hash)[29:])
fmt.Println("bcrypt: ", hash)
//match := CheckPasswordHash(password, hash)
//fmt.Println("Match: ", match)
}
<file_sep>/crack-bcrypt/java-bcrypt/mnm.java
// PT Dymar Jaya Indonesia
// mnm - mix and match
// mix between bfda and sdbm functionality
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.util.Scanner;
import java.io.File;
import java.io.FileNotFoundException;
import java.lang.RuntimeException;
import java.util.*;
public class mnm {
public static void main(String[] args) {
if ((args.length != 2)) {
System.out.println("Usage: ");
System.out.println("java mnm <Bcrypt Database File Name> <Dictionary File Name> ");
return;
}
Scanner s, ss = null;
try{
s = new Scanner(new File(args[0]));
ss = new Scanner(new File(args[1]));
bcrypttest bctest = new bcrypttest();
List<String> temps = new ArrayList<String>();
while (ss.hasNext()){
String word = ss.next();
temps.add(word);
}
ss.close();
List<String> temps2 = new ArrayList<String>();
while (s.hasNext()){
String bc = s.next();
temps2.add(bc);
}
s.close();
String[] bcrypt = temps2.toArray(new String[0]);
String[] words = temps.toArray(new String[0]);
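// Try every dictionary word against every hash. Each BCrypt.checkpw call runs the full cost factor
// encoded in the hash (e.g. 2^10 rounds for "$2a$10$..."), so this is an O(words x hashes) brute force.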
for (int i=0; i< words.length ; i++){
for (int j=0; j< bcrypt.length ; j++){
boolean verify = bctest.verifyHash(words[i], bcrypt[j]);
if(verify == true){
System.out.println(bcrypt[j] + ":" + words[i] );
}
}
}
} catch (FileNotFoundException e) {
throw new RuntimeException(e);
}
}
/*
* generate BCrypt hash
*/
public String generateHash(String plainText) {
try {
String salt = BCrypt.gensalt(10, SecureRandom.getInstance("SHA1PRNG"));
return BCrypt.hashpw(plainText, salt);
} catch (NoSuchAlgorithmException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return null;
}
/*
* BCrypt check
*/
public boolean verifyHash(String plainText, String cipher) {
return BCrypt.checkpw(plainText, cipher);
}
}
<file_sep>/crack-bcrypt/golang-bcrypt/bfda/bfda.go
// PT Dymar Jaya Indonesia
// bfda - brute force dictionary attack
// brute force bcrypt with dictionary attack
// required: go get golang.org/x/crypto/bcrypt
package main
import (
"bufio"
"flag"
"fmt"
"log"
"os"
"golang.org/x/crypto/bcrypt"
)
func HashPassword(password string) (string, error) {
bytes, err := bcrypt.GenerateFromPassword([]byte(password), 14)
return string(bytes), err
}
func CheckPasswordHash(password, hash string) bool {
err := bcrypt.CompareHashAndPassword([]byte(hash), []byte(password))
return err == nil
}
func main() {
flag.Usage = func() {
fmt.Printf("Usage: %s <dictionary bcrypt file name> <bcrypt hash>\n", os.Args[0])
flag.PrintDefaults()
}
flag.Parse()
if flag.NArg() != 2 {
flag.Usage()
os.Exit(1)
}
fileName := os.Args[1]
file, err := os.Open(fileName)
if err != nil {
log.Fatalf("failed opening file: %s", err)
}
scanner := bufio.NewScanner(file)
scanner.Split(bufio.ScanLines)
var txtlines []string
for scanner.Scan() {
txtlines = append(txtlines, scanner.Text())
}
file.Close()
hash := os.Args[2]
for _, eachline := range txtlines {
match := CheckPasswordHash(eachline, hash)
if match == true {
fmt.Println(eachline)
}
}
}
|
ebf457d7eec23a5ea8ed80c43f3fa9b468fa969d
|
[
"Markdown",
"SQL",
"Java",
"Go"
] | 5
|
SQL
|
saefullohmaslul/crack-bcrypt-password
|
c7aeb33654576e1b84becb5c9653dc5a5c35278a
|
60329160fb210f385bd8b66ae6ea4c9b65420d20
|
refs/heads/master
|
<file_sep>module Ssosdk
class YufuSdkRoleConstants
ROLE_IDP = "ROLE_IDP"
ROLE_SP = "ROLE_SP"
end
end<file_sep>module Ruby
module YufuSsoSdk
VERSION = "0.1.0"
end
end
<file_sep>require "ruby/ssosdk/version"
module Ruby
module YufuSsoSdk
class Error < StandardError; end
# Your code goes here...
end
end
<file_sep>module Ssosdk
require 'jwt'
require 'openssl'
class RSATokenVerifier
def initialize(publicKeyInfo)
@publicKey = OpenSSL::PKey::RSA.new File.read publicKeyInfo
end
def verify(token)
payload, header = JWT.decode token, @publicKey, true, {:verify_expiration => true, :verify_not_before => true, :verify_iat => true, :algorithm => 'RS256'}
[payload, header]
end
end
end
<file_sep>## Yufu Single Sign-On SDK
The Yufu SDK bundles signing and verification of JWT tokens, so that identity providers (IDP) and service providers (SP) can integrate Yufu's single sign-on features into an existing service with very little effort.
## SSO SDK Overview
As a service provider (SP), you can use the Yufu SDK to verify the validity of a JWT token (expiry, signature, etc.); after successful verification, fields can be extracted from the token for authorization.
As an identity provider (IDP), you can use the Yufu SDK to configure parameters flexibly and generate a redirect URL carrying the token, to perform single sign-on.
## Installation
Add this line to your application's Gemfile:
```ruby
gem 'ruby-sso-sdk'
```
And then execute:
$ bundle
Or install it yourself as:
gem "ruby-sso-sdk", :git => "git://github.com/306994914/ruby-sso-sdk.git"
## SDK Usage
1. Service provider (SP)
Initialize the SDK with the required information:
```ruby
@yufuAuth = Ssosdk::YufuAuth.new
@yufuAuth.initialize_verifier(keyPath)
jwt = @yufuAuth.verify_token(testToken)
```
## Identity Provider (IDP)
1. Customize the JWT claims:
```ruby
claims = {Ssosdk::YufuTokenConstants::APP_INSTANCE_ID_KEY => "testAppInstanceId", "customFieldsKey" => "customFieldsValue"}
```
2. Initialize the SDK with the required information (these parameters are obtained after provisioning with Yufu):
```ruby
@yufuAuth = Ssosdk::YufuAuth.new
@yufuAuth.initialize_generator(keyPath, "testIssuer", "testTenant", "2bf935821aa33e693d39ab569ba557aa0af8e02e")
idp_token = @yufuAuth.generate_token(claims)
jwt = JWT.decode idp_token, nil, false
puts @yufuAuth.generate_idp_redirect_url(claims)
```
<file_sep>module Ssosdk
class YufuTokenConstants
IDP_TOKEN_CONSUME_URL = "https://portal.yufuid.com/api/v1/external/sso"
TENANT_ID_KEY = "tnt"
APP_INSTANCE_ID_KEY = "appInstanceId"
KEY_ID_SEPARATOR = "###"
AUDIENCE_YUFU = "yufu"
TOKEN_EXPIRE_TIME_IN_MS = 300000
end
end<file_sep>module Ssosdk
class RSATokenGenerator
def initialize(privateKeyPath, issuer, tenant, keyFingerPrint)
@issuer = issuer
@tenant = tenant
@keyId = keyFingerPrint == nil ? issuer : issuer + YufuTokenConstants::KEY_ID_SEPARATOR + keyFingerPrint
if privateKeyPath.nil?
raise "key filename cannot be blank"
end
# read the private key
@rsa_private = OpenSSL::PKey::RSA.new(File.read(privateKeyPath))
end
def generate(claims)
audience = claims["aud"]
if audience.nil?
audience = YufuTokenConstants::AUDIENCE_YUFU
end
iat = Time.now.to_i
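# NOTE: TOKEN_EXPIRE_TIME_IN_MS is a millisecond value but is added to a seconds-based epoch below,
# so tokens actually stay valid much longer than the intended 5 minutes.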
exp = Time.now.to_i + YufuTokenConstants::TOKEN_EXPIRE_TIME_IN_MS
payload = {:aud => audience, :exp => exp, :iat => iat, :iss => @issuer, :tnt => @tenant}
payload=payload.merge(claims)
JWT.encode payload, @rsa_private, 'RS256', {keyId: @keyId, :type => "JWT"}
end
def generate_idp_redirect_url(claims)
Ssosdk::YufuTokenConstants::IDP_TOKEN_CONSUME_URL + "?idp_token=" + generate(claims)
end
end
end
<file_sep>require 'minitest/autorun'
require 'pathname'
require_relative '../../lib/ruby/ssosdk/yufu_auth'
require_relative '../../lib/ruby/ssosdk/constants/yufu_token_constants'
class SsosdkTest < Minitest::Test
def setup
@path = Pathname.new(File.dirname(__FILE__)).realpath
end
def test_verify_token
keyPath = "#{@path}/testPublicKey.pem"
testToken = "<KEY>"
@yufuAuth = Ssosdk::YufuAuth.new
@yufuAuth.initialize_verifier(keyPath)
jwt = @yufuAuth.verify_token(testToken)
payload = jwt[0]
assert_equal "yufu", payload['iss']
assert_equal "<EMAIL>", payload['sub']
assert_equal "297220", payload['tnt']
end
def test_generate_token
keyPath = "#{@path}/testPrivateKey.pem"
claims = {Ssosdk::YufuTokenConstants::APP_INSTANCE_ID_KEY => "testAppInstanceId", "customFieldsKey" => "customFieldsValue"}
@yufuAuth = Ssosdk::YufuAuth.new
@yufuAuth.initialize_generator(keyPath, "testIssuer", "testTenant", "<KEY>")
idp_token = @yufuAuth.generate_token(claims)
jwt = JWT.decode idp_token, nil, false
puts @yufuAuth.generate_idp_redirect_url(claims)
header = jwt[1]
assert_equal "testIssuer###<KEY>", header["keyId"]
payload = jwt[0]
assert_equal "testAppInstanceId", payload[Ssosdk::YufuTokenConstants::APP_INSTANCE_ID_KEY]
assert_equal "testIssuer", payload["iss"]
assert_equal "testTenant", payload["tnt"]
end
end
<file_sep>module Ssosdk
require_relative '../../../lib/ruby/ssosdk/token/rsa_token_generator'
require_relative '../../../lib/ruby/ssosdk/token/rsa_token_verifier'
class YufuAuth
@tokenVerifier
@tokenGenerator
def initialize_verifier(keyPath)
@tokenVerifier=Ssosdk::RSATokenVerifier.new(keyPath)
end
def initialize_generator(keyPath, issuer, tenantId, keyFingerPrint=nil )
if keyPath.nil?
raise("private key must be set")
end
@tokenGenerator = Ssosdk::RSATokenGenerator.new(keyPath, issuer, tenantId, keyFingerPrint)
@tenantId=tenantId
end
def generate_token(claims)
token=@tokenGenerator.generate(claims)
token
end
def generate_idp_redirect_url(claims)
Ssosdk::YufuTokenConstants::IDP_TOKEN_CONSUME_URL + "?idp_token=" + generate_token(claims)
end
def verify_token(token)
payload, header=@tokenVerifier.verify(token)
[payload, header]
end
end
end<file_sep>source "https://rubygems.org"
gem 'jwt', '~> 2.1'
gem 'minitest', '~> 5.8', '>= 5.8.4'
# Specify your gem's dependencies in ruby-sso-sdk.gemspec
|
edd73abe31fc48f199cf251b064e1412a379d24f
|
[
"Markdown",
"Ruby"
] | 10
|
Ruby
|
yufuid/sso-ruby23-sdk
|
f365c49051070447aeeab5f4601e47e7d6bc40c0
|
776d1ed3a9215c58e83785cb9201570091f4db1c
|
refs/heads/master
|
<repo_name>stjernaluiht/semeion-digits<file_sep>/script_practica.R
##
## Reading and preparing the data
##
semeion <- read.table("semeion.data", quote="\"")
semeion$digit <- NA
semeion$digit[which(semeion$V257 == 1)] <- 0
semeion$digit[which(semeion$V258 == 1)] <- 1
semeion$digit[which(semeion$V259 == 1)] <- 2
semeion$digit[which(semeion$V260 == 1)] <- 3
semeion$digit[which(semeion$V261 == 1)] <- 4
semeion$digit[which(semeion$V262 == 1)] <- 5
semeion$digit[which(semeion$V263 == 1)] <- 6
semeion$digit[which(semeion$V264 == 1)] <- 7
semeion$digit[which(semeion$V265 == 1)] <- 8
semeion$digit[which(semeion$V266 == 1)] <- 9
semeion <- semeion[,-c(257,258,259,260,261,262,263,264,265,266)]
semeion$digit <- factor(semeion$digit)
semeion <- semeion[sample(nrow(semeion)),] #shuffle the rows
##
## Feature extraction
##
# Number of black pixels in the window
pixNegros.digit <- function (data.digit) {
(sum(data.digit==1))
}
pixNegros.all <- function (data.all) {
(apply(data.all,1,pixNegros.digit))
}
# Pixel density between upper and lower contour
upper.lower.density.digit <- function (data.digit) {
upper <- data.digit[1:127]
lower <- data.digit[128:256]
(pixNegros.digit(upper)/pixNegros.digit(lower))
}
upper.lower.density.all <- function (data.all) {
(apply(data.all,1,upper.lower.density.digit))
}
# Pixel density between left and right contour
right.left.density.digit <- function (data.digit) {
indexs <- c(1:8,17:24,33:40,49:56,65:72,81:88,97:104,113:120,129:136,
145:152,161:168,177:184,193:200,209:216,225:232,241:248)
left <- data.digit[indexs]
right <- data.digit[-indexs]
(pixNegros.digit(left)/pixNegros.digit(right))
}
right.left.density.all <- function (data.all) {
(apply(data.all,1,right.left.density.digit))
}
# Maximum vertical transitions
vert.transitions.digit.col <- function (data.digit.col) {
digit <- data.digit.col[1]
i = 2
max = 0
(data.digit.col[1] == 0)
while (i<=16){
if (data.digit.col[i] > 0 && digit==0) { max=max+1; digit <- 1; }
else if (data.digit.col[i] == 0 && digit==1) { max=max+1; digit <- 0 }
i=i+1
}
(max)
}
vert.transitions.digit <- function (data.digit) {
r <- t(matrix(data.digit, nrow = 16, ncol = 16))
num <- apply(r,2,vert.transitions.digit.col)
(max(num))
}
vert.transitions.all <- function (data.all) {
(apply(data.all,1,vert.transitions.digit))
}
# Number of cycles
num.cycles.digit <- function(data.digit) {
digMatrix <- paste(data.digit, sep = " ", collapse = " ")
cmd <- paste("echo",digMatrix,"| ./cycles") #no funcionará en Windows
res <- system(cmd, intern=TRUE)
as.numeric(res)
}
num.cycles.all <- function(data.all) {
(apply(data.all,1,num.cycles.digit))
}
# Marginal sums
marginal.sums.digit <- function(data.digit) {
r <- t(apply(matrix(data.digit, nrow = 16, ncol = 16), c(1,2), as.numeric))
c(colSums(r), rowSums(r))
}
marginal.sums.all <- function(data.all) {
t(apply(data.all,1,marginal.sums.digit))
}
# Number of edges
num.edges.digit <- function(data.digit) {
digMatrix <- paste(data.digit, sep = " ", collapse = " ")
cmd <- paste("echo",digMatrix,"| ./edges") #no funcionará en Windows
res <- system(cmd, intern=TRUE)
as.numeric(res)
}
num.edges.all <- function(data.all) {
(apply(data.all,1,num.edges.digit))
}
##
## Building the training/test sets
##
N <- nrow(semeion)
learn <- sample(1:N, round(N/2))
nlearn <- length(learn)
ntest <- N - nlearn
semeion.train <- semeion[learn,]
semeion.test <- semeion[-learn,]
#Now extract the new feature variables
px.n <- pixNegros.all(semeion[,-257])
ul.d <- upper.lower.density.all(semeion[,-257])
rl.d <- right.left.density.all(semeion[,-257])
tr.v <- vert.transitions.all(semeion[,-257])
n.cy <- num.cycles.all(semeion[,-257])
mg.s <- marginal.sums.all(semeion[,-257])
n.ed <- num.edges.all(semeion[,-257])
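# Combine the engineered features with the raw 16x16 pixel matrix and the class label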
features <- data.frame(n.ed, mg.s, px.n, ul.d, rl.d, tr.v, n.cy, matrix=semeion[,-257], digit = semeion$digit)
semeion.train.feat <- features[learn,]
semeion.test.feat <- features[-learn,]
library(caret) #for 5x2CV with train, trainControl
##
## Linear Discriminant Analysis
##
library(MASS) #for lda
trc <- trainControl (method="repeatedcv", number=2, repeats=5)
model.lda <- train(digit ~ ., data = semeion.train.feat, method='lda', MaxNWts=3000, maxit=100, trControl=trc, trace = FALSE)
p1 <- as.factor(predict (model.lda, type = "raw"))
t1 <- table(p1,semeion.train.feat$digit)
(error_rate.learn.lda <- 100*(1-sum(diag(t1))/nlearn)) #~0.25%
p2 <- as.factor(predict (model.lda, newdata = semeion.test.feat, type = "raw"))
(t2 <- table(p2, semeion.test.feat$digit))
(error_rate.test.lda <- 100*(1-sum(diag(t2))/ntest)) #~13.05%
##
## Multinomial logistic regression
##
library(nnet) #for multinom
# Use trainControl for 5x2CV; regularize with the decay parameter
trc <- trainControl (method="repeatedcv", number=2, repeats=5)
model.mnm <- train(digit ~ ., data = semeion.train.feat, method='multinom', MaxNWts=3000, maxit=100, trControl=trc, trace = FALSE,
tuneGrid = expand.grid(decay=seq(0.1,0.9,0.1)))
model.mnm$results
model.mnm$bestTune #decay: 0.7
plot(model.mnm)
p1 <- as.factor(predict (model.mnm, type = "raw"))
t1 <- table(p1, semeion.train.feat$digit)
(error_rate.learn.mnm <- 100*(1-sum(diag(t1))/nlearn)) #~0%
p2 <- as.factor(predict (model.mnm, newdata = semeion.test.feat, type = "raw"))
(t2 <- table(p2, semeion.test.feat$digit))
(error_rate.test.mnm <- 100*(1-sum(diag(t2))/ntest)) #~7.15%
##
## Multilayer Perceptron
##
trc <- trainControl (method="repeatedcv", number=2, repeats=5)
model.mlp <- train(digit ~ ., data = semeion.train.feat, method='nnet', MaxNWts=15000, maxit=50, trace = FALSE,
tuneGrid = expand.grid(.size=seq(20,40,by=5),.decay=0), trControl=trc)
model.mlp$results
model.mlp$bestTune #size 35
plot(model.mlp)
bestSize <- model.mlp$bestTune$size
trc <- trainControl (method="repeatedcv", number=2, repeats=5)
model.mlp <- train(digit ~ ., data = semeion.train.feat, method='nnet', MaxNWts=15000, maxit=50, trace = FALSE,
tuneGrid = expand.grid(.size=bestSize,.decay=seq(0.1,0.9,0.1)), trControl=trc)
model.mlp$results
model.mlp$bestTune #size: 35, decay: 0.3
plot(model.mlp)
p1 <- as.factor(predict (model.mlp, type = "raw"))
t1 <- table(p1, semeion.train.feat$digit)
(error_rate.learn.mlp <- 100*(1-sum(diag(t1))/nlearn)) #~7.28%
p2 <- as.factor(predict (model.mlp, newdata = semeion.test.feat, type = "raw"))
(t2 <- table(p2, semeion.test.feat$digit))
(error_rate.test.mlp <- 100*(1-sum(diag(t2))/ntest)) #~20.57%
##
## Support Vector Machine (RBF Kernel)
##
trc <- trainControl (method="repeatedcv", number=2, repeats=5)
model.svm <- train(digit ~ ., data = semeion.train.feat, method='svmRadialCost', MaxNWts=10000, maxit=100, trace = FALSE,
tuneGrid = expand.grid(.C=seq(1,20)), trControl=trc)
model.svm$results
model.svm$bestTune #C: 2, sigma: 0.001798363
plot(model.svm)
p1 <- as.factor(predict (model.svm, type = "raw"))
t1 <- table(p1, semeion.train.feat$digit)
(error_rate.learn.svm <- 100*(1-sum(diag(t1))/nlearn)) #~0.25%
p2 <- as.factor(predict (model.svm, newdata = semeion.test.feat, type = "raw"))
(t2 <- table(p2, semeion.test.feat$digit))
(error_rate.test.svm <- 100*(1-sum(diag(t2))/ntest)) #~3.88%
which(p2 != semeion.test.feat$digit) # misclassified digits
model.svm$finalModel@coef # model weights
## We choose this model as the final classifier
##
## Plots and auxiliary functions
##
bp <- barplot(t2[row(t2)==col(t2)]/colSums(t2)*100, names.arg = c(0,1,2,3,4,5,6,7,8,9),
              ylim = c(80,100.1), xpd = FALSE, xlab = "Digit", ylab = "Accuracy (%)",
              main = "Accuracy with SVM (RBF kernel)")
axis(1, at=bp, labels=FALSE)
errors <- c(error_rate.test.lda, error_rate.test.mnm, error_rate.test.mlp, error_rate.test.svm)
c1 <- colors()[562]
c2 <- colors()[555]
barplot(errors, names.arg = c("LDA","Mnm","MLP","SVM"), col = c(c2,c2,c1,c1), ylim = c(0,21),
        xlab = "Classifier model", ylab = "Estimated generalization error",
        main = "Estimated generalization error\nof the models studied")
outputDigit <- function(data, index, hidezeros=TRUE) {
l <- data[index,]
n <- l$digit
if (hidezeros) l[l == 0] = ' '
message(sprintf("%s\n%s\n%s\n%s\n%s\n%s\n%s\n%s\n%s\n%s\n%s\n%s\n%s\n%s\n%s\n%s\n",
paste(l[1:16], collapse = ''), paste(l[17:32], collapse = ''),
paste(l[33:48], collapse = ''), paste(l[49:64], collapse = ''),
paste(l[65:80], collapse = ''), paste(l[81:96], collapse = ''),
paste(l[97:112], collapse = ''), paste(l[113:128], collapse = ''),
paste(l[129:144], collapse = ''), paste(l[145:160], collapse = ''),
paste(l[161:176], collapse = ''), paste(l[177:192], collapse = ''),
paste(l[193:208], collapse = ''), paste(l[209:224], collapse = ''),
paste(l[225:240], collapse = ''), paste(l[241:256], collapse = '')))
message(sprintf("(%s)\n", n))
}
outputDigit(semeion, 1)
<file_sep>/cycles.cpp
/**
 * This program returns the number of cycles in a boolean matrix that
 * represents a handwritten digit (1 = character stroke).
 *
 * We use a BFS-style algorithm with a Union-Find structure to obtain the
 * number of connected components (CCs) of the graph; we consider that two
 * pixels/vertices belong to the same CC if they have the same value in
 * the matrix.
 *
 * For example: 1 has one CC (the outer frame), 0 has two, 8 has three.
 *
 * Input: a sequence of 0s and 1s representing the binary matrix of a digit
*/
#include <iostream>
#include <vector>
using namespace std;
#define RSIZE 16
#define MSIZE 18
typedef vector<vector<int> > Matrix;
struct UnionFind {
int nSets;
vector<int> setId;
vector<int> setSize;
inline UnionFind(int n) {
nSets = n;
setId = vector<int>(n);
setSize = vector<int>(n);
for (int i = 0; i < n; ++i) {
setId[i] = i;
setSize[i] = 1;
}
}
inline int findSet(int x) {
if (setId[x] != x) setId[x] = findSet(setId[x]);
return setId[x];
}
inline void unionSet(int x, int y) {
int rootX = findSet(x);
int rootY = findSet(y);
if (rootX == rootY) return;
if (setSize[rootX] < setSize[rootY]) {
setId[rootX] = rootY;
setSize[rootY] += setSize[rootX];
setSize[rootX] = 0;
} else {
setId[rootY] = rootX;
setSize[rootX] += setSize[rootY];
setSize[rootY] = 0;
}
--nSets;
}
    // Returns the number of CCs
inline int numSets() {
return nSets;
}
    // Returns the number of CCs that contain only pixels set to 1
inline int numWalls(Matrix& m) {
int res = 0;
for (int i = 0; i < setSize.size(); ++i) {
if (setSize[i] > 0 and m[i/MSIZE][i%MSIZE] == 1) ++res;
}
return res;
}
};
void checkSets(Matrix& m, UnionFind& uf, int i, int j, int x, int y) {
if (i < 0 or j < 0 or i >= MSIZE or j >= RSIZE+2 or m[i][j] != m[x][y]) return;
else if (uf.findSet(i*MSIZE+j) != uf.findSet(x*MSIZE+y)) {
uf.unionSet(uf.findSet(i*MSIZE+j), uf.findSet(x*MSIZE+y));
}
}
int main() {
Matrix dig(MSIZE, vector<int>(MSIZE, 0));
for (int i = 1; i < MSIZE-1; ++i) {
for (int j = 1; j < MSIZE-1; ++j) {
cin >> dig[i][j];
}
}
UnionFind uf(MSIZE*MSIZE);
for (int i = 0; i < MSIZE; ++i) {
for (int j = 0; j < MSIZE; ++j) {
checkSets(dig, uf, i-1, j, i, j);
checkSets(dig, uf, i+1, j, i, j);
checkSets(dig, uf, i, j-1, i, j);
checkSets(dig, uf, i, j+1, i, j);
checkSets(dig, uf, i-1, j-1, i, j);
checkSets(dig, uf, i+1, j+1, i, j);
checkSets(dig, uf, i+1, j-1, i, j);
checkSets(dig, uf, i-1, j+1, i, j);
}
}
cout << uf.numSets() - uf.numWalls(dig) << endl;
}
<file_sep>/Makefile
all: practica
practica: cycles.cpp edges.cpp
g++ cycles.cpp -o cycles
g++ edges.cpp -o edges
clean:
rm -rf cycles edges
<file_sep>/README.md
# semeion-digits
Handwritten digit classification (machine learning approach)
Machine Learning (Facultat d'Informàtica de Barcelona - UPC) - Fall 2014
This project was carried out as teamwork with <NAME> and <NAME>. Our task consisted of analyzing the dataset and extracting new, useful, and discriminant variables (_features_); then modeling the data with different techniques using a 5x2CV resampling method; and finally choosing and discussing the effectiveness of our best model.
We achieved a generalization accuracy of 96.12% (an estimated test error of about 3.88%) using Support Vector Machines.
The original dataset is available from UC Irvine Machine Learning Repository: [Semeion Handwritten Digit Data Set](http://archive.ics.uci.edu/ml/datasets/Semeion+Handwritten+Digit)
<file_sep>/edges.cpp
/**
 * This program returns the number of "peaks" (edge points) of a boolean
 * matrix that represents a handwritten digit (1 = character stroke).
 *
 * We scan the matrix for a series of patterns that represent the borders or
 * peaks of the digit. We use a Union-Find structure to group the closest
 * edges (Euclidean distance) according to a threshold value.
 *
 * Input: a sequence of 0s and 1s representing the binary matrix of a digit
*/
#include <iostream>
#include <vector>
#include <set>
#include <cmath>
using namespace std;
#define MSIZE 18
#define PSIZE 3
#define THRSH 5
typedef vector<vector<int> > Matrix;
typedef int Pattern[PSIZE][PSIZE];
struct UnionFind {
int nSets;
vector<int> setId;
vector<int> setSize;
inline UnionFind(int n) {
nSets = n;
setId = vector<int>(n);
setSize = vector<int>(n);
for (int i = 0; i < n; ++i) {
setId[i] = i;
setSize[i] = 1;
}
}
inline int findSet(int x) {
if (setId[x] != x) setId[x] = findSet(setId[x]);
return setId[x];
}
inline void unionSet(int x, int y) {
int rootX = findSet(x);
int rootY = findSet(y);
if (rootX == rootY) return;
if (setSize[rootX] < setSize[rootY]) {
setId[rootX] = rootY;
setSize[rootY] += setSize[rootX];
setSize[rootX] = 0;
} else {
setId[rootY] = rootX;
setSize[rootX] += setSize[rootY];
setSize[rootY] = 0;
}
--nSets;
}
inline int numSets() {
return nSets;
}
};
struct Pair {
int fst, snd;
inline Pair() {}
inline Pair(int a, int b) {
fst = a; snd = b;
}
};
struct Comparator {
    // std::set requires a strict weak ordering, so compare pairs lexicographically.
    bool operator() (const Pair& a, const Pair& b) const {
        return a.fst < b.fst or (a.fst == b.fst and a.snd < b.snd);
}
};
bool closePoint(const Pair& a, const Pair& b) {
int dist = sqrt((a.fst - b.fst)*(a.fst - b.fst) + (a.snd - b.snd)*(a.snd - b.snd));
return dist < THRSH;
}
bool isEdge(Matrix& m, int ci, int cj, Pattern& p, set<Pair, Comparator>& v) {
int i = ci-1;
for (int x = 0; x < PSIZE; ++x) {
int j = cj-1;
for (int y = 0; y < PSIZE; ++y) {
if (p[x][y] != m[i][j] and p[x][y] != 2 and m[i][j] != 3) return false;
++j;
}
++i;
}
    // Add to the set of candidate edges all the points
    // that match a "1" in the pattern.
for (int x = 0; x < PSIZE; ++x) {
for (int y = 0; y < PSIZE; ++y) {
if (p[x][y] == 1) v.insert(Pair(ci-1+x, cj-1+y));
}
}
return true;
}
int main() {
Matrix dig(MSIZE, vector<int>(MSIZE, 0));
for (int i = 1; i < MSIZE-1; ++i) {
for (int j = 1; j < MSIZE-1; ++j) {
cin >> dig[i][j];
}
}
    // For all the patterns:
    //  - 0 and 1 are matched against the matrix
    //  - 2 is a wildcard (its content does not matter)
    //  - 3 marks the center of a found pattern (only in the "dig" matrix)
Pattern edgeBL = { {2, 1, 0},
{1, 1, 0},
{0, 0, 0} };
Pattern edgeTR = { {0, 0, 0},
{0, 1, 1},
{0, 1, 2} };
Pattern edgeTL = { {0, 0, 0},
{1, 1, 0},
{2, 1, 0} };
Pattern edgeBR = { {0, 1, 2},
{0, 1, 1},
{0, 0, 0} };
Pattern edgeL = { {0, 0, 0},
{0, 1, 1},
{0, 0, 0} };
Pattern edgeR = { {0, 0, 0},
{1, 1, 0},
{0, 0, 0} };
Pattern edgeT = { {0, 0, 0},
{0, 1, 0},
{0, 1, 0} };
Pattern edgeB = { {0, 1, 0},
{0, 1, 0},
{0, 0, 0} };
    // Search for the patterns in the matrix
set<Pair, Comparator> cand;
for (int i = 1; i < MSIZE-1; ++i) {
        for (int j = 1; j < MSIZE-1; ++j) {
if (dig[i][j] == 1) {
if (isEdge(dig, i, j, edgeTR, cand) or isEdge(dig, i, j, edgeBL, cand) or isEdge(dig, i, j, edgeTL, cand) or isEdge(dig, i, j, edgeBR, cand) or
isEdge(dig, i, j, edgeT, cand) or isEdge(dig, i, j, edgeB, cand) or isEdge(dig, i, j, edgeL, cand) or isEdge(dig, i, j, edgeR, cand)) {
dig[i][j] = 3;
}
}
}
}
    // Group nearby edge points
UnionFind uf(cand.size());
int p = 0;
for (set<Pair, Comparator>::iterator it = cand.begin(); it != cand.end(); ++it) {
int q = p+1;
set<Pair, Comparator>::iterator it2 = cand.begin();
for (int i = 0; i < p+1; ++i) ++it2;
while (it2 != cand.end()) {
if (closePoint(*it, *it2) and (uf.findSet(p) != uf.findSet(q))) {
uf.unionSet(uf.findSet(p), uf.findSet(q));
}
++q;
++it2;
}
++p;
}
cout << uf.numSets() << endl;
}
|
2294d9982f2f4d80be7eaf7c933f1d2c2081613c
|
[
"Makefile",
"R",
"C++",
"Markdown"
] | 5
|
R
|
stjernaluiht/semeion-digits
|
54a3d4346916ccf18ff44b9835f94101bdd19d61
|
eab60928b4b70a25e2b12b5b086c75ac36fc6109
|
refs/heads/master
|
<file_sep>import matplotlib
matplotlib.use('agg')
import nose
nose.main()
<file_sep># -*- coding: utf-8 -*-
__author__ = '<NAME>'
__version__ = '0.0.1'
__license__ = 'BSD 3-clause'
from . import validate
from . import utils
from .gridplot import GridAesthetics
<file_sep>import numpy.testing as nptest
import nose.tools as nt
from matplotlib.testing.decorators import image_comparison, cleanup
from matplotlib import pyplot
import pandas
from pygridplot import validate
from six import StringIO
@nt.nottest
def assert_fig_and_ax(fig, ax):
nt.assert_true(isinstance(fig, pyplot.Figure))
nt.assert_true(isinstance(ax, pyplot.Axes))
class test__check_ax(object):
def setup(self):
fig, ax = pyplot.subplots()
self.fig = fig
self.ax = ax
def teardown(self):
pyplot.close('all')
def test_no_ax(self):
fig, ax = validate.figure_axes(None)
assert_fig_and_ax(fig, ax)
def test_ax(self):
fig, ax = validate.figure_axes(self.ax)
assert_fig_and_ax(fig, ax)
nt.assert_equal(fig, self.fig)
nt.assert_equal(ax, self.ax)
@nt.raises(ValueError)
def test_ax_bad_value(self):
fig, ax = validate.figure_axes('junk')
<file_sep>from matplotlib import pyplot
def figure_axes(ax):
if ax is None:
fig, ax = pyplot.subplots()
elif isinstance(ax, pyplot.Axes):
fig = ax.figure
else:
raise ValueError("`ax` is not an Axes object")
return fig, ax
<file_sep>from textwrap import dedent
import nose.tools as nt
import numpy.testing as nptest
import pandas
from six import StringIO
from pygridplot import utils
class test_addSecondColumnLevel(object):
def setup(self):
self.testcsv = StringIO(dedent("""\
Date,A,B,C,D
X,1,2,3,4
Y,5,6,7,8
Z,9,0,1,2
"""))
self.data = pandas.read_csv(self.testcsv, index_col=['Date'])
self.known = pandas.MultiIndex.from_tuples([(u'test', u'A'), (u'test', u'B'),
(u'test', u'C'), (u'test', u'D')])
def test_normal(self):
newdata = utils.addSecondColumnLevel('test', 'testlevel', self.data)
nt.assert_list_equal(self.known.tolist(), newdata.columns.tolist())
@nptest.raises(ValueError)
def test_error(self):
newdata1 = utils.addSecondColumnLevel('test1', 'testlevel1', self.data)
newdata2 = utils.addSecondColumnLevel('test2', 'testlevel2', newdata1)
<file_sep>from __future__ import division
import datetime
import gc
import numpy as np
import pandas
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from matplotlib.ticker import ScalarFormatter
from matplotlib.collections import PatchCollection
from mpl_toolkits.axes_grid1 import make_axes_locatable
import seaborn as sns
from shapely.geometry import Polygon
from descartes.patch import PolygonPatch
import shapefile
sns.set(style='ticks', context='paper')
def _rotate_tick_labels(ax):
'''
Private function to rotate x-tick labels of an axes
Input
-----
ax : matplotlib.axes.Axes instance
Output
------
None
'''
for label in ax.get_xticklabels():
label.set_rotation_mode('anchor')
label.set_rotation(45)
label.set_horizontalalignment('right')
def read_out(filename, valcol, nrows=None, velocity=False, icol='I_MOD',
jcol='J_MOD', tcol='DUMPID', hcol='ST_hr'):
if velocity:
        names = [tcol, hcol, 'END_hr', icol,
                 jcol, 'LAYER1', 'LAYER2', 'LAYER3',
                 'LAYER4', 'LAYER5', 'LAYER6', 'LAYER7',
                 'LAYER8', 'LAYER9', 'LAYER10 ']
output = pandas.read_csv(filename, nrows=nrows, index_col=False,
skiprows=2, header=None, names=names)
else:
output = pandas.read_csv(filename, nrows=nrows, index_col=False, skiprows=1)
newcols = {icol: 'I', jcol: 'J', tcol: 'tstep', valcol: 'value'}
output = output.rename(columns=newcols).set_index(['I', 'J', 'tstep'])
if velocity:
output.loc[:, ['value']]
return output
def readModelGrid(shapefilename, icol='MODI', jcol='MODJ', ijcol_idx=[4, 5]):
'''
Read in the model grid into a pandas dataframe
Input
-----
shapefilename : string
filename and path to the shapefile (extension is optional)
icol : string (default = 'MODI')
name of the column in the shapefile's attribute table for the "x"
coordinate of the model grid
    jcol : string (default = 'MODJ')
name of the column in the shapefile's attribute table for the "y"
coordinate of the model grid
ijcol_idx : sequence of ints (default = [4, 5])
positions of `icol` and `jcol` in the attribute table
Output
------
    grid : pandas.DataFrame
        index = integer-based multi-index on the values of `icol` and `jcol`
        columns = [
            cell = shapely.geometry.Polygon object representing the grid cell
            easting = local coordinate easting of the grid cell's centroid
            northing = local coordinate northing of the grid cell's centroid
        ]
'''
# read the file
shpfile = shapefile.Reader(shapefilename)
# grab field names (first element (0) in everything but the first row (1:))
fieldnames = np.array(shpfile.fields)[1:, 0]
# create a dataframe from the records and field names
grid = pandas.DataFrame(np.array(shpfile.records()), columns=fieldnames)
# convert I, J to ints
grid['I'] = grid.apply(lambda row: int(row[icol]), axis=1)
grid['J'] = grid.apply(lambda row: int(row[jcol]), axis=1)
# set the index in the model grid locations
grid.set_index(['I', 'J'], inplace=True)
# initialize patch column
grid['cell'] = Polygon([(0., 0.), (0., 1.), (1., 1.), (1., 0.), (0., 0.)])
# loop through all of the shapes and records concurrently
for shprec in shpfile.shapeRecords():
shape, row = shprec.shape, shprec.record
# again, need to pull the MODI/J columns to determine index
I = row[ijcol_idx[0]]
J = row[ijcol_idx[1]]
# set the `cell` column for the row to the actual shape
        grid.loc[(I, J), 'cell'] = Polygon(shape.points)
# compute easting and northings from the shapes
grid['easting'] = grid.apply(lambda row: row['cell'].centroid.x, axis=1)
grid['northing'] = grid.apply(lambda row: row['cell'].centroid.y, axis=1)
# return the columns we need
return grid[['cell', 'easting', 'northing']]
def attachAnimateValues(grid, output, valcol, year, month, icol='I_MOD', jcol='J_MOD',
tcol='DUMPID', hcol='ST_hr', nrows=None, newfiletype=False,
resample_out=None, velocity=False):
'''
    Reads a model output dataframe and adds matplotlib patches to the grid
    dataframe for plotting/animation
    Input
    -----
    grid : pandas.DataFrame
        output from `readModelGrid(...)`
    output : pandas.DataFrame
        model output from `read_out(...)`, indexed on ['I', 'J', 'tstep']
    valcol : string
        name of the column containing the actual values of the output file
    year, month : int
        start date used to convert the hour column (`hcol`) into timestamps
    icol, jcol, tcol, hcol : string
        names of the I/J grid-cell, timestep, and start-hour columns
    nrows : int or None
        optional limit on the number of rows read from the output file
    newfiletype, velocity : bool
        flags describing the format of the output file
    resample_out : string or None
        optional pandas resampling rule applied to the time series
    Output
    ------
    joined : pandas.DataFrame
        index = integer-based multi-index on the values of tstep, I, and J
        columns = [
            cell = shapely.geometry.Polygon object representing the grid cell
            value = output value read in from the model output
            normed_value = value scaled by the overall minimum and maximum
            patch = matplotlib.patches.PathPatch to display each cell
            easting = local coordinate easting of the grid cell's centroid
            northing = local coordinate northing of the grid cell's centroid
        ]
'''
data = output[['value', hcol]]
data.reset_index(inplace=True)
data.dropna(subset=['tstep'], inplace=True)
data['datetime'] = (data[hcol].apply(
lambda dt: datetime.datetime(
year, month, 1) + datetime.timedelta(dt/24)))
if resample_out is not None:
data = (data
.set_index('datetime')
.groupby(['I', 'J'])
.resample(resample_out, how='mean')
.reset_index()
)
# join the output data and drop NA values (i.e. cells with no output data)
joined = grid.join(data.set_index(['I', 'J']), how='outer').dropna()
# joined.set_index(['tstep'], append=True, inplace=True)
# normalize all of the values
joined['normed_value'] = (joined.value - joined.value.min()) / joined.value.max()
# little function to help me make polygon patches for plotting
def makePatch(row):
'''
Input `row` is just a row of a pandas.DataFrame
'''
# rgb = (row['r'], row['b'], row['g'])
patch = PolygonPatch(row['cell'], edgecolor='White', linewidth=0.25)
return patch
# add a matplotlib patch to each row
joined['patch'] = joined.apply(makePatch, axis=1)
joined = joined.reset_index().set_index(['tstep', 'I', 'J'])
return joined
def plotGrid(grid, patchcol='patch', ax=None, cmap=plt.cm.Blues, vextent=None,
log=True, blankgrid=False, **figkwargs):
'''
    Creates a matplotlib figure of the model grid with the output values associated
with each cell
Input
-----
grid : pandas.DataFrame
        ideally, this is output from attachAnimateValues(...)
    patchcol : string (default = 'patch')
        name of the column containing the matplotlib.patches.PathPatches in `grid`
    ax : matplotlib.axes.Axes instance or None (default = None)
        optional axes on which to plot the data. If None, one will be made.
Output
------
fig : matplotlib.figure.Figure instance
figure containing the plot
'''
figsize = figkwargs.pop('figsize', (6.0, 6.0))
# check the value of `ax`; create if necessary
if ax is None:
fig, ax = plt.subplots(figsize=figsize, **figkwargs)
elif isinstance(ax, plt.Axes):
fig = ax.figure
else:
raise ValueError("`ax` must be None or an MPL Axes object")
# set the axes aspect ratio and limits based on the data
# TODO: this needs to be based on extents, not centroids
ax.set_xlim((grid.easting.min()*.999, grid.easting.max()*1.001))
ax.set_ylim((grid.northing.min()*.999, grid.northing.max()*1.001))
ax.set_aspect('equal')
# create a normalization object based on the data
if vextent is None:
norm = plt.Normalize(vmin=grid.value.min(), vmax=grid.value.max())
if log:
norm = matplotlib.colors.LogNorm(vmin=grid.value.min()+1E-10, vmax=grid.value.max())
else:
norm = plt.Normalize(vmin=np.min(vextent), vmax=np.max(vextent))
if log:
            norm = matplotlib.colors.LogNorm(vmin=np.min(vextent) + 1E-10, vmax=np.max(vextent))
# create a ScalarMappable based on the normalization object
# (this is what the colorbar will be based off of)
cmap.set_under('white')
cmap.set_bad('white')
sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)
    # and set its values to the grid.value column (may be unnecessary)
sm._A = np.array(grid.value.tolist())
# stuff the `patches` (grid cells) column of the grid in a PatchCollection
edgecol = sm.to_rgba(grid.value.values)*0 + .7
if not blankgrid:
facecolors = grid.value.values
else:
facecolors = np.zeros(grid.value.values.shape)
patches = PatchCollection(grid.patch.tolist(), match_original=False,
                              facecolors=sm.to_rgba(facecolors), linewidths=[.25, .25, .25],
edgecolors=edgecol)
# plot the grid cells on the axes
ax.add_collection(patches)
divider = make_axes_locatable(ax)
cax = divider.append_axes("right", size="5%", pad=0.2)
    # add a vertical colorbar to the right of the axes
plt.colorbar(sm, orientation='vertical', cax=cax)
# format the tick labels
fmt = ScalarFormatter(useOffset=False)
ax.xaxis.set_major_formatter(fmt)
ax.yaxis.set_major_formatter(fmt)
_rotate_tick_labels(ax)
sns.despine()
time_text = ax.text(0.8, 0.9, '', transform=ax.transAxes, fontsize=8,
verticalalignment='top')
textstr = grid['datetime'].iloc[0].strftime("%Y-%m-%d")
time_text.set_text(textstr)
# snug up the figure's layout
fig.tight_layout()
return fig
class GridAesthetics(object):
"""
Class to manage shapefile and plotting values.
"""
def __init__(self, results, valcol, shapefile, year, month, icol='I_MOD',
jcol='J_MOD', tcol='DUMPID', gridicol='EFDC_I', gridjcol='EFDC_J',
hcol='ST_hr', ijcol_idx=[4, 5], newfiletype=False, resample_out=None,
velocity=False, u_path=None, v_path=None, uv_valcol=None):
self.shapefile = shapefile
self.results = results
self._u_path = u_path
self._v_path = v_path
self.year = year
self.month = month
self.newfiletype = newfiletype
self._velocity = velocity
self.resample_out = resample_out
self._gridicol = gridicol
self._gridjcol = gridjcol
self._ijcol_idx = ijcol_idx
self._valcol = valcol
self._uv_valcol = uv_valcol
self._icol = icol
self._jcol = jcol
self._tcol = tcol
self._hcol = hcol
self._modelGrid = None
self._gridValues = None
self._uv = None
@property
def modelGrid(self):
if self._modelGrid is None:
gridData = readModelGrid(
self.shapefile,
icol=self._gridicol,
jcol=self._gridjcol,
ijcol_idx=self._ijcol_idx
)
self._modelGrid = gridData
return self._modelGrid
@property
def gridValues(self):
if self._gridValues is None:
if isinstance(self.results, pandas.DataFrame):
newcols = {self._icol: 'I', self._jcol: 'J', self._tcol: 'tstep', self._valcol: 'value'}
output = self.results.rename(columns=newcols).set_index(['I', 'J', 'tstep'])
else:
output = read_out(self.results, self._valcol, nrows=None,
velocity=self._velocity, icol=self._icol,
jcol=self._jcol, tcol=self._tcol, hcol=self._hcol)
gv = attachAnimateValues(self.modelGrid, output,
self._valcol, self.year, self.month, icol=self._icol, jcol=self._jcol,
tcol=self._tcol, nrows=None, newfiletype=self.newfiletype,
resample_out=self.resample_out, velocity=self._velocity
)
self._gridValues = gv
return self._gridValues
@property
def uv_values(self):
if self._uv is None:
if self._uv_valcol is None:
self._uv_valcol = self._valcol
u = read_out(self._u_path, self._uv_valcol, velocity=True, icol=self._icol,
jcol=self._jcol, tcol=self._tcol, hcol=self._hcol)
v = read_out(self._v_path, self._uv_valcol, velocity=True, icol=self._icol,
jcol=self._jcol, tcol=self._tcol, hcol=self._hcol)
uv = u.join(v, how='inner', lsuffix='_u', rsuffix='_v')
uv['value_uv'] = np.sqrt(uv.value_u**2 + uv.value_v**2)
uv = (uv.reset_index(level='tstep', drop=False)
.join(self.modelGrid, how='outer')
.dropna()
.set_index('tstep', append=True)
)
self._uv = uv
return self._uv
def plot(self, timestep, ax=None, cmap=plt.cm.Blues, vextent=None,
log=True, blankgrid=False, **figkwargs):
fig = plotGrid(self.gridValues.xs(timestep, level='tstep'),
ax=ax,cmap=cmap, vextent=vextent, log=log,
blankgrid=blankgrid, **figkwargs)
return fig
def add_plot_vectors(self, ax, timestep, scale=9e-4,
alpha=0.7, headwidth=3.5, legend=False,
legend_xy=(.5, .5), legend_uv=(.5, .5)):
subset = self.uv_values.xs(timestep, level='tstep')
if legend:
cols = ['value_u', 'value_v', 'easting', 'northing']
u, v = legend_uv
easting, northing = legend_xy
subset.loc[(-99, -99), cols] = (u, v, easting, northing)
ax.quiver(subset.easting, subset.northing,
subset.value_u, subset.value_v,
angles='xy', scale_units='xy', scale=scale,
alpha=alpha, headwidth=headwidth)
return ax.figure
<file_sep># Setup script for the pygridplot package
#
# Usage: python setup.py install
#
import os
from setuptools import setup, find_packages
DESCRIPTION = "pygridplot: Visualization tools for pygridgen"
LONG_DESCRIPTION = DESCRIPTION
NAME = "pygridplot"
VERSION = "0.0.1"
AUTHOR = "<NAME> (Geosyntec Consultants)"
AUTHOR_EMAIL = "<EMAIL>"
URL = "https://github.com/Geosyntec/pygridplot"
DOWNLOAD_URL = "https://github.com/Geosyntec/pygridplot/archive/master.zip"
LICENSE = "BSD 3-clause"
PACKAGES = find_packages()
PLATFORMS = "Python 2.7"
CLASSIFIERS = [
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Intended Audience :: Science/Research",
"Topic :: Software Development :: Libraries :: Python Modules",
'Programming Language :: Python :: 2.7',
]
INSTALL_REQUIRES = ['seaborn', 'numexpr', 'descartes', 'shapely', 'pyshp']
PACKAGE_DATA = {}
setup(
name=NAME,
version=VERSION,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
url=URL,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
download_url=DOWNLOAD_URL,
license=LICENSE,
packages=PACKAGES,
package_data=PACKAGE_DATA,
#data_files=DATA_FILES,
platforms=PLATFORMS,
classifiers=CLASSIFIERS,
install_requires=INSTALL_REQUIRES,
)
<file_sep># pygridplot
[](https://travis-ci.org/Geosyntec/pygridplot)
[](https://coveralls.io/github/Geosyntec/pygridplot?branch=master)
Visualize data over a model grid from an ESRI shapefile.
## Installation
Recommended installation is via conda
```
conda create -n animate python=2.7 pandas seaborn numexpr jupyter
activate animate
pip install descartes
conda install --yes --channel=ioos shapely
conda install --yes --channel=pelson pyshp
```
Then, with the environment still activated, run setup.py or install via pip.
In other words, when you're in the directory with setup.py:
`python setup.py install`
or
`pip install .`
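## Basic usage

A minimal sketch of drawing one timestep of model output over the grid. The file paths, value column, and timestep below are hypothetical; the constructor arguments and the `plot` call follow the `GridAesthetics` class in `pygridplot/gridplot.py`.

```
from matplotlib import pyplot
from pygridplot import GridAesthetics

# Hypothetical inputs: a model-output CSV and the model-grid shapefile.
aes = GridAesthetics(
    results='output/concentration.csv',  # path to the output file, or a pandas.DataFrame
    valcol='CONC',                       # column holding the values to plot
    shapefile='gis/modelgrid.shp',
    year=2015, month=6,                  # start date used to build timestamps
)

# Draw a single timestep on a log color scale and save the figure.
fig = aes.plot(timestep=1, cmap=pyplot.cm.Blues, log=True)
fig.savefig('grid_tstep1.png', dpi=300)
```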
<file_sep>import pandas
def addSecondColumnLevel(levelval, levelname, olddf):
'''
Takes a simple index on a dataframe's columns and adds a new level
with a single value.
E.g., df.columns = ['res', 'qual'] -> [('Infl' ,'res'), ('Infl', 'qual')]
'''
if isinstance(olddf.columns, pandas.MultiIndex):
raise ValueError('Dataframe already has MultiIndex on columns')
colarray = [[levelval]*len(olddf.columns), olddf.columns]
colindex = pandas.MultiIndex.from_arrays(colarray)
newdf = olddf.copy()
newdf.columns = colindex
newdf.columns.names = [levelname, 'quantity']
return newdf
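# A minimal usage sketch (illustrative only; the column names below are hypothetical):
# stack a single level named 'Infl' on top of a simple two-column frame.
if __name__ == '__main__':
    df = pandas.DataFrame({'res': [1.0, 2.0], 'qual': ['<', '=']})
    stacked = addSecondColumnLevel('Infl', 'loc', df)
    # columns become [('Infl', 'res'), ('Infl', 'qual')] with names ['loc', 'quantity']
    print(stacked.columns.tolist())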
|
dc7f2354ae6491ca7f2d43e7bfcf07da348e415c
|
[
"Markdown",
"Python"
] | 9
|
Python
|
Geosyntec/pygridplot
|
bc4c9ad3ab9acd9bfd6a72d2f19a4dff68c1cdd5
|
08baa43a188e70d79c1329daaabbe900fca2c5fa
|
refs/heads/master
|
<repo_name>TuAnhTo/beatvn_test<file_sep>/routes/index.js
const express = require('express');
const {auth} = require("../middleware/auth");
const {insertData, getData} = require("../module");
const router = express.Router();
/* GET home page. */
router.post('/echo',auth, async function (req, res, next) {
try {
return await insertData(req, res, next)
} catch (e) {
console.log(e);
}
});
router.get('/list', async function (req, res, next) {
try {
return await getData(req, res, next)
} catch (e) {
console.log(e)
}
});
module.exports = router;
<file_sep>/middleware/auth.js
const _ = require('lodash')
module.exports.auth = async function auth(req, res, next){
try{
        let token = req.headers.authorization
        if (_.isEmpty(token)) {
            return res.status(400).send('No token')
        }
        token = token.slice(7) // strip the "Bearer " prefix
        let TOKEN = process.env.TOKEN
        if (TOKEN === token) {
            return next()
        }
        return res.status(401).send('Invalid token')
} catch (e){
        console.log(e, 'auth middleware error')
        return res.status(500).send('Authentication error')
}
}<file_sep>/module/index.js
const _ = require('lodash')
const {insertData, getDataIndex} = require("./helper");
/**
*
* @param req
* @param res
* @param next
* @returns {Promise<*>}
*/
module.exports.insertData = async function getIpAddr(req, res, next) {
try {
let ipAddr = req.headers['x-forwarded-for'] ||
req.connection.remoteAddress ||
req.socket.remoteAddress ||
req.connection.socket.remoteAddress;
let data = {
ip : ipAddr,
data : req.body
};
await insertData(data)
        return res.status(200).send(data)
} catch (e) {
console.log(e)
}
};
/**
*
* @param req
* @param res
* @param next
* @returns {Promise<*>}
*/
module.exports.getData = async function getData(req, res, next) {
try {
let list = await getDataIndex()
return await res.status(200).send(list)
} catch (e) {
console.log(e)
}
};<file_sep>/database/model/index.model.js
const mongoose = require("mongoose");
const Schema = mongoose.Schema;
const indexSchema = Schema(
{
data: {type: Object }
}
);
module.exports.indexModel = mongoose.model("index", indexSchema, "index");<file_sep>/module/helper.js
const {indexModel} = require("../database/model/index.model");
module.exports.insertData = async function insertData(data){
try{
console.log(data)
return await indexModel.create(data)
} catch (e){
console.log(e, 'insertData error')
}
}
module.exports.getDataIndex = async function getDataIndex(){
try{
return await indexModel.find({})
.sort({_id:-1})
.limit(100)
} catch (e){
console.log(e, 'getDataIndex error')
}
}
|
cde528a21aa646aae62c209f9a77059b22759d07
|
[
"JavaScript"
] | 5
|
JavaScript
|
TuAnhTo/beatvn_test
|
e94fc6b2bab751eb1a2850db0ce0e39182fbba72
|
ae258ecf9e18262afa57219a5f62ae7567773dd2
|
refs/heads/master
|
<file_sep>from django.shortcuts import render
from django.views import generic
from django.http import HttpResponseRedirect, HttpResponse
from .models import *
import re
# Create your views here.
class IndexView(generic.TemplateView):
template_name = "posts/index.html"
class ProjectsView(generic.ListView):
template_name = "posts/projects.html"
context_object_name = 'projects_list'
def get_queryset(self):
return Project.objects.filter(modified_date__lte=timezone.now()
).order_by('-modified_date')
def get_categories(self):
categories_list = list()
project_list = Project.objects.filter(created_date__lte=timezone.now()
).order_by('-created_date')
for project in project_list:
categories = re.split(' ', project.categories)
for cat in categories:
if cat not in categories_list:
categories_list.append(cat)
return categories_list
class CategoriesView(generic.ListView):
template_name = "posts/projects.html"
context_object_name = 'projects_list'
def get_queryset(self):
category = self.kwargs['category']
return Project.objects.filter(categories__contains=category).order_by('-modified_date')
def get_categories(self):
categories_list = list()
project_list = Project.objects.filter(created_date__lte=timezone.now()
).order_by('-created_date')
for project in project_list:
categories = re.split(' ', project.categories)
for cat in categories:
if cat not in categories_list:
categories_list.append(cat)
return categories_list
class BlogView(generic.ListView):
template_name = "posts/blog.html"
context_object_name = 'latest_post_list'
def get_queryset(self):
return Post.objects.filter(created_date__lte=timezone.now()
).order_by('-created_date')
def get_tag_list(self):
tag_list = list()
post_list = Post.objects.filter(created_date__lte=timezone.now()
).order_by('-created_date')
for post in post_list:
tags = re.split(' ', post.tags)
for tag in tags:
if tag not in tag_list:
tag_list.append(tag)
return tag_list
def get_year_month_data(self):
archive_data = dict()
post_list = Post.objects.filter(created_date__lte=timezone.now()
).order_by('-created_date')
for post in post_list:
if post.created_date.year not in archive_data:
archive_data[post.created_date.year] = []
archive_data[post.created_date.year].append(post.created_date.month)
elif post.created_date.month not in archive_data[post.created_date.year]:
archive_data[post.created_date.year].append(post.created_date.month)
return archive_data
class ArchiveYearView(generic.ListView):
template_name = "posts/blog.html"
context_object_name = 'latest_post_list'
def get_queryset(self):
year = self.kwargs['year']
post_list = Post.objects.filter(created_date__year=year).order_by('-created_date')
return post_list
def get_tag_list(self):
tag_list = list()
post_list = Post.objects.filter(created_date__lte=timezone.now()
).order_by('-created_date')
for post in post_list:
tags = re.split(' ', post.tags)
for tag in tags:
if tag not in tag_list:
tag_list.append(tag)
return tag_list
def get_year_month_data(self):
archive_data = dict()
post_list = Post.objects.filter(created_date__lte=timezone.now()
).order_by('-created_date')
for post in post_list:
if post.created_date.year not in archive_data:
archive_data[post.created_date.year] = []
archive_data[post.created_date.year].append(post.created_date.month)
elif post.created_date.month not in archive_data[post.created_date.year]:
archive_data[post.created_date.year].append(post.created_date.month)
return archive_data
class ArchiveMonthView(generic.ListView):
context_object_name = 'latest_post_list'
template_name = "posts/blog.html"
def get_queryset(self):
year = self.kwargs['year']
month = self.kwargs['month']
post_list = Post.objects.filter(created_date__year=year).filter(created_date__month=month).order_by('-created_date')
return post_list
def get_tag_list(self):
tag_list = list()
post_list = Post.objects.filter(created_date__lte=timezone.now()
).order_by('-created_date')
for post in post_list:
tags = re.split(' ', post.tags)
for tag in tags:
if tag not in tag_list:
tag_list.append(tag)
return tag_list
def get_year_month_data(self):
archive_data = dict()
post_list = Post.objects.filter(created_date__lte=timezone.now()
).order_by('-created_date')
for post in post_list:
if post.created_date.year not in archive_data:
archive_data[post.created_date.year] = []
archive_data[post.created_date.year].append(post.created_date.month)
elif post.created_date.month not in archive_data[post.created_date.year]:
archive_data[post.created_date.year].append(post.created_date.month)
return archive_data
class TagView(generic.ListView):
template_name = "posts/blog.html"
context_object_name = 'latest_post_list'
def get_queryset(self):
tag = self.kwargs['tag']
return Post.objects.filter(tags__contains=tag).order_by('-created_date')
def get_tag_list(self):
tag_list = list()
post_list = Post.objects.filter(created_date__lte=timezone.now()
).order_by('-created_date')
for post in post_list:
tags = re.split(' ', post.tags)
for tag in tags:
if tag not in tag_list:
tag_list.append(tag)
return tag_list
def get_year_month_data(self):
archive_data = dict()
post_list = Post.objects.filter(created_date__lte=timezone.now()
).order_by('-created_date')
for post in post_list:
if post.created_date.year not in archive_data:
archive_data[post.created_date.year] = []
archive_data[post.created_date.year].append(post.created_date.month)
elif post.created_date.month not in archive_data[post.created_date.year]:
archive_data[post.created_date.year].append(post.created_date.month)
return archive_data
class ContactView(generic.TemplateView):
template_name = "posts/contact.html"
<file_sep>from django.contrib import admin
from .models import *
# Register your models here.
class PostModelAdmin(admin.ModelAdmin):
list_display = ["title", "modified_date", "created_date"]
list_display_links = ["title", "modified_date", "created_date"]
list_filter = ["title", "modified_date", "created_date"]
search_fields = ["title", "content"]
class Meta:
model = Post
class ProjectModelAdmin(admin.ModelAdmin):
list_display = ["name", "modified_date", "created_date"]
list_display_links = ["name", "modified_date", "created_date"]
list_filter = ["name", "modified_date", "created_date"]
search_fields = ["name", "content"]
class Meta:
model = Project
admin.site.register(Post, PostModelAdmin)
admin.site.register(Project, ProjectModelAdmin)
<file_sep>from django.conf.urls import url
from . import views
app_name = 'posts'
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name="index"),
url(r'^projects/$', views.ProjectsView.as_view(), name="projects"),
url(r'^projects/(?P<category>[\w\-]+)$', views.CategoriesView.as_view(), name="categories"),
url(r'blog/$', views.BlogView.as_view(), name="blog"),
url(r'blog/tags/(?P<tag>[\w\-]+)$', views.TagView.as_view(), name="tag"),
url(r'blog/(?P<year>[0-9]{4})$', views.ArchiveYearView.as_view(), name="year"),
    url(r'blog/(?P<year>[0-9]{4})/(?P<month>[0-9]{1,2})/$', views.ArchiveMonthView.as_view(), name="month"),
url(r'contact/$', views.ContactView.as_view(), name="contact"),
]<file_sep>dj-database-url==0.4.1
dj-static==0.0.6
Django==1.9.7
django-appconf==1.0.2
django-imagekit==3.3
django-toolbelt==0.0.1
gunicorn==19.6.0
pilkit==1.1.13
Pillow==3.2.0
psycopg2==2.6.1
pytz==2016.4
six==1.10.0
static3==0.7.0
whitenoise==3.2
<file_sep>from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
from imagekit.models import ProcessedImageField
from imagekit.processors import Resize
# Create your models here.
class Post(models.Model):
title = models.CharField(max_length=50)
created_date = models.DateTimeField(auto_now_add=True, editable=False)
modified_date = models.DateTimeField(auto_now=True)
content1 = models.TextField(default='', blank=True)
photo1 = ProcessedImageField(blank=True, processors=[Resize(800, 500)])
content2 = models.TextField(default='', blank=True)
photo2 = ProcessedImageField(blank=True, processors=[Resize(800, 500)])
content3 = models.TextField(default='', blank=True)
photo3 = ProcessedImageField(blank=True, processors=[Resize(800, 500)])
content4 = models.TextField(default='', blank=True)
photo4 = ProcessedImageField(blank=True, processors=[Resize(800, 500)])
content5 = models.TextField(default='', blank=True)
photo5 = ProcessedImageField(blank=True, processors=[Resize(800, 500)])
tags = models.CharField(max_length=200, default='', blank=True)
def __str__(self):
return self.title
class Project(models.Model):
name = models.CharField(max_length=50)
tech_list = models.TextField(default='', blank=True)
created_date = models.DateTimeField(auto_now_add=True, editable=False)
modified_date = models.DateTimeField(auto_now=True)
content1 = models.TextField(default='', blank=True)
content2 = models.TextField(default='', blank=True)
content3 = models.TextField(default='', blank=True)
content4 = models.TextField(default='', blank=True)
photo1 = ProcessedImageField(blank=True, processors=[Resize(800, 500)])
photo2 = ProcessedImageField(blank=True, processors=[Resize(800, 500)])
photo3 = ProcessedImageField(blank=True, processors=[Resize(800, 500)])
photo4 = ProcessedImageField(blank=True, processors=[Resize(800, 500)])
link = models.CharField(max_length=100, default='', blank=True)
categories = models.CharField(max_length=200, default='', blank=True)
def __str__(self):
return self.name
|
fa722747c8d931508fdbbf2e4fe7b5e9f0315b17
|
[
"Python",
"Text"
] | 5
|
Python
|
ryanbabida/babida-website
|
ccc0b0030e4bf56caf835a4a83dc0efd4d19bc0c
|
b00f2a8f4a4105893258a45003e6aed41c1dcbbb
|
refs/heads/master
|
<file_sep>// Wow
var Wow = function() {
'use strict';
// Handle Wow
var handleWow = function() {
var wow = new WOW({
boxClass: 'wow', // default
animateClass: 'animated', // default
offset: 0, // default
mobile: true,
live: true // default
});
wow.init();
}
return {
init: function() {
handleWow(); // initial setup for Wow
}
}
}();
$(document).ready(function() {
Wow.init();
});
|
a0885914064e72b5f94e678f2a01c4066c32e66d
|
[
"JavaScript"
] | 1
|
JavaScript
|
onestopgraphic/oneStop
|
f374f57909ab589bc3b8ac51a497560dfcccbe8a
|
0b8e38de2f403ff9b87288964e06a33769a7aad0
|
refs/heads/master
|
<repo_name>johan--/blog<file_sep>/server/routes/authors.js
/**
@module app
@submodule routes/authors
@requires app, rethinkdb_adapter
**/
var debug = require('debug')('authors');
var node_env = process.env.NODE_ENV || 'development';
var db = require('../lib/rethinkdb_adapter');
/**
Exports {Function} routes for Post resource
@main routes/authors
@param {Object} app - express application instance
@param {Function} restrict - middleware, for protected routes
**/
module.exports = function(app, restrict) {
/**
Create an author
Route: (verb) POST /authors
@async
**/
app.post('/authors', restrict, function (req, res) {
    db.createRecord('authors', req.body.author, function (err, payload) {
if (err) {
debug(err);
res.send(500);
} else {
res.status(201).send(payload);
}
});
});
/**
(Read) Find authors; accepts a query object
Route: (verb) GET /authors
@async
**/
app.get('/authors', function (req, res) {
db.findQuery('authors', req.query, function (err, payload) {
if (err) {
debug(err);
res.send(500);
} else {
if (node_env != 'development') {
res.header('Cache-Control', 'public, max-age=' + (30 * 24 * 60 * 60));
}
res.send(payload);
}
});
});
/**
(Read) Find an author by id
Route: (verb) GET /authors/:id
@async
**/
app.get('/authors/:id', function (req, res) {
db.find('authors', req.params.id, function (err, payload) {
if (err) {
debug(err);
res.send(500);
} else {
if (node_env != 'development') {
res.header('Cache-Control', 'public, max-age=' + (30 * 24 * 60 * 60));
}
res.send(payload);
}
});
});
/**
Update an author by id
Route: (verb) PUT /authors/:id
@async
**/
app.put('/authors/:id', restrict, function (req, res) {
    db.updateRecord('authors', req.params.id, req.body.author, function (err, payload) {
if (err) {
debug(err);
res.send(500);
} else {
res.send(payload);
}
});
});
/**
Patch an author by id
Route: (verb) PATCH /authors/:id
@async
**/
app.patch('/authors/:id', restrict, function (req, res) {
db.patchRecord('authors', req.params.id, req.body, function (err, payload) {
if (err) {
debug(err);
res.status(500).end();
} else {
res.status(204).end();
}
});
});
/**
Delete an author by id
Route: (verb) DELETE /authors/:id
@async
**/
app.delete('/authors/:id', restrict, function (req, res) {
db.deleteRecord('authors', req.params.id, function (err) {
if (err) {
debug(err);
res.send(500);
} else {
res.send(204); // No Content
}
});
});
};
<file_sep>/client/app/serializers/application.js
import Ember from 'ember';
import JSONAPISerializer from 'orbit-common/jsonapi-serializer';
export default JSONAPISerializer.extend({
normalize: function (type, data) {
return this._super(type, data);
},
deserialize: function(type, data) {
this.assignMeta(type, data);
data = this.deserializeRelations(type, data);
return this._super(type, data);
},
assignMeta: function (type, data) {
if (!data.meta) {
return;
}
var meta = this.schema.meta;
if (!meta.get(type)) {
meta.set(type, Ember.Object.create());
}
var metaByType = meta.get(type);
metaByType.set('total', data.meta.total);
},
deserializeRelations: function (type, data) {
if (type === 'post') {
data = this.deserializePosts(data);
} else if (type === 'author') {
data = this.deserializeAuthors(data);
}
return data;
},
deserializePosts: function (data) {
var posts = data.posts;
if (Array.isArray(posts)) {
for (var i = posts.length - 1; i >= 0; i--) {
posts[i].author_id = posts[i].links.author;
delete posts[i].links;
}
} else if (typeof posts === "object") {
posts.author_id = posts.links.author;
delete posts.links;
}
return data;
},
deserializeAuthors: function (data) {
var authors = data.authors;
if (Array.isArray(authors)) {
for (var i = authors.length - 1; i >= 0; i--) {
authors[i].post_ids = authors[i].links.posts;
delete authors[i].links;
}
} else if (typeof authors === "object") {
authors.post_ids = authors.links.posts;
delete authors.links;
}
return data;
},
serialize: function (type, data) {
data = this.serializeRelations(type, data);
return this._super(type, data);
},
serializeRelations: function (type, data) {
if (type === 'post') {
data = this.serializePosts(data);
} else if (type === 'author') {
data = this.serializeAuthors(data);
}
return data;
},
serializePosts: function (data) {
var posts = data.posts;
if (Array.isArray(posts)) {
for (var i = posts.length - 1; i >= 0; i--) {
posts[i].links = { author: posts[i].author_id };
delete posts[i].author_id;
        sanitize(posts[i]);
}
} else if (typeof posts === "object") {
posts.links = { author: posts.author_id };
delete posts.author_id;
sanitize(posts);
}
return data;
},
serializeAuthors: function (data) {
var authors = data.authors;
if (Array.isArray(authors)) {
for (var i = authors.length - 1; i >= 0; i--) {
authors[i].links = { posts: authors[i].post_ids };
delete authors[i].post_ids;
sanitize(authors[i]);
}
    } else if (typeof authors === "object") {
authors.links = { posts: authors.post_ids };
delete authors.post_ids;
sanitize(authors);
}
return data;
},
});
function sanitize(record) {
var keys = "meta normalized rel rev".w().map(function (key) {
return "__%@".fmt(key);
});
for (var i = 0; i < keys.length; i++) {
if (record.hasOwnProperty(keys[i])) {
delete record[keys[i]];
}
}
return record;
}
<file_sep>/client/app/models/author.js
import EO from "ember-orbit";
import hasManyProxy from "../utils/has-many-proxy";
var attr = EO.attr;
export default EO.Model.extend({
name: attr('string'),
email: attr('string'),
// IDs for related posts is expected in the JSON payload
post_ids: attr(),
// Computed property which manages related promise proxy object
posts: hasManyProxy('post')
});
<file_sep>/client/app/mixins/admin-actions.js
import Ember from 'ember';
export default Ember.Mixin.create({
actions: {
save: function () {
var model = this.modelFor(this.get('routeName'));
var type = model.resourceName;
this.store.then(function () {
this.transitionTo('admin.index');
}.bind(this));
this.store.add(type, model.toJSON());
},
cancel: function () {
this.transitionTo('admin.index');
},
destroy: function (model) {
var type = this.get('resourceName');
      // TODO REVIEW: not thenable?
// this.store.remove(type, model.get('id')).then(function () { }.bind(this));
this.store.remove(type, model.get('id'));
this.preventScroll = true;
this.modelFor('application').removeObject(model);
this.refresh();
}
}
});
<file_sep>/client/app/models/post.js
import EO from "ember-orbit";
import hasOneProxy from "../utils/has-one-proxy";
var attr = EO.attr;
var Post = EO.Model.extend({
slug: attr('string'),
title: attr('string'),
date: attr('date'),
excerpt: attr('string'),
body: attr('string'),
// ID for related author is expected in the JSON payload
author_id: attr(),
// Computed property which manages related promise proxy object
author: hasOneProxy('author'),
resourceName: 'post'
});
Post.reopenClass({
newRecord: function () {
return Ember.Object.create({
slug: '',
title: '',
date: null,
excerpt: '',
body: '',
author_id: null,
toJSON: function () {
var props = "slug title date excerpt body author_id".w();
return this.getProperties(props);
},
resourceName: 'post'
});
}
});
export default Post;
<file_sep>/client/app/routes/admin/edit.js
import Ember from 'ember';
import ResetScroll from '../../mixins/reset-scroll';
import AdminActions from '../../mixins/admin-actions';
export default Ember.Route.extend(ResetScroll, AdminActions, {
resourceName: 'post',
model: function (params) {
return this.store.find(this.get('resourceName'), params.edit_id);
},
setupController: function (controller, model) {
this._super(controller, model);
controller.set('isEditing', true);
}
});
<file_sep>/server/tests/posts_find_test.js
var app = require(__dirname + '/../app.js'),
port = 8888,
assert = require('assert'),
request = require('supertest');
var testData = require('../seeds/posts.js');
var newestDate = testData[0].date.toISOString();
var oldestDate = testData[8].date.toISOString();
describe('Posts', function () {
before(function (done) {
this.server = app.listen(port, function (err, result) {
if (err) {
done(err);
} else {
done();
}
});
});
after(function () {
this.server.close();
});
describe('GET responses:', function () {
describe('/posts', function () {
describe('root key of payload', function () {
it('is "posts"', function (done) {
request(app)
.get('/posts')
.set('Accept', 'application/json')
.expect('Content-Type', /json/)
.expect(200)
.expect(/posts/)
.expect(function (res) {
var posts = res.body.posts;
if (!posts) throw new Error('expected posts');
})
.end(handleDone(done));
});
it('sorts in DESC order of "date" property by default', function (done) {
request(app)
.get('/posts')
.expect(function (res) {
var posts = res.body.posts;
if (posts[0].date !== newestDate) throw new Error('expected desc order ('+ newestDate +')');
if (posts[8].date !== oldestDate) throw new Error('expected desc order ('+ oldestDate +')');
})
.end(handleDone(done));
});
it('has an array of posts with keys for author, body, date, excerpt, title, id', function (done) {
request(app)
.get('/posts')
.expect(/author/).expect(/body/).expect(/date/).expect(/excerpt/).expect(/title/).expect(/id/)
.expect(function (res) {
var posts = res.body.posts;
if (posts[0].title !== testData[0].title) throw new Error('expected first post title');
if (posts[8].title !== testData[8].title) throw new Error('expected last post title');
})
.end(handleDone(done));
});
      it('includes all ('+ testData.length +') posts (from seed data)', function (done) {
request(app)
.get('/posts')
.expect(function (res) {
var posts = res.body.posts;
if (posts.length !== testData.length) throw new Error('expected '+ testData.length +' posts');
})
.end(handleDone(done));
});
});
describe('meta data in payload', function () {
it('includes keys: limit, offset, total, order, sortBy', function (done) {
request(app)
.get('/posts')
.expect(/meta/)
.expect(/limit/).expect(/offset/).expect(/total/)
.expect(/order/).expect(/sortBy/)
.expect(function (res) {
var meta = res.body.meta;
if (!meta) throw new Error('expected meta');
if (meta.total !== testData.length) throw new Error('expected total of '+ testData.length);
})
.end(handleDone(done));
});
});
describe('query parameters', function () {
describe('default sortBy setting is the "date" key', function () {
it('sorts DESC order with "order=desc" param', function (done) {
request(app)
.get('/posts?order=desc')
.expect(function (res) {
var posts = res.body.posts;
if (posts[0].title !== testData[0].title) throw new Error('expected 1st post title');
if (posts[0].date !== newestDate) throw new Error('expected desc order ('+ newestDate +')');
if (posts[8].title !== testData[8].title) throw new Error('expected 2nd post title');
if (posts[8].date !== oldestDate) throw new Error('expected desc order ('+ oldestDate +')');
})
.end(handleDone(done));
});
it('sorts ASC order with "order=asc" param', function (done) {
request(app)
.get('/posts?order=asc')
.expect(function (res) {
var posts = res.body.posts;
if (posts[0].date !== oldestDate) throw new Error('expected asc order ('+ oldestDate +')');
if (posts[8].date !== newestDate) throw new Error('expected asc order ('+ newestDate +')');
})
.end(handleDone(done));
});
it('limits records using "limit=" param', function (done) {
request(app)
.get('/posts?limit=1')
.expect(function (res) {
var posts = res.body.posts;
if (posts.length !== 1) throw new Error('expected 1 of '+ testData.length +' records');
if (posts[0].date !== newestDate) throw new Error('expected '+ oldestDate);
})
.end(handleDone(done));
});
it('skips records using "offset=" param', function (done) {
request(app)
.get('/posts?offset=' + (testData.length - 1).toString())
.expect(function (res) {
var posts = res.body.posts;
if (posts.length !== 1) throw new Error('expected 1 of '+ testData.length +' records');
if (posts[0].date !== oldestDate) throw new Error('expected ' + oldestDate);
})
.end(handleDone(done));
});
});
describe('sortBy param', function () {
describe('default order is DESC', function () {
it('can sortBy "title" key (instead of default "date" key)', function (done) {
          var sortedTestData = testData.slice().sort(compareTitle);
request(app)
.get('/posts?sortBy=title')
.expect(function (res) {
var posts = res.body.posts;
              if (posts[0].title !== sortedTestData[8].title) throw new Error('expected largest title first (desc order)');
              if (posts[8].title !== sortedTestData[0].title) throw new Error('expected smallest title last (desc order)');
})
.end(handleDone(done));
});
});
});
});
});
describe('/posts/:slug', function () {
it('includes one "post" record in the payload', function (done) {
request(app)
.get('/posts?order=desc')
.end(function (err, res) {
if (err) return done(err);
var slug = res.body.posts[0].slug;
request(app)
.get('/posts/' + slug)
.set('Accept', 'application/json')
.expect('Content-Type', /json/)
.expect(200)
.expect(/posts/)
.expect(/author/).expect(/body/).expect(/date/).expect(/excerpt/).expect(/title/).expect(/id/).expect(/slug/)
.expect(function (res) {
if (res.body.posts.length > 1) throw new Error('expected one record');
var post = res.body.posts[0];
if (!post) throw new Error('expected post slug: ' + slug);
if (post.title !== testData[0].title) throw new Error('expected first post title');
})
.end(handleDone(done));
});
});
});
});
});
function handleDone(done) {
return function (err, res) {
if (err) return done(err);
done();
};
}
function compareTitle(a,b) {
if (a.title < b.title) {
return -1;
}
if (a.title > b.title) {
return 1;
}
return 0;
}
<file_sep>/server/lib/socket_adapter.js
/**
@module app
@submodule socket_adapter
db adapter using Socket.io
**/
var db = require('./rethinkdb_adapter');
var debug = require('debug')('socket_adapter');
/**
Exports setup function
@param {Object} express server
@return {Object} `io` socket.io instance
**/
module.exports = function(server) {
// options: https://github.com/Automattic/engine.io#methods-1
var options = {
'transports': ['websocket', 'polling'],
'cookie': 'connect.sid'
};
var io = require('socket.io')(server, options);
io.on('connection', function (socket) {
// Simple sanity check for client to confirm socket is working
socket.emit('hello', { hello: 'world' });
socket.on('talk-to-me', function (data, cb) {
console.log(data);
cb(data);
});
socket.on('findQuery', findQuery);
socket.on('find', find);
socket.on('add', function (payload, callback) {
var _callback = function (_payload) {
callback(_payload);
io.emit('didAdd', _payload);
};
createRecord(payload, _callback);
});
socket.on('patch', function (operation, callback) {
var _callback = function (error, _payload) {
if (error) {
console.log('Patch Error!', error);
callback({errors: error});
} else {
console.log('didPatch...', _payload);
callback(_payload);
io.emit('didPatch', _payload);
}
};
patch(operation, _callback);
});
socket.on('disconnect', function () {
io.emit('error', 'User disconnected');
});
});
return io;
};
/**
findQuery - uses query to find resources
@param {String} JSON stringified query object; the `resource` property is required
@param {Function} callback
@private
**/
function findQuery(query, callback) {
console.log('findQuery...', query);
if (typeof query === 'string') {
query = JSON.parse(query);
}
var resource = query.resource;
delete query.resource;
var _cb = callback;
db.findQuery(resource, query, function (err, payload) {
if (err) {
console.error(err);
payload = { errors: { code: 500, error: 'Server failure' } };
}
_cb(payload);
});
}
/**
find - uses query to find resources by id or slug
@param {String} JSON stringified query object; requires `resource` and `id` properties
@param {Function} callback
@private
**/
function find(query, callback) {
console.log('find...', query);
if (typeof query === 'string') {
query = JSON.parse(query);
}
var resource = query.resource;
delete query.resource;
var id = query.id;
delete query.id;
var _cb = callback;
var errorPayload = { errors: { code: 500, error: 'Server failure' } };
db.find(resource, id, function (err, payload) {
if (err) {
debug(err);
_cb(errorPayload);
} else {
if (payload.posts !== null) {
debug('/posts/:id result not null', payload.posts);
_cb(payload);
} else {
debug('/posts/:id result null, finding by slug');
db.findBySlug('posts', id, function (err, payload) {
if (err) {
debug(err);
_cb(errorPayload);
} else {
if (payload.posts !== null) {
debug('/posts/:slug result not null', payload.posts);
_cb(payload);
} else {
debug('/posts/:slug result not found');
_cb({ errors: { code: 404, error: 'Not Found' } });
}
}
});
}
}
});
}
function createRecord(payload, callback) {
console.log('createRecord...', payload);
if (typeof payload === 'string') {
payload = JSON.parse(payload);
}
var typeKey = pluralize(payload.type);
delete payload.type;
var _cb = callback;
db.createRecord(typeKey, payload[typeKey], function (err, payload) {
if (err) {
console.error(err);
payload = { errors: { code: 500, error: 'Server failure' } };
}
_cb(payload);
});
}
function patch(operation, callback) {
console.log('patch...', operation);
if (typeof operation === 'string') {
operation = JSON.parse(operation);
}
var path = operation.path.split('/');
var type = pluralize(path[1]);
var id = path[2];
var prop = path[3]; // TODO support sub-path
var payload = {};
if (operation.op === 'replace') {
payload[prop] = operation.value;
db.updateRecord(type, id, payload, callback);
} else if (operation.op === 'remove') {
db.deleteRecord(type, id, callback);
}
}
// TODO Use Ember.Inflector or other Inflector?
function singularize(name) {
return name.slice(0, name.length - 1);
}
function pluralize(name) {
return name + 's';
}
<file_sep>/client/README.md
# Ember App Builder - Client
One repository for end-to-end JavaScript application development using two apps.
This is the toolset for client-side application development.
## Getting Started
* Execute `make install` to fetch dependencies.
* See [ember-cli] for more info
[ember-cli]: http://ember-cli.com
### Static File Server
* Execute `make server` to launch a static file server for the client
app. Files in the app and vendor directories are watched; each change
triggers a build and refreshes your browser.
## Makefile
See [Makefile](Makefile) for tasks
* `make install` - fetch dependencies and setup
* `make build` - Build app using Brunch.io
* `make server` - Starts server for client app
* `make test` - Launch Testem to execute tests, see testem.json
## Build
Use `make build` (default is 'development')
* See [ember-cli]
## Dependencies
* [bower.json](bower.json)
* [package.json](package.json)
### Canary
Use bower.json for canary options, See [ember-cli].
Alternatively, use a shell script [bin/canary.sh](bin/canary.sh) to fetch Ember
Canary and Ember Data Canary (in 'vendor/development/'); also removes copied
vendor file for Ember and Ember Data.
## Testing
1. Start db, see Makefile in server directory
1. Start API server (see above); if needed, seed the db first
1. `make test` launches testem and browsers to test in dev
## Code Quality
* [jshint options]
[jshint options]: http://jshint.com/docs/options/
## Links
* [ember-cli]
## Sockets
Emit, with last argument as callback function to receive data.
beforeModel: function () {
var socket = this.socket;
// sanity check, is socket working?
socket.on('hello', function (data) {
console.log(data);
socket.emit('talk-to-me', 'I like talking.', function (msg) {
console.log('back talk', msg);
});
});
}
## ROADMAP
1. Sync data stores memory -> localStorage -> socket
2. Assist/rescue memory.find w/ strategy: if not in memory, check localStorage and if not there ask socket to find.
3. Finish SocketSource: it needs to handle add (create record), remove (delete record), update (patch record) for admin tasks (may need a replace action too)
4. Add Stats collection and track stats for each post record
5. Add ‘trends’ link in nav to list trending posts (most viewed by month)
6. Sync index of records w/ remote storage via socket to get missing records (receive new resources not in memory or localStorage)
7. Using the index, chunk-load records for index pages like the blog index (home) and archives (this already works on master via REST, but is tabled for re-implementation after the transition to sockets/orbit.js)
8. Add middleware to socket for authenticated message (cookie session) for add, remove, update, patch, replace actions
Note: 2 and 3 above may require work in orbit.js or perhaps a transform method
## Thanks
For providing examples of building with Ember.js:
* [ember-cli]
* [tildeio/bloggr-client]
* [orbitjs/orbit.js]
[tildeio/bloggr-client]: https://github.com/tildeio/bloggr-client
[orbitjs/orbit.js]: https://github.com/orbitjs/orbit.js
<file_sep>/client/app/routes/application.js
import Ember from 'ember';
import PushSupport from '../mixins/push-support';
var ApplicationRoute = Ember.Route.extend(PushSupport, {
model: function () {
return this.store.find('post');
},
setupController: function (controller, model) {
this._super(controller, model);
this.canTransition = false;
Ember.$.get(this.get('sessionUrl'))
.done(loginSuccess.bind(this));
},
sessionUrl: (function() {
var uri = [ PixelhandlerBlogENV.API_HOST ];
if (PixelhandlerBlogENV.API_PATH) { uri.push(PixelhandlerBlogENV.API_PATH); }
uri.push('sessions');
return uri.join('/');
}()),
actions: {
login: function () {
var controller = this.get('controller');
this.canTransition = true;
Ember.$.ajax({
url: this.get('sessionUrl'),
type: 'POST',
data: JSON.stringify({
username: controller.get('username'),
password: <PASSWORD>('<PASSWORD>')
}),
dataType: 'text',
contentType: 'application/json'
})
.done(loginSuccess.bind(this))
.fail(loginFailure.bind(this));
return false;
},
logout: function () {
Ember.$.ajax({
url: this.get('sessionUrl'),
type: 'DELETE'
})
.done(logoutSuccess.bind(this))
.fail(logoutFailure.bind(this));
}
}
});
function loginSuccess(/*data, status, xhr*/) {
var controller = this.get('controller');
Ember.run(function () {
this.setProperties({ 'isLoggedIn': true, 'password': <PASSWORD>, 'error': null });
}.bind(controller));
if (this.canTransition) {
this.transitionTo('admin.index');
}
}
function loginFailure(xhr, status, error) {
var controller = this.get('controller');
xhr = xhr || void 0;
status = status || void 0;
Ember.run(function () {
this.setProperties({ 'error': error, 'password': null });
}.bind(controller));
}
function logoutSuccess(/*data, status, xhr*/) {
var controller = this.get('controller');
Ember.run(function () {
this.setProperties({ 'isLoggedIn': false, 'username': null, 'error': null, 'showLogin': false });
}.bind(controller));
this.transitionTo('index');
}
function logoutFailure(xhr, status, error) {
xhr = xhr || void 0;
status = status || void 0;
var controller = this.get('controller');
Ember.run(function () {
this.setProperties({ 'error': error });
}.bind(controller));
}
export default ApplicationRoute;
<file_sep>/client/app/controllers/posts/index.js
import Ember from 'ember';
export default Ember.ArrayController.extend({
// flag to show button for more
hasMore: null
});
<file_sep>/client/app/adapters/socket-source.js
import Ember from 'ember';
import Orbit from 'orbit';
import OC from 'orbit-common';
import SocketService from '../services/socket';
import JSONAPISerializer from 'orbit-common/jsonapi-serializer';
Orbit.Promise = Orbit.Promise || Ember.RSVP.Promise;
var assert = Orbit.assert;
var Source = OC.Source;
var SocketSource = Source.extend({
init: function (schema, options) {
assert('SocketSource requires SocketService be defined', SocketService);
assert('SocketSource requires Orbit.Promise be defined', Orbit.Promise);
this._socket = SocketService.create();
options = options || {};
if (!options.skipDefaultSerializer) {
var DefaultSerializerClass = options.defaultSerializerClass || JSONAPISerializer;
this.defaultSerializer = new DefaultSerializerClass(schema);
}
return OC.Source.prototype.init.apply(this, arguments);
},
_transform: function(operation) {
var methodName = '_transform' + operation.op.capitalize();
if (typeof this[methodName] === 'function') {
return this[methodName](operation);
} else {
throw new Error(methodName + ' not implemented');
}
},
_transformAdd: function (operation) {
var path = operation.path;
var data = operation.value;
var type = path[0];
var socket = this._socket;
var payload = {};
var key = pluralize(type);
payload[key] = data;
payload = this.serialize(type, payload);
payload[key] = payload[key][key]; // TODO FIXME
delete payload[key].id; // Remove client ID created by Orbit.js
payload.type = type;
var _this = this;
return new Orbit.Promise(function (resolve, reject) {
try {
var didAdd = responseHandlerFactory(_this, type, resolve, reject);
socket.emit('add', JSON.stringify(payload), didAdd);
} catch (e) {
var msg = 'SocketSource#transform (op:add) Socket Messaging Error';
console.log(msg, e);
throw new Error(msg, e);
}
});
},
_transformReplace: function (operation) {
return this._transformPatch(operation);
},
_transformRemove: function (operation) {
return this._transformPatch(operation);
},
_transformPatch: function (operation) {
var socket = this._socket;
var path = operation.path;
var type = path[0];
var _this = this;
return new Orbit.Promise(function (resolve, reject) {
try {
var didPatch = responseHandlerFactory(_this, type, resolve, reject);
if (Array.isArray(operation.path)) { // REVIEW why is this needed?
operation.path = '/' + operation.path.join('/');
}
socket.emit('patch', JSON.stringify(operation), didPatch);
} catch (e) {
var msg = 'SocketSource#transform (op:patch) Socket Messaging Error';
console.log(msg, e);
throw new Error(msg, e);
}
}).then(function () {
_this._transformCache(operation); // REVIEW do we need to call?
});
},
_addRecordsToCache: function(type, records) {
var _this = this;
records.forEach(function(record) {
_this._addRecordToCache(type, record);
});
},
_addRecordToCache: function(type, record) {
this._transformCache({
op: 'add',
path: [type, this.getId(record)],
value: record
});
},
_find: function(type, id) {
if (id && (typeof id === 'number' || typeof id === 'string')) {
return this._findOne(type, id);
} else {
return this._findQuery(type, id);
}
},
_findOne: function (type, id) {
var socket = this._socket;
var query = {resource: type + 's', id: id};
var _this = this;
return new Orbit.Promise(function (resolve, reject) {
try {
var didFind = responseHandlerFactory(_this, type, resolve, reject);
socket.emit('find', JSON.stringify(query), didFind);
} catch (e) {
var msg = 'SocketSource#_find Socket Messaging Error';
console.log(msg, e);
throw new Error(msg, e);
}
});
},
_findQuery: function (type, query) {
var socket = this._socket;
query = query || {};
query.resource = query.resource || pluralize(type);
query = this._queryFactory(query);
var _this = this;
return new Orbit.Promise(function (resolve, reject) {
try {
var didFindQuery = responseHandlerFactory(_this, type, resolve, reject);
socket.emit('findQuery', JSON.stringify(query), didFindQuery);
} catch (e) {
var msg = 'SocketSource#_findQuery Socket Messaging Error';
console.log(msg, e);
throw new Error(msg, e);
}
});
},
_queryFactory: function (query) {
var _this = this;
var attrs = Ember.String.w('limit offset sortBy order resource withFields');
query = query || {};
attrs.forEach(function (attr) {
query[attr] = query[attr] || Ember.get(_this, attr);
});
return query;
},
_patch: function (type, id, property, value) {
return OC.Source.prototype._patch.call(this, type, id, property, value);
},
_remove: function () {
return OC.Source.prototype._remove.apply(this, Array.prototype.slice.call(arguments));
},
_transformCache: function (operation) {
var pathToVerify;
if (operation.op === 'add') {
pathToVerify = operation.path.slice(0, operation.path.length - 1);
} else {
pathToVerify = operation.path;
}
if (this.retrieve(pathToVerify)) {
this._cache.transform(operation);
} else {
this.didTransform(operation, []);
}
},
pathForType: function(type) {
return this.schema.pluralize(type);
},
serializerForType: function(/*type*/) {
return this.defaultSerializer;
},
serialize: function(type, data) {
return this.serializerForType(type).serialize(type, data);
},
deserialize: function(type, data) {
var deserialized = this.serializerForType(type).deserialize(type, data),
records = deserialized[type];
if (this._cache) {
if (Array.isArray(records)) {
this._addRecordsToCache(type, records);
} else {
this._addRecordToCache(type, records);
}
if (deserialized.linked) {
Object.keys(deserialized.linked).forEach(function(relType) {
this._addRecordsToCache(relType, deserialized.linked[relType]);
}, this);
}
}
return records;
}
});
var responseHandlerFactory = function (source, type, resolve, reject) {
return function (payload) {
var root = pluralize(type);
if (payload.errors || !payload[root]) {
reject(payload.errors);
} else {
var data = source.deserialize(type, payload);
return source.settleTransforms().then(function() {
return resolve(data);
});
}
};
};
// TODO use Ember.Inflector https://github.com/stefanpenner/ember-inflector.git
var pluralize = function (name) {
return name + 's';
};
export default SocketSource;
<file_sep>/client/app/mixins/push-support.js
import Ember from 'ember';
export default Ember.Mixin.create({
beforeModel: function () {
this.socketSanityCheck();
this._super();
},
socketSanityCheck: function () {
// Sanity check, is socket working? check output browser console.
var socket = this.socket;
socket.on('hello', function (data) {
console.log(data);
socket.emit('talk-to-me', 'I like talking.', function (msg) {
console.log('back talk', msg);
});
});
},
patchableCollections: "application index postsIndex".w(),
onDidAdd: function () {
try {
this.socket.on('didAdd', this.addToCollections.bind(this));
} catch (e) {
console.log(e);
}
}.on('init'),
addToCollections: function (payload) {
console.log('addToCollections', payload);
// if (!payload.op) { return; } TODO use JSON Patch syntax for payload?
var type = this._extractType(payload);
var typeKey = this.store.schema._schema.pluralize(type);
var model = this.store.retrieve(type, payload[typeKey].id);
if (!model) {
model = Ember.Object.create(payload[typeKey]);
}
this.get('patchableCollections').forEach(function (name) {
try {
var collection = this.modelFor(name);
if (collection) {
collection.insertAt(0, model);
var controller = this.controllerFor(name);
if (controller) {
controller.set('content', collection);
}
}
} catch (e) {
console.log(e);
}
}.bind(this));
},
onDidPatch: function () {
try {
this.socket.on('didPatch', this.patchInCollections.bind(this));
} catch (e) {
console.log(e);
}
}.on('init'),
patchInCollections: function (payload) {
console.log('patchInCollections', payload);
if (payload.op && payload.op === 'remove') {
this.removeFromCollections(payload);
} else if (payload) {
this.updateInStore(payload);
}
},
removeFromCollections: function (payload) {
console.log('removeFromCollections', payload);
var model = this._retrieveModel(payload);
if (model) {
this.get('patchableCollections').forEach(function (name) {
try {
var collection = this.modelFor(name);
if (collection) {
collection.removeObject(model);
var controller = this.controllerFor(name);
if (controller) {
controller.set('content', collection);
}
}
} catch (e) {
console.log(e);
}
}.bind(this));
if (model.constructor.typeKey) {
this.store.remove(model.constructor.typeKey, model.get('id'));
}
}
},
updateInStore: function (payload) {
console.log('updateInStore', payload);
var type = this._extractType(payload);
var typeKey = this.store.schema._schema.pluralize(type);
var model = this.store.retrieve(type, payload[typeKey].id);
// TODO perhaps need to use JSON Patch payload not model as JSON
var record = payload[typeKey];
if (model && record) {
delete record.id;
model.setProperties(payload[typeKey]);
}
},
_extractType: function (payload) {
var models = this.store.schema._schema.models;
var type;
for (var key in payload) {
if (payload.hasOwnProperty(key)) {
key = this.store.schema._schema.singularize(key);
if (models[key]) {
type = key;
continue;
}
}
}
if (!type) {
throw new Error('Cannot extract type');
} else {
return type;
}
},
_retrieveModel: function (payload) {
if (!payload.path) { return undefined; }
var path = payload.path.split('/');
var type = this.store.schema._schema.singularize(path[1]);
var id = path[2];
var model = this.store.retrieve(type, {id: id});
return (model) ? model : undefined;
}
});
<file_sep>/client/app/mixins/record-chunks.js
import Ember from 'ember';
export default Ember.Mixin.create({
/**
Prototype using this mixin must define resourceName and controllerName; the
controller must also be defined explicitly, it can't rely on auto generation
@prop {String} resourceName - name of record type used to lookup via store
@prop {String} controllerName - name of controller to set `hasMore` flag on
**/
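// Usage sketch (names below are illustrative, not taken from this repo):
//   export default Ember.Route.extend(RecordChunksMixin, {
//     resourceName: 'post',
//     controllerName: 'posts.index'
//   });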
resourceName: null,
/**
Prototype using mixin may redefine limit and offset as needed.
**/
limit: 5,
offset: -5,
beforeModel: function () {
this.set('offset', this.get('offset') + this.get('limit'));
},
model: function () {
var query = { offset: this.get('offset'), limit: this.get('limit') };
return this.store.find(this.get('resourceName'), query);
},
afterModel: function (collection) {
//var meta = meta || new Ember.Set();
var loaded = this.get('loadedIds');
collection.mapBy('id').forEach(function (id) {
loaded.push(id);
}.bind(this));
this.set('loadedIds', loaded.uniq());
return collection;
},
setupController: function (controller, collection) {
var type = this.get('resourceName');
collection = [];
this.get('loadedIds').forEach(function (id) {
var model = this.store.retrieve(type, {id: id});
if (model) {
collection.push(model);
}
}.bind(this));
controller.setProperties({
'hasMore': this.get('hasMore'),
'loadingMore': false
});
this._super(controller, collection);
},
loadedIds: [],
hasMore: function () {
var meta = this.get('meta');
if (!meta) {
return false;
}
return this.get('loadedIds').length < meta.get('total');
}.property('loadedIds', 'total').volatile(),
meta: Ember.computed(function () {
var type = this.get('resourceName');
if (!this.store.schema._schema.meta) {
return null;
}
return this.store.schema._schema.meta.get(type);
})
});
<file_sep>/server/tests/posts_update_test.js
var app = require(__dirname + '/../app.js'),
port = 8888,
assert = require('assert'),
request = require('superagent').agent();
var config = require('../config')();
var serverUrl = 'http://localhost:' + port;
describe('Posts', function () {
before(function (done) {
this.server = app.listen(port, function (err, result) {
if (err) {
done(err);
} else {
done();
}
});
});
after(function () {
this.server.close();
});
describe('PUT responses:', function () {
describe('/posts/:id', function () {
it('updates a "post" record, excerpt changed', function (done) {
var cookie;
var credentials = config.admin;
request.post(serverUrl + '/sessions')
.send(credentials)
.end(function (res) {
assert(res.ok);
assert(res.noContent);
cookie = res.headers['set-cookie'];
assert(cookie);
cookie = cookie[0].slice(0, cookie[0].indexOf(';'));
request.get(serverUrl + '/posts?order=desc')
.set('Cookie', cookie)
.end(function (res) {
assert(res.ok);
var slug = res.body.posts[0].slug;
assert(slug);
var payload = { post: res.body.posts[0] };
payload.post.excerpt += " [updatable]";
delete payload.post.id;
request.put(serverUrl + '/posts/' + slug)
.set('Cookie', cookie)
.send(payload)
.end(function (res) {
assert(res.ok);
var post = res.body.posts[0];
assert(post);
assert(post.excerpt.match(/\[updatable\]/));
done();
});
});
});
});
});
});
});
<file_sep>/client/app/routes/admin/create.js
import Ember from 'ember';
import ResetScroll from '../../mixins/reset-scroll';
import AdminActions from '../../mixins/admin-actions';
import Post from '../../models/post';
export default Ember.Route.extend(ResetScroll, AdminActions, {
resourceName: 'post',
model: function () {
return Post.newRecord();
},
afterModel: function (model) {
return this.store.find('author').then(function (authors) {
var id = authors.get('firstObject').get('id');
model.set('author_id', id);
});
},
setupController: function (controller, model) {
this._super(controller, model);
controller.set('dateInput', moment().format('L'));
}
});
|
c203aad0aff9dd9fb3f51a28a44218b18a0be3d6
|
[
"JavaScript",
"Markdown"
] | 16
|
JavaScript
|
johan--/blog
|
3aacbc9da51951d95983f8b2fdaedfee6af31660
|
870b67ad79f627340664cea0b43aeaa15707b3cd
|
refs/heads/master
|
<repo_name>JPlante9117/scraping-flatiron-code-along-online-web-ft-100719<file_sep>/lib/scraper.rb
require 'nokogiri'
require 'open-uri'
require 'pry'
require_relative './course.rb'
class Scraper
def print_courses #makes the courses, and then prints them out in the specific format
self.make_courses
Course.all.each do |course|
if course.title && course.title != ""
puts "Title: #{course.title}"
puts " Schedule: #{course.schedule}"
puts " Description: #{course.description}"
end
end
end
def get_page #uses Nokogiri to get the entire page HTML
doc = Nokogiri::HTML(open('http://learn-co-curriculum.github.io/site-for-scraping/courses'))
#binding.pry
end
def make_courses #accesses the get_courses method, and iterates through all of the courses to create Course class objects
self.get_courses.each do |post|
course = Course.new
course.title = post.css("h2").text
course.schedule = post.css(".date").text
course.description = post.css("p").text
end
end
def get_courses #searches through the page to get all the css with the class post
self.get_page.css('.post')
end
end
|
8a1ab448c16e5c6e66f0f99335e0eb0d7945ddc8
|
[
"Ruby"
] | 1
|
Ruby
|
JPlante9117/scraping-flatiron-code-along-online-web-ft-100719
|
925f72f56468bf8d1799b35a99dae56ff3d93d13
|
d9051bcbc8e6119fd6ea4d37d836659733b0280c
|
refs/heads/main
|
<repo_name>mgalang229/Codechef-Check-Algorithm<file_sep>/sol.cpp
#include <bits/stdc++.h>
#include <ext/pb_ds/assoc_container.hpp>
using namespace std;
using namespace __gnu_pbds;
#define ll long long
#define ar array
typedef tree<int, null_type, less<int>, rb_tree_tag,
tree_order_statistics_node_update> indexed_set;
int main() {
ios::sync_with_stdio(0);
cin.tie(0);
int t;
cin >> t;
while(t--) {
string s;
cin >> s;
bool vis[(int)s.size()+1];
memset(vis, 1, sizeof(vis));
string ns;
for(int i=0; i<(int)s.size(); ++i) {
int cnt=1;
for(int j=i+1; j<(int)s.size()&&s[j]==s[i]; ++j) {
cnt++;
vis[j]=0;
}
if(vis[i]) {
string tmp=to_string(cnt);
ns+=s[i];
ns+=tmp;
}
}
cout << ((int)ns.size()<(int)s.size()?"YES":"NO") << "\n";
}
}
<file_sep>/README.md
# Codechef-Check-Algorithm
Link: https://www.codechef.com/problems/CHEALG
<file_sep>/optimized_sol.cpp
#include <bits/stdc++.h>
#include <ext/pb_ds/assoc_container.hpp>
using namespace std;
using namespace __gnu_pbds;
#define ll long long
#define ar array
typedef tree<int, null_type, less<int>, rb_tree_tag,
tree_order_statistics_node_update> indexed_set;
int main() {
ios::sync_with_stdio(0);
cin.tie(0);
int t;
cin >> t;
while(t--) {
string s;
cin >> s;
int cnt=0;
for(int i=0; i<(int)s.size();) {
int j=0, tmp=0;
for(j=i; j<(int)s.size()&&s[j]==s[i]; ++j)
++tmp;
while(tmp>0) {
cnt++;
tmp/=10;
}
cnt++;
i=j;
}
cout << (cnt<(int)s.size()?"YES":"NO") << "\n";
}
}
|
7f01e2c62d42434ac4cb469c52827e4ffadf6bb3
|
[
"Markdown",
"C++"
] | 3
|
C++
|
mgalang229/Codechef-Check-Algorithm
|
ece77f85388f670949b449057474a5412e9ecd74
|
c7c98d49a950f0cb3f66633c15d64f3757730725
|
refs/heads/master
|
<file_sep>package com.junbaor.huxblog.service;
import com.junbaor.huxblog.common.AppUtils;
import com.junbaor.huxblog.common.CacheManager;
import com.junbaor.huxblog.dao.ArticleMapper;
import com.junbaor.huxblog.dao.ArticleTagMapper;
import com.junbaor.huxblog.dao.TagMapper;
import com.junbaor.huxblog.model.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
* Created by junbaor on 2016/5/5.
*/
@Service
public class MainService {
private static final Logger logger = LoggerFactory.getLogger(MainService.class);
@Autowired
private JdbcTemplate jdbcTemplate;
@Autowired
private CacheManager cacheManager;
@Autowired
private ArticleMapper articleMapper;
@Autowired
private TagMapper tagMapper;
@Autowired
private ArticleTagMapper articleTagMapper;
public List<Tag> getTagAll() {
return tagMapper.selectByExample(null);
}
public int saveInfo(Article article) {
ArticleExample example = new ArticleExample();
example.createCriteria().andTagEqualTo(Long.valueOf(1));
List<Article> articles = articleMapper.selectByExample(example);
articleMapper.selectByExample(example);
if (articles.size() == 0) {
article.setBm(AppUtils.getUUID());
article.setTitle("about页面");
article.setTag(Long.valueOf(1));
return articleMapper.insertSelective(article);
} else {
article.setId(articles.get(0).getId());
return articleMapper.updateByPrimaryKeySelective(article);
}
}
public Article getInfo() {
ArticleExample example = new ArticleExample();
example.createCriteria().andTagEqualTo(Long.valueOf(1));
return articleMapper.selectByExample(example).get(0);
}
@Transactional(propagation = Propagation.REQUIRED)
public int saveArticle(Article article, String tags) {
article.setBm(AppUtils.getUUID());
int i = articleMapper.insertSelective(article);
if (AppUtils.isNotBlank(tags)) {
List<Tag> tagList = tagMapper.selectByExample(null);
List<String> tagNames = new ArrayList<String>();
for (Tag tag : tagList) {
tagNames.add(tag.getTagName());
}
String[] split = tags.split(",");
for (String s : split) {
if (!tagNames.contains(s)) {
Tag tag = new Tag();
tag.setBm(AppUtils.getUUID());
tag.setTagName(s);
tagMapper.insertSelective(tag);
}
}
TagExample tagExample = new TagExample();
tagExample.createCriteria().andTagNameIn(Arrays.asList(split));
List<Tag> list = tagMapper.selectByExample(tagExample);
for (Tag tag : list) {
ArticleTag articleTag = new ArticleTag();
articleTag.setTagBm(tag.getBm());
articleTag.setArticleBm(article.getBm());
articleTagMapper.insertSelective(articleTag);
}
}
return i;
}
public Article getArticleOne(String id) {
ArticleExample example = new ArticleExample();
example.createCriteria().andIdEqualTo(Integer.valueOf(id));
return articleMapper.selectByExample(example).get(0);
}
public List<Article> getArticleAll() {
ArticleExample example = new ArticleExample();
example.createCriteria().andTagNotEqualTo(Long.valueOf(1));
example.setOrderByClause("create_date desc");
return articleMapper.selectByExample(example);
}
}
<file_sep># huxblog
### Introduction
A `Java` version of the `jekyll` static blog.
The theme was ported from [黄玄](http://huangxuan.me/)'s blog; it is a `jekyll` theme that is very popular among Chinese developers.
### Technologies
* Spring
* Spring MVC
* MyBatis
* MySQL
* Maven
### Changes
* The image in the article header was replaced with a solid color implemented in CSS
* The admin backend uses a rich text editor; markdown is not supported
### Usage
* Fork this project, clone it locally, and edit the `src/main/resources/config.properties` file (see the comments for details)
* Import `init.sql` into your own database
* Run `mvn tomcat7:run` and visit `http://127.0.0.1:8080/huxblog` in a browser
* To deploy to middleware such as tomcat, run `mvn package`; `huxblog.war` will be generated in the target directory
> Admin backend: `http://127.0.0.1:8080/huxblog/admin`; log in with the username and password configured in config.properties
### Screenshots





<file_sep>CREATE DATABASE huxblog DEFAULT CHARACTER SET utf8;
DROP TABLE IF EXISTS `huxblog_article`;
CREATE TABLE `huxblog_article` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`bm` varchar(100) DEFAULT NULL,
`title` varchar(100) DEFAULT NULL,
`title_sub` varchar(100) DEFAULT NULL,
`content` text,
`create_date` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
`tag` decimal(10,0) DEFAULT '0' COMMENT '0 = blog post, 1 = about page',
`theme` varchar(30) DEFAULT '#293696' COMMENT 'article header color',
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8;
insert into `huxblog_article`(`id`,`bm`,`title`,`title_sub`,`content`,`create_date`,`tag`,`theme`) values (1,'3D9540497587418DA8787AC839829306','about页面',NULL,'<p>你可以在这里编辑一些介绍自己的文字。</p><p><br></p>','2016-05-06 17:25:15','1','#293696'),(2,'D757831311144077AA01D06B4D8246C7','Hello Word','第一篇博文','<p>如果你能看到这篇文章说明博客已经正确安装。</p>','2016-05-06 17:26:39','0','#293696');
DROP TABLE IF EXISTS `huxblog_article_tag`;
CREATE TABLE `huxblog_article_tag` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`article_bm` varchar(100) DEFAULT NULL,
`tag_bm` varchar(100) DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8;
insert into `huxblog_article_tag`(`id`,`article_bm`,`tag_bm`) values (1,'D<KEY>','<KEY>'),(2,'D<KEY>','A02DD40894F249C6A981C382E9295911');
DROP TABLE IF EXISTS `huxblog_tag`;
CREATE TABLE `huxblog_tag` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`bm` varchar(100) DEFAULT NULL,
`tag_name` varchar(100) DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8;
insert into `huxblog_tag`(`id`,`bm`,`tag_name`) values (1,'776A67<KEY>C338B','技术'),(2,'A02DD40894F249C6A981C382E9295911','入门');
<file_sep>package com.junbaor.huxblog.model;
public class ArticleTag {
/**
* This field was generated by MyBatis Generator.
* This field corresponds to the database column huxblog_article_tag.id
*
* @mbggenerated
*/
private Integer id;
/**
* This field was generated by MyBatis Generator.
* This field corresponds to the database column huxblog_article_tag.article_bm
*
* @mbggenerated
*/
private String articleBm;
/**
* This field was generated by MyBatis Generator.
* This field corresponds to the database column huxblog_article_tag.tag_bm
*
* @mbggenerated
*/
private String tagBm;
/**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column huxblog_article_tag.id
*
* @return the value of huxblog_article_tag.id
*
* @mbggenerated
*/
public Integer getId() {
return id;
}
/**
* This method was generated by MyBatis Generator.
* This method sets the value of the database column huxblog_article_tag.id
*
* @param id the value for huxblog_article_tag.id
*
* @mbggenerated
*/
public void setId(Integer id) {
this.id = id;
}
/**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column huxblog_article_tag.article_bm
*
* @return the value of huxblog_article_tag.article_bm
*
* @mbggenerated
*/
public String getArticleBm() {
return articleBm;
}
/**
* This method was generated by MyBatis Generator.
* This method sets the value of the database column huxblog_article_tag.article_bm
*
* @param articleBm the value for huxblog_article_tag.article_bm
*
* @mbggenerated
*/
public void setArticleBm(String articleBm) {
this.articleBm = articleBm == null ? null : articleBm.trim();
}
/**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column huxblog_article_tag.tag_bm
*
* @return the value of huxblog_article_tag.tag_bm
*
* @mbggenerated
*/
public String getTagBm() {
return tagBm;
}
/**
* This method was generated by MyBatis Generator.
* This method sets the value of the database column huxblog_article_tag.tag_bm
*
* @param tagBm the value for huxblog_article_tag.tag_bm
*
* @mbggenerated
*/
public void setTagBm(String tagBm) {
this.tagBm = tagBm == null ? null : tagBm.trim();
}
}<file_sep>package com.junbaor.huxblog.dao;
import com.junbaor.huxblog.model.ArticleTag;
import com.junbaor.huxblog.model.ArticleTagExample;
import java.util.List;
import org.apache.ibatis.annotations.Param;
public interface ArticleTagMapper {
/**
* This method was generated by MyBatis Generator.
* This method corresponds to the database table huxblog_article_tag
*
* @mbggenerated
*/
int countByExample(ArticleTagExample example);
/**
* This method was generated by MyBatis Generator.
* This method corresponds to the database table huxblog_article_tag
*
* @mbggenerated
*/
int deleteByExample(ArticleTagExample example);
/**
* This method was generated by MyBatis Generator.
* This method corresponds to the database table huxblog_article_tag
*
* @mbggenerated
*/
int deleteByPrimaryKey(Integer id);
/**
* This method was generated by MyBatis Generator.
* This method corresponds to the database table huxblog_article_tag
*
* @mbggenerated
*/
int insert(ArticleTag record);
/**
* This method was generated by MyBatis Generator.
* This method corresponds to the database table huxblog_article_tag
*
* @mbggenerated
*/
int insertSelective(ArticleTag record);
/**
* This method was generated by MyBatis Generator.
* This method corresponds to the database table huxblog_article_tag
*
* @mbggenerated
*/
List<ArticleTag> selectByExample(ArticleTagExample example);
/**
* This method was generated by MyBatis Generator.
* This method corresponds to the database table huxblog_article_tag
*
* @mbggenerated
*/
ArticleTag selectByPrimaryKey(Integer id);
/**
* This method was generated by MyBatis Generator.
* This method corresponds to the database table huxblog_article_tag
*
* @mbggenerated
*/
int updateByExampleSelective(@Param("record") ArticleTag record, @Param("example") ArticleTagExample example);
/**
* This method was generated by MyBatis Generator.
* This method corresponds to the database table huxblog_article_tag
*
* @mbggenerated
*/
int updateByExample(@Param("record") ArticleTag record, @Param("example") ArticleTagExample example);
/**
* This method was generated by MyBatis Generator.
* This method corresponds to the database table huxblog_article_tag
*
* @mbggenerated
*/
int updateByPrimaryKeySelective(ArticleTag record);
/**
* This method was generated by MyBatis Generator.
* This method corresponds to the database table huxblog_article_tag
*
* @mbggenerated
*/
int updateByPrimaryKey(ArticleTag record);
}<file_sep>package com.junbaor.huxblog.model;
import java.util.Date;
import java.util.List;
public class Article {
private List<Tag> tagList;
public List<Tag> getTagList() {
return tagList;
}
public void setTagList(List<Tag> tagList) {
this.tagList = tagList;
}
/**
* This field was generated by MyBatis Generator.
* This field corresponds to the database column huxblog_article.id
*
* @mbggenerated
*/
private Integer id;
/**
* This field was generated by MyBatis Generator.
* This field corresponds to the database column huxblog_article.bm
*
* @mbggenerated
*/
private String bm;
/**
* This field was generated by MyBatis Generator.
* This field corresponds to the database column huxblog_article.title
*
* @mbggenerated
*/
private String title;
/**
* This field was generated by MyBatis Generator.
* This field corresponds to the database column huxblog_article.title_sub
*
* @mbggenerated
*/
private String titleSub;
/**
* This field was generated by MyBatis Generator.
* This field corresponds to the database column huxblog_article.content
*
* @mbggenerated
*/
private String content;
/**
* This field was generated by MyBatis Generator.
* This field corresponds to the database column huxblog_article.create_date
*
* @mbggenerated
*/
private Date createDate;
/**
* This field was generated by MyBatis Generator.
* This field corresponds to the database column huxblog_article.tag
*
* @mbggenerated
*/
private Long tag;
/**
* This field was generated by MyBatis Generator.
* This field corresponds to the database column huxblog_article.theme
*
* @mbggenerated
*/
private String theme;
/**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column huxblog_article.id
*
* @return the value of huxblog_article.id
*
* @mbggenerated
*/
public Integer getId() {
return id;
}
/**
* This method was generated by MyBatis Generator.
* This method sets the value of the database column huxblog_article.id
*
* @param id the value for huxblog_article.id
*
* @mbggenerated
*/
public void setId(Integer id) {
this.id = id;
}
/**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column huxblog_article.bm
*
* @return the value of huxblog_article.bm
*
* @mbggenerated
*/
public String getBm() {
return bm;
}
/**
* This method was generated by MyBatis Generator.
* This method sets the value of the database column huxblog_article.bm
*
* @param bm the value for huxblog_article.bm
*
* @mbggenerated
*/
public void setBm(String bm) {
this.bm = bm == null ? null : bm.trim();
}
/**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column huxblog_article.title
*
* @return the value of huxblog_article.title
*
* @mbggenerated
*/
public String getTitle() {
return title;
}
/**
* This method was generated by MyBatis Generator.
* This method sets the value of the database column huxblog_article.title
*
* @param title the value for huxblog_article.title
*
* @mbggenerated
*/
public void setTitle(String title) {
this.title = title == null ? null : title.trim();
}
/**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column huxblog_article.title_sub
*
* @return the value of huxblog_article.title_sub
*
* @mbggenerated
*/
public String getTitleSub() {
return titleSub;
}
/**
* This method was generated by MyBatis Generator.
* This method sets the value of the database column huxblog_article.title_sub
*
* @param titleSub the value for huxblog_article.title_sub
*
* @mbggenerated
*/
public void setTitleSub(String titleSub) {
this.titleSub = titleSub == null ? null : titleSub.trim();
}
/**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column huxblog_article.content
*
* @return the value of huxblog_article.content
*
* @mbggenerated
*/
public String getContent() {
return content;
}
/**
* This method was generated by MyBatis Generator.
* This method sets the value of the database column huxblog_article.content
*
* @param content the value for huxblog_article.content
*
* @mbggenerated
*/
public void setContent(String content) {
this.content = content == null ? null : content.trim();
}
/**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column huxblog_article.create_date
*
* @return the value of huxblog_article.create_date
*
* @mbggenerated
*/
public Date getCreateDate() {
return createDate;
}
/**
* This method was generated by MyBatis Generator.
* This method sets the value of the database column huxblog_article.create_date
*
* @param createDate the value for huxblog_article.create_date
*
* @mbggenerated
*/
public void setCreateDate(Date createDate) {
this.createDate = createDate;
}
/**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column huxblog_article.tag
*
* @return the value of huxblog_article.tag
*
* @mbggenerated
*/
public Long getTag() {
return tag;
}
/**
* This method was generated by MyBatis Generator.
* This method sets the value of the database column huxblog_article.tag
*
* @param tag the value for huxblog_article.tag
*
* @mbggenerated
*/
public void setTag(Long tag) {
this.tag = tag;
}
/**
* This method was generated by MyBatis Generator.
* This method returns the value of the database column huxblog_article.theme
*
* @return the value of huxblog_article.theme
*
* @mbggenerated
*/
public String getTheme() {
return theme;
}
/**
* This method was generated by MyBatis Generator.
* This method sets the value of the database column huxblog_article.theme
*
* @param theme the value for huxblog_article.theme
*
* @mbggenerated
*/
public void setTheme(String theme) {
this.theme = theme == null ? null : theme.trim();
}
}<file_sep>package com.junbaor.huxblog.common;
import java.util.UUID;
/**
* Created by junbaor on 2016/5/5.
*/
public class AppUtils {
public static boolean isBlank(String str) {
if (str == null) return true;
if (str.length() == 0) return true;
return false;
}
public static boolean isNotBlank(String str) {
return !isBlank(str);
}
public static String getUUID() {
return UUID.randomUUID().toString().replace("-", "").toUpperCase();
}
}
|
704721a1dbb81fb6f4ee97ca933ce63408db136c
|
[
"Markdown",
"Java",
"SQL"
] | 7
|
Java
|
junbaor/huxblog
|
0cce8a00b59f6c135edd5d0c656558351c2af81b
|
dc105e428f2c0d37342ab4e0235c083782e74bad
|
refs/heads/main
|
<file_sep>package com.step.model.mobila;
public class Noptiera {
private String culoare;
private int inaltime;
public int getInaltime(){
return inaltime;
}
public void setInaltime(int inaltime){
this.inaltime=inaltime;
}
public String getCuloare() {
return culoare;
}
public void setCuloare(String culoare) {
this.culoare = culoare;
}
}
<file_sep>package array;
import java.util.Scanner;
public class Main {
public static void main(String[] args)
{
Array array= new Array();
array.setSize();
array.setArray();
array.showArray();
System.out.println("elementul maxim="+array.getMax());
System.out.println("elementul minim="+array.getMin());
}
}
<file_sep>package com.step.model.tehnica;
public class Cazan {
private String stare="Off";
private int temperatura=0;
public int getTemperatura() {
return temperatura;
}
public void setTemperatura(int temperatura) {
if (temperatura<0)
this.temperatura = 0;
else if(temperatura>100)
this.temperatura = 100;
else
this.temperatura = temperatura;
}
public String getStare() {
return stare;
}
public void stareOff(){
this.stare="Off";
}
public void stareIarna(){
this.stare="Iarna";
}
public void stareVara(){
this.stare="Vara";
}
}
<file_sep>package com.step.model.tehnica;
public class Arzator {
private String putere="1900 W";
private int temperatura=0;
public String getPutere() {
return putere;
}
public int getTemperatura() {
return temperatura;
}
public void setTemperatura(int temperatura) {
if(temperatura<0)
this.temperatura =0;
else if(temperatura>1900)
this.temperatura =1900;
else
this.temperatura = temperatura;
}
}
<file_sep>package com.step.model.mobila;
public class Dulap {
private String sertar;
private String usa;
public void deschideSertar(){
sertar="deschis";
}
public void inchideSertar(){
sertar="inchis";
}
public void deschideUsa(){
usa="deschis";
}
public void inchideUsa(){
usa="inchis";
}
public String getSertar() {
return sertar;
}
public void setSertar(String sertar) {
this.sertar = sertar;
}
public String getUsa() {
return usa;
}
public void setUsa(String usa) {
this.usa = usa;
}
}
<file_sep>package array;
import java.util.Scanner;
public class Main {
public static void main(String[] args) {
char[] a =new char[10];
Scanner sc = new Scanner(System.in);
for(int i=0;i<10;i++) {
System.out.print("Dati a[" + i + "]=");
a[i] = sc.nextLine().charAt(0);
}
for(int i=0;i<10;i++) {
switch(a[i]){
case 'a' :
System.out.println(a[i]+" Vocala"); break;
case 'e' :
System.out.println(a[i]+" Vocala"); break;
case 'i' :
System.out.println(a[i]+" Vocala"); break;
case 'o' :
System.out.println(a[i]+" Vocala"); break;
case 'u' :
System.out.println(a[i]+" Vocala"); break;
case 'y' :
System.out.println(a[i]+" Vocala"); break;
case 'A' :
System.out.println(a[i]+" Vocala"); break;
case 'E' :
System.out.println(a[i]+" Vocala"); break;
case 'I' :
System.out.println(a[i]+" Vocala"); break;
case 'O' :
System.out.println(a[i]+" Vocala"); break;
case 'U' :
System.out.println(a[i]+" Vocala"); break;
case 'Y' :
System.out.println(a[i]+" Vocala"); break;
default:
System.out.println(a[i]+" Consoana");
}
}
}
}
<file_sep>package com.step.model.mobila;
public class Scaun {
private String culoare;
private int inaltime=50;
private int rotatieGrade =0;
public String getCuloare() {
return culoare;
}
public void setCuloare(String culoare) {
this.culoare = culoare;
}
public int getInaltime() {
return inaltime;
}
public void setInaltime(int inaltime) {
if (inaltime < 50)
this.inaltime = 50;
else if (inaltime > 100)
this.inaltime = 100;
else
this.inaltime = inaltime;
}
public int getRotatieGrade() {
return rotatieGrade;
}
public void setRotatieGrade(int rotatieGrade) {
if(rotatieGrade<0)
this.rotatieGrade =0;
else if(rotatieGrade>360)
this.rotatieGrade =360;
else
this.rotatieGrade = rotatieGrade;
}
}
<file_sep>package com.step.comparator;
public class NumbersComparator {
public int maxim(int a,int b)
{
if(a>=b)
{return a;}
else
{return b;}
}
public int minim(int a,int b)
{
if(a>=b)
{return b;}
else
{return a;}
}
}
<file_sep>package com.step.model.tehnica;
public class CuptorCuMicrounde {
private int temperatura=0;
private int timp=0;
private String starea="Off";
private String usa="Deschis";
private String lumina="Off";
public int getTemperatura() {
return temperatura;
}
public void setTemperatura(int temperatura) {
this.temperatura = temperatura;
}
public int getTimp() {
return timp;
}
public void setTimp(int timp) {
this.timp = timp;
}
public String getStarea() {
return starea;
}
public void cuptorOn(){
this.starea="On";
}
public void cuptorOff(){
this.starea="Off";
}
public String getLumina() {
return lumina;
}
public String getUsa() {
return usa;
}
public void luminaOn(){
if(this.usa.equals("Deschis") && this.starea.equals("On"))
{this.lumina="On";}
else this.lumina="Off";
}
public void deschideUsa(){
this.usa="Deschis";
}
public void inchideUsa(){
this.usa="Inchis";
}
}
<file_sep>package com.step.model.tehnica;
public class Aspirator {
private String culoare;
private int putere;
private String stare="Off";
private String cosGunoi="Empty";
public void setCosGunoi(int masa) {
if(masa<200)
this.cosGunoi = "Empty";
else if(masa>200 &&masa<700)
this.cosGunoi = "Medium";
else this.cosGunoi = "Full";
}
public String getCosGunoi() {
return cosGunoi;
}
public String getStare() {
return stare;
}
public void stareOff(){
this.stare="Off";
}
public void stareOn(){
this.stare="On";
}
public int getPutere() {
return putere;
}
public void setPutere(int putere) {
if(putere>1200)
this.putere=1200;
else
this.putere = putere;
}
public String getCuloare() {
return culoare;
}
public void setCuloare(String culoare) {
this.culoare = culoare;
}
}
<file_sep>package array;
import java.util.Scanner;
public class Main {
public static void main(String[] args) {
int[] a =new int[10];
Scanner sc = new Scanner(System.in);
for(int i=0;i<10;i++){
System.out.print("Dati a[" + i + "]=");
a[i] = sc.nextInt();
}
for(int i=9;i>=0;i--){
System.out.println(a[i]);
}
}
}
<file_sep>package com.step.model.tehnica;
public class FierbatorDeApa {
private String putere="2200 W";
private String stare="Off";
public String getStare() {
return stare;
}
public void stareOn() {
this.stare = "On";
}
public void stareOff() {
this.stare = "Off";
}
public String getPutere() {
return putere;
}
}
<file_sep>package com.step.calculator;
public class Calculator{
public int sum(int a,int b)
{
return (a+b);
}
public int dif(int a,int b)
{
return(a-b);
}
public int produs(int a,int b)
{
return(a*b);
}
public double inpartire(int a,int b)
{
return(Double.valueOf(a)/b);
}
}<file_sep>package com.step.model.tehnica;
public class Mixer {
private String stare="Off";
private int viteza=0;
public int getViteza() {
return viteza;
}
public void setViteza(int viteza) {
if(viteza<0)
this.viteza =0;
else if(viteza>6)
this.viteza = 6;
else
this.viteza = viteza;
}
public String getStare() {
return stare;
}
public void stareOn() {
this.stare = "On";
}
public void stareOff() {
this.stare = "Off";
}
}
<file_sep>package com.step.model.tehnica;
public class FierDeCalcat {
private String stare="Off";
private int temperatura=0;
private String aburi="Off";
public void aburiOn() {
this.aburi = "On";
}
public void aburiOff() {
this.aburi = "Off";
}
public String getAburi() {
return aburi;
}
public String getStare() {
return stare;
}
public void stareOn() {
this.stare = "On";
}
public void stareOff() {
this.stare = "Off";
}
public int getTemperatura() {
return temperatura;
}
public void setTemperatura(int temperatura){
if(temperatura<0)
this.temperatura=0;
else if(temperatura>100)
this.temperatura=100;
else this.temperatura=temperatura;
}
}
<file_sep>package com.step.calculator;
import com.step.comparator.NumbersComparator;
public class App {
public static void main(String[] args){
int a=70,b=37;
Calculator calcul=new Calculator();
System.out.println("a+b="+calcul.sum(a,b));
System.out.println("a-b="+calcul.dif(a,b));
System.out.println("a*b="+calcul.produs(a,b));
System.out.println("a/b="+calcul.inpartire(a,b));
NumbersComparator compara = new NumbersComparator();
System.out.println("Maximul este "+compara.maxim(a,b));
System.out.println("Minimul este "+compara.minim(a,b));
}
}
<file_sep>public class MultiplicativeTable {
public static void main(String[] args) {
int n=7;
for(int i=0;i<11;i++)
{
System.out.println(n*i);
}
}
}
<file_sep>package com.step.model.tehnica;
public class Aragaz {
private int nrOchi=4;
private double temperaturaOchi1=0;
private double temperaturaOchi2=0;
private double temperaturaOchi3=0;
private double temperaturaOchi4=0;
public double getTemperaturaOchi4() {
return temperaturaOchi4;
}
public void setTemperaturaOchi4(double temperaturaOchi4) {
if(temperaturaOchi4>150)
this.temperaturaOchi4 = 150;
else if(temperaturaOchi4<0)
this.temperaturaOchi4= 0;
else
this.temperaturaOchi4 = temperaturaOchi4;
}
public double getTemperaturaOchi3() {
return temperaturaOchi3;
}
public void setTemperaturaOchi3(double temperaturaOchi3) {
if(temperaturaOchi3>150)
this.temperaturaOchi3 = 150;
else if(temperaturaOchi3<0)
this.temperaturaOchi3= 0;
else
this.temperaturaOchi3 = temperaturaOchi3;
}
public int getNrOchi() {
return nrOchi;
}
public double getTemperaturaOchi1() {
return temperaturaOchi1;
}
public void setTemperaturaOchi1(double temperaturaOchi1) {
if(temperaturaOchi1>150)
this.temperaturaOchi1 = 150;
else if(temperaturaOchi1<0)
this.temperaturaOchi1 = 0;
else
this.temperaturaOchi1 = temperaturaOchi1;
}
public double getTemperaturaOchi2() {
return temperaturaOchi2;
}
public void setTemperaturaOchi2(double temperaturaOchi2) {
if(temperaturaOchi2>150)
this.temperaturaOchi2 = 150;
else if(temperaturaOchi2<0)
this.temperaturaOchi2= 0;
else
this.temperaturaOchi2 = temperaturaOchi2;
}
}
<file_sep>package com.step.model.mobila;
public class Masa {
private String culoare;
private int lungime=70;
public String getCuloare() {
return culoare;
}
public void setCuloare(String culoare) {
this.culoare = culoare;
}
public int getLungime() {
return lungime;
}
public void setLungime(int lungime) {
if(lungime<70)
this.lungime=70;
else if(lungime>246)
this.lungime=246;
else
this.lungime = lungime;
}
}
|
d838a64c4214a8d2512644c96f3687ec1bcb9653
|
[
"Java"
] | 19
|
Java
|
radvanall/temapentruacasa
|
d60b7a4de386a49e23a16bd9c14db91a7144bf99
|
5efde774271e4678ade51818a43bbcf9432e8462
|
refs/heads/master
|
<repo_name>matthewwier/java-design-patterns<file_sep>/combinator/src/main/java/com/iluwatar/combinator/GoldenLynx.java
package com.iluwatar.combinator;
public enum GoldenLynx {
BlueStoneOfTreason, GreenStoneOfJudgement, RedStoneOfFaith;
}
<file_sep>/combinator/src/main/java/com/iluwatar/combinator/ButterFlyLampSet.java
package com.iluwatar.combinator;
import java.util.function.Function;
public interface ButterFlyLampSet extends Function<Warrior, Boolean> {
static ButterFlyLampSet hasBlueEye(){
return warrior -> warrior.getListOfButterflyLamps().contains(ButterflyLamp.BlueEye);
}
static ButterFlyLampSet hasRedEye(){
return warrior -> warrior.getListOfButterflyLamps().contains(ButterflyLamp.RedEye);
}
static ButterFlyLampSet hasGreenEye(){
return warrior -> warrior.getListOfButterflyLamps().contains(ButterflyLamp.GreenEye);
}
default ButterFlyLampSet and(ButterFlyLampSet other){
return warrior -> this.apply(warrior) && other.apply(warrior);
}
}
<file_sep>/combinator/README.md
---
layout: pattern # layout must always be pattern
title: Combinator # the properly formatted title
folder: combinator # the folder name in which this pattern lies
permalink: /patterns/combinator/ # the permalink to the pattern, to keep this uniform please stick to /patterns/FOLDER/
categories: Behavioral # categories of the pattern
tags: # tags of the pattern
- Java
- Difficulty-Intermediate
---
## Intent
The Combinator pattern is used to enable detailed validation of objects using functional programming.

## Applicability
Use the Combinator pattern when
* You want to combine primitives into more complex structures
* You want to strengthen the Single Responsibility Principle
## Related patterns
* Specification pattern
<file_sep>/combinator/src/main/java/com/iluwatar/combinator/Beerstein.java
package com.iluwatar.combinator;
public enum Beerstein {
GreenCatseye, RedCatseye, YellowCatseye;
}
<file_sep>/combinator/src/main/java/com/iluwatar/combinator/GoldenLynxSet.java
package com.iluwatar.combinator;
import java.util.function.Function;
public interface GoldenLynxSet extends Function<Warrior, Boolean> {
static GoldenLynxSet hasBlueStoneOfTreason(){
return warrior -> warrior.getListOfGoldenLynxes().contains(GoldenLynx.BlueStoneOfTreason);
}
static GoldenLynxSet hasGreenStoneOfJudgement(){
return warrior -> warrior.getListOfGoldenLynxes().contains(GoldenLynx.GreenStoneOfJudgement);
}
static GoldenLynxSet hasRedStoneOfFaith(){
return warrior -> warrior.getListOfGoldenLynxes().contains(GoldenLynx.RedStoneOfFaith);
}
default GoldenLynxSet and(GoldenLynxSet other){
return warrior -> this.apply(warrior) && other.apply(warrior);
}
}
|
572bef88ba141912d048b5a84df98d3ad763974d
|
[
"Markdown",
"Java"
] | 5
|
Java
|
matthewwier/java-design-patterns
|
a1984d644172e614ce74ef4828a6536291ec0d13
|
fad17f425daebeaedb897de829d51d468056da51
|
refs/heads/master
|
<file_sep>
int busca (int x[],int qtd, int num){
/* base case first, so x[qtd-1] is never read out of bounds */
if(qtd == 0){
return 0;
}
else if(x[qtd-1] == num){
return 1;
}
else {
return busca(x,qtd-1,num);
}
}
#include <stdio.h>
int main(int argc, char **argv)
{
int i[3],n;
i[0] = 2;
i[1] = 4;
i[2] = 1;
scanf("%d",&n);
if( busca(i,3,n) == 1){
printf("Encontrado \n");
}
else {
printf("Nao encontrado \n");
}
return 0;
}
<file_sep>#include <stdio.h>
int numero(int num, int busca){
int soma = 0;
int numisolado = num%10;
if (num <= 0) {
return 0;
}
if(numisolado == busca){
soma = 1;
}
return soma + numero(num/10, busca);
}
int main(int argc, char **argv)
{
int i,n;
scanf("%d",&i);
scanf("%d",&n);
printf("%d", numero(i,n));
return 0;
}
<file_sep>
#include <stdio.h>
void ordem(int a, int b, int c){
if (a > b && a > c) {
if ( b > c) {
printf("%d \n", c);
printf("%d \n", b);
printf("%d \n", a);
}
else {
printf("%d \n", b);
printf("%d \n", c);
printf("%d \n", a);
}
}
else if (b > a && b > c) {
if ( a > c) {
printf("%d \n", c);
printf("%d \n", a);
printf("%d \n", b);
}
else {
printf("%d \n", a);
printf("%d \n", c);
printf("%d \n", b);
}
}
else if (c > a && c > b) {
if ( b > a) {
printf("%d \n", a);
printf("%d \n", b);
printf("%d \n", c);
}
else {
printf("%d \n", b);
printf("%d \n", a);
printf("%d \n", c);
}
}
}
int main(int argc, char **argv)
{
int x,y,z;
scanf("%d",&x);
scanf("%d",&y);
scanf("%d",&z);
ordem(x,y,z);
return 0;
}
<file_sep>
#include <stdio.h>
void verificar(int dia, int mes, int ano){
if(dia <= 31 && dia > 0){
if (dia > 30 && (mes == 4 || mes == 6 || mes == 9 || mes == 11)) {
printf("DAVA INVALIDA \n");
}
else if (dia > 28 && mes == 2){
printf("DAVA INVALIDA \n");
}
else {
printf("DAVA VALIDA \n");
}
}
else {
printf("DAVA INVALIDA \n");
}
}
int main(int argc, char **argv)
{
int x,y,z;
scanf("%d",&x);
scanf("%d",&y);
scanf("%d",&z);
verificar(x,y,z);
return 0;
}
<file_sep>#include <stdio.h>
int maior (int x[], int n){
int m;
if(n == 1){
m = x[0];
}
else {
m = maior(x,n-1);
if(x[n-1] > m){
m = x[n-1];
}
}
return m;
}
int main(int argc, char **argv)
{
int i[3];
i[0] = 2;
i[1] = 4;
i[2] = 1;
printf("%d",maior(i,3));
return 0;
}
<file_sep>#include <ctype.h>
#include <string.h>
#include <stdio.h>
void zenitpolar(char frase[]){
int i = 0, j = strlen(frase);
for (i;i<j;i++){
if (frase[i] == 'z'){ frase[i] = 'p';}
else if (frase[i] == 'e'){ frase[i] = 'o';}
else if (frase[i] == 'n'){ frase[i] = 'l';}
else if (frase[i] == 'i'){ frase[i] = 'a';}
else if (frase[i] == 't'){ frase[i] = 'r';}
else if (frase[i] == 'p'){ frase[i] = 'z';}
else if (frase[i] == 'o'){ frase[i] = 'e';}
else if (frase[i] == 'l'){ frase[i] = 'n';}
else if (frase[i] == 'a'){ frase[i] = 'i';}
else if (frase[i] == 'r'){ frase[i] = 't';}
else if (frase[i] == 'Z'){ frase[i] = 'P';}
else if (frase[i] == 'E'){ frase[i] = 'O';}
else if (frase[i] == 'N'){ frase[i] = 'L';}
else if (frase[i] == 'I'){ frase[i] = 'A';}
else if (frase[i] == 'T'){ frase[i] = 'R';}
else if (frase[i] == 'P'){ frase[i] = 'Z';}
else if (frase[i] == 'O'){ frase[i] = 'E';}
else if (frase[i] == 'L'){ frase[i] = 'N';}
else if (frase[i] == 'A'){ frase[i] = 'I';}
else if (frase[i] == 'R'){ frase[i] = 'T';}
}
printf("%s",frase);
}
int main(int argc, char **argv)
{
char frase[999];
gets(frase);
zenitpolar(frase);
return 0;
}
<file_sep>===== C exercise lists for the Algorithms course =============
===== WORKING ON LIST 6 ===========
Question 1: DONE
Question 2: DONE
Question 3: ANSWERED IN QUESTION 4
Question 4: DONE
Question 5: DONE
Any questions about the exercises, just ask me.
===== WORKING ON LIST 6 ===========
<file_sep>#ifndef Divisores_divisores_h
#define Divisores_divisores_h
#include <stdio.h>
int gera_divisores(unsigned long int num, FILE * arquivo);
#endif
<file_sep>
#include <stdio.h>
int menor(int a, int b){
int menor;
if(a<b){
menor = a;
}
else {
menor = b;
}
return menor;
}
int main(int argc, char **argv)
{
int x,y;
scanf("%d",&x);
scanf("%d",&y);
printf("%d",menor(x,y));
return 0;
}
<file_sep>
#include <stdio.h>
#include <math.h> /* needed for pow() */
int main(int argc, char **argv)
{
double x,y;
scanf("%lf",&x);
scanf("%lf",&y);
double a = pow(x,y);
printf("%lf",a);
return 0;
}
<file_sep>#include <stdio.h>
void situacao(float a, float b){
float media = ((a*2) + (b*3))/5;
if (media < 6 && media > 3){
printf("RECUPERACAO \n");
}
else if (media > 6 ){
printf("APROVADO \n");
}
else if (media < 3 ){
printf("REPROVADO \n");
}
}
int main(int argc, char **argv)
{
float x,y;
scanf("%f",&x);
scanf("%f",&y);
situacao(x,y);
return 0;
}
<file_sep>
#include <stdio.h>
int fibonacci(int n){
if(n < 2){
return n;
}
return fibonacci(n -1) + fibonacci(n - 2);
}
int main(int argc, char **argv)
{
int numero;
scanf("%d", &numero);
printf("%d",fibonacci(numero));
return 0;
}
<file_sep>#include <ctype.h>
#include <string.h>
#include <stdio.h>
void zenitpolar(FILE * zenit) {
char c;
while( (c = fgetc(zenit))!= EOF){
/* apply the zenit-polar substitution first, then print the converted character */
if (c == 'z'){ c = 'p';}
else if (c == 'e'){ c = 'o';}
else if (c == 'n'){ c = 'l';}
else if (c == 'i'){ c = 'a';}
else if (c == 't'){ c = 'r';}
else if (c == 'p'){ c = 'z';}
else if (c == 'o'){ c = 'e';}
else if (c == 'l'){ c = 'n';}
else if (c == 'a'){ c = 'i';}
else if (c == 'r'){ c = 't';}
else if (c == 'Z'){ c = 'P';}
else if (c == 'E'){ c = 'O';}
else if (c == 'N'){ c = 'L';}
else if (c == 'I'){ c = 'A';}
else if (c == 'T'){ c = 'R';}
else if (c == 'P'){ c = 'Z';}
else if (c == 'O'){ c = 'E';}
else if (c == 'L'){ c = 'N';}
else if (c == 'A'){ c = 'I';}
else if (c == 'R'){ c = 'T';}
printf("%c",c);
}
}
int main(int argc, char **argv)
{
FILE * zenitpolars;
char arquivo[999]; /* zp[3] = ".zp"; */
gets(arquivo);
/*
int i = 0, j = strlen(arquivo);
for (i;i<j;i++){
if (arquivo[i] == 'z'){ arquivo[i] = 'p';}
else if (arquivo[i] == 'e'){ arquivo[i] = 'o';}
else if (arquivo[i] == 'n'){ arquivo[i] = 'l';}
else if (arquivo[i] == 'i'){ arquivo[i] = 'a';}
else if (arquivo[i] == 't'){ arquivo[i] = 'r';}
else if (arquivo[i] == 'p'){ arquivo[i] = 'z';}
else if (arquivo[i] == 'o'){ arquivo[i] = 'e';}
else if (arquivo[i] == 'l'){ arquivo[i] = 'n';}
else if (arquivo[i] == 'a'){ arquivo[i] = 'i';}
else if (arquivo[i] == 'r'){ arquivo[i] = 't';}
else if (arquivo[i] == 'Z'){ arquivo[i] = 'P';}
else if (arquivo[i] == 'E'){ arquivo[i] = 'O';}
else if (arquivo[i] == 'N'){ arquivo[i] = 'L';}
else if (arquivo[i] == 'I'){ arquivo[i] = 'A';}
else if (arquivo[i] == 'T'){ arquivo[i] = 'R';}
else if (arquivo[i] == 'P'){ arquivo[i] = 'Z';}
else if (arquivo[i] == 'O'){ arquivo[i] = 'E';}
else if (arquivo[i] == 'L'){ arquivo[i] = 'N';}
else if (arquivo[i] == 'A'){ arquivo[i] = 'I';}
else if (arquivo[i] == 'R'){ arquivo[i] = 'T';}
}
*/
zenitpolars = fopen(arquivo,"r+");
if (zenitpolars == NULL){
printf("ERROR: Não foi possível abrir o arquivo.\n");
return 1;
}
zenitpolar(zenitpolars);
fclose(zenitpolars);
return 0;
}
<file_sep>#include <stdio.h>
#include <stdlib.h>
#include <string.h>
int main(int argc, char **argv) {
FILE *agenda;
char nome[80], telefone[20];
agenda = fopen("agenda.txt", "w");
fputs("*** Agenda de Contatos ***\n", agenda);
do {
printf("Nome:");
gets(nome);
printf("telefone: ");
gets(telefone);
if (strcmp(nome, "fim") != 0) {
fprintf(agenda, "%s:%s\n", nome, telefone);
}
}while (strcmp(nome, "fim") != 0);
fclose(agenda);
return 0;
}
<file_sep>#include <stdlib.h>
#include <stdio.h>
int * MaiorMenor (int a[], int qnt){
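/* recursively finds the minimum (array[0]) and maximum (array[1]) of the first qnt elements */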
int * array = (int*) malloc (2*sizeof(int));
if(qnt == 1){
array[0] = a[0];
array[1] = a[0];
}
else{
array = MaiorMenor(a,qnt-1);
if(a[qnt-1] < array[0]){
array[0] = a[qnt-1];
}
if(a[qnt-1] > array[1]){
array[1] = a[qnt-1];
}
}
return array;
}
int main(int argc, char **argv)
{
int a[5] = {3,2,4,6,1};
int * ponteiro;
ponteiro = MaiorMenor(a,5);
printf("%d \n",ponteiro[0]);
printf("%d \n",ponteiro[1]);
return 0;
}
<file_sep>#include <stdio.h>
void procurarcep (char cep[], FILE * arquivo){
int c; /* int, so EOF can be detected */
int cont = 0;
int cont2 = 0; /* variable used to advance through the positions of the file */
int i = 0;
while( (c = fgetc(arquivo))!= EOF ){ /* assign each character of the file to the variable "c" */
if (cep[i] == c) { /* if the character matches the current digit of the requested CEP, increase the count and advance "i" to the next digit of the CEP */
cont++;
i++;
}
if ( c == ':' && cont == 9){
fseek(arquivo, cont2 + 1, SEEK_SET ); /* when the ':' character is found and cont equals 9, start reading the file again from the recorded position,
adding +1 to skip past the ':' */
break;
}
if (c == '\n'|| c == ':') { /* when one of these characters is found, reset the counters */
i = 0;
cont = 0;
}
if (c == '\n'){ /* a line break is not counted as an advance in the file, so this compensates for it */
cont2++; /* if you are on Linux you can ignore this line */
}
cont2++; /* keep advancing the position in the file */
}
if (cont == 9) {
while((c = fgetc(arquivo))!= EOF ){ /* with the file repositioned where indicated, just print everything that follows until a line break appears */
putchar(c);
if (c == '\n'){
break;
}
}
}
}
int main(int argc, char **argv)
{
FILE * enderecos;
enderecos = fopen( argv[2],"r");
if (enderecos == NULL){
printf("ERROR: Não foi possível abrir o arquivo.\n");
return 1;
}
procurarcep(argv[1], enderecos);
fclose(enderecos);
return 0;
}
<file_sep>
#include <ctype.h> /* ispunct() */
#include <string.h>
#include <stdio.h>
int main(int argc, char **argv)
{
char nome[999];
scanf("%s",nome);
int i = strlen(nome);
int n =0,cont=0;
char x[i + 1];
int f = 0;
for (n;n<i;n++) {
if(ispunct(nome[n]) == 0){
x[f] = nome[n];
cont++;
f++;
}
}
x[f] = '\0'; /* terminate the filtered copy before using it as a string */
printf("%d \n",(int)strlen(x));
printf("%d",i);
printf("%d",cont);
printf("%s",x);
return 0;
}
<file_sep>#include <string.h>
#include <stdio.h>
#include <stdlib.h>
struct ListaDuplaNo {
int x;
struct ListaDuplaNo *proximo,*anterior;
};
struct ListaDupla {
struct ListaDuplaNo *inicio,*fim;
};
void inserirMeio(struct ListaDupla * lista, int n){ /*ARRUMADO!!! */
struct ListaDuplaNo *aux; /* Criando uma Auxiliar*/
aux = lista->inicio; /* Apontando a aux para o inicio para percorrer*/
int cont = 0;
int x = 0;
while (aux != NULL){
aux = aux->proximo;
cont++; /* Contando quantos elementos tem */
}
if (cont % 2 == 0){ /* Checando a metade da lista*/
cont = cont/2 - 1;
}
else {
cont = (cont + 2)/2 - 1;
}
free(aux); /*Liberando Aux ja que não vai mais usar*/
struct ListaDuplaNo * aux2; /* Criando outra auxiliar para fazer outra busca*/
aux2 = lista->inicio;
while (x != cont){ /*Vai parar exatamente na metade*/
x++;
aux2 = aux2->proximo;
}
/* Adicionando o novo elemento na metade da lista*/
struct ListaDuplaNo *novo;
novo = (struct ListaDuplaNo*)malloc(sizeof(struct ListaDuplaNo));
novo->x = n;
novo->proximo = aux2->proximo;
novo->anterior = aux2;
aux2->proximo->anterior = novo;
aux2->proximo = novo;
}
void Inserir (struct ListaDupla * lista, int n){ /*Funcionando */
struct ListaDuplaNo *novo;
novo = (struct ListaDuplaNo*)malloc(sizeof(struct ListaDuplaNo));
novo->x = n;
if(lista->inicio == NULL){ /*NÃO SEI PQ ESSA CONDIÇÃO NÃO FUNCIONA */
novo->x = n;
novo->anterior = NULL;
lista->inicio = novo;
lista->fim = novo;
lista->fim->proximo = NULL;
}
else{
novo = (struct ListaDuplaNo*)malloc(sizeof(struct ListaDuplaNo));
novo->x = n;
novo->anterior = lista->fim;
lista->fim->proximo = novo;
lista->fim = novo;
lista->fim->proximo = NULL;
}
}
/*CRIEI POIS FUNÇÃO INSERIR NAO ENTRA NO IF INICIAL */
void CriarLista (struct ListaDupla * lista, int n){ /*Funcionando */
struct ListaDuplaNo *novo;
novo = (struct ListaDuplaNo*)malloc(sizeof(struct ListaDuplaNo));
novo->x = n;
novo->anterior = NULL;
lista->inicio = novo;
lista->fim = novo;
lista->fim->proximo = NULL;
}
void ListarElementos (struct ListaDupla * lista){ /*Funcionando */
struct ListaDuplaNo *aux;
aux = lista->inicio;
while(aux != NULL){
printf("NUMERO: %d \n",aux->x);
aux = aux->proximo;
}
}
void inserirOrdenado(struct ListaDupla * lista, int n) { /*FUNCIONANDO*/
if(lista->inicio == NULL){
CriarLista(lista,n);
}
else{
struct ListaDuplaNo *aux;
aux = lista->inicio;
struct ListaDuplaNo *novo;
novo = (struct ListaDuplaNo*)malloc(sizeof(struct ListaDuplaNo));
if (n > lista->fim->x){
novo->x = n;
novo->proximo = NULL;
novo->anterior = lista->fim;
lista->fim->proximo = novo;
}
else {
while (aux != NULL) {
if (n < lista->inicio->x ){
novo->x = n;
novo->proximo = lista->inicio;
novo-> anterior = NULL;
lista->inicio->anterior = novo;
lista->inicio = novo;
break;
}
else if(n > aux->anterior->x && n < aux->proximo->x){
novo->x = n;
novo->proximo = aux->proximo;
novo->anterior = aux;
aux->proximo->anterior = novo;
aux->proximo = novo;
break;
}
aux = aux->proximo;
}
}
}
}
void inverter(struct ListaDupla * lista){
struct ListaDupla *listan;
listan = (struct ListaDupla*)malloc(sizeof(struct ListaDupla));
struct ListaDuplaNo *aux;
aux = lista->fim;
int x = 0;
while(aux!=NULL){
if(x==0){
CriarLista (listan, aux->x);
aux = aux->anterior;
x++;
}
else {
Inserir(listan, aux->x);
aux = aux->anterior;
}
}
*lista = *listan;
free(listan);
}
void removerDuplicados(struct ListaDupla * lista) { /* NÃO FUNCIONA */
struct ListaDuplaNo *aux, *aux2, *aux3;
aux = lista->inicio;
while(aux != NULL){
aux2 = aux->proximo;
while(aux2!=NULL){
if (aux->x == aux2->x){
aux3 = aux2;
aux2 = aux2->proximo;
aux3->proximo->anterior = aux3->anterior;
aux3->anterior = aux3->proximo;
free(aux3);
aux3 = NULL;
}
else if (aux->x == aux2->x && aux2 == lista->fim){
aux3 = aux2;
aux2 = aux2->proximo;
aux3->anterior->proximo = NULL;
free(aux3);
aux3 = NULL;
}
else {
aux2 = aux2->proximo;
}
}
aux = aux->proximo;
}
}
struct ListaDupla * moverNumerosMaioresQueN(struct ListaDupla * original, int n){
struct ListaDupla *listan;
listan = (struct ListaDupla*)malloc(sizeof(struct ListaDupla));
struct ListaDuplaNo *aux;
aux = original->inicio;
int x = 0;
while (aux!=NULL){
if (aux->x > n && x == 0){
CriarLista (listan, aux->x);
aux = aux->proximo;
x++;
}
else if (aux->x > n && x!= 0) {
Inserir(listan, aux->x);
aux = aux->proximo;
}
else {
aux = aux->proximo;
}
}
return listan;
}
void ordenar(struct ListaDupla * lista){
struct ListaDuplaNo *aux;
struct ListaDupla *listan;
listan = (struct ListaDupla*)malloc(sizeof(struct ListaDupla));
aux = lista->inicio;
listan->inicio = NULL;
while(aux != NULL){
inserirOrdenado(listan,aux->x);
aux = aux->proximo;
}
*lista = *listan;
free(listan);
}
int main(int argc, char **argv)
{
struct ListaDupla *lista;
lista = (struct ListaDupla*)malloc(sizeof(struct ListaDupla));
lista->inicio = NULL; /*GAMBIARRA?!?!?! */
int op = 1;
int x;
while(op != 0){
printf("\n DIGITE 1 PARA CRIAR NOVA LISTA \n DIGITE 2 PARA INSERIR NOVO ELEMENTO NA LISTA \n DIGITE 3 PARA INSERIR NO MEIO DA LISTA \n DIGITE 4 PARA LISTAR OS ELEMENTOS \n DIGITE 5 PARA COLOCAR UM ELEMENTO ORDENADO NA LISTA \n DIGITE 6 PARA INVERTER A LISTA \n DIGITE 7 PARA REMOVER REPETIDOS \n DIGITE 8 PARA SEPARAR OS NUMERO MAIORES QUE N \n DIGITE 9 PARA ORDENAR A LISTA \n DIGITE 0 PARA SAIR \n");
scanf("%d",&op);
switch(op) {
case 1:
printf("DIGITE O NUMERO DO PRIMEIRO ELEMENTO DA LISTA \n");
scanf("%d",&x);
CriarLista(lista,x); /*Funcionando */ /*DESCONSIDERAR*/
break;
case 2:
printf("DIGITE O NUMERO A SER INSERIDO NA LISTA \n");
scanf("%d",&x);
Inserir(lista,x); /*Funcionando */
break;
case 3:
printf("DIGITE O NUMERO A SER INSERIDO NO MEIO DA LISTA \n");
scanf("%d",&x);
inserirMeio(lista,x); /*ARRUMADO!!!*/
break;
case 4:
ListarElementos(lista); /*Funcionando */
break;
case 5:
printf("DIGITE O NUMERO A SER INSERIDO NO MEIO DA LISTA NA ORDEM \n");
scanf("%d",&x);
inserirOrdenado(lista,x); /*ARRUMADO!!!*/
break;
case 6:
printf("INVERTENDO A LISTA \n"); /*FUNCIONANDO!*/
inverter(lista);
break;
case 7:
printf("TIRANDO OS REPETIDOS \n"); /*PROBLEMA*/
removerDuplicados(lista);
break;
case 8:
printf("RETORNANDO UMA LISTA COM OS NUMERO MAIORES QUE N: DIGITE N \n"); /*FUNCIONANDO*/
scanf("%d",&x);
ListarElementos(moverNumerosMaioresQueN(lista,x));
break;
case 9:
printf("ORDENANDO A LISTA \n"); /*NÃO FUNCIONA*/
ordenar(lista);
break;
};
}
return 0;
}
<file_sep>
#include <stdio.h>
void mdc(int a, int b){
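/* Euclid's algorithm: repeatedly replace (z, w) with (w, z mod w) until the remainder is zero; z is then the gcd */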
int maior,menor,z,w,resto;
if( a > b) {
maior = a;
menor = b;
}
else {
maior = b;
menor = a;
}
z = maior;
w = menor;
while (w != 0){
resto = z % w;
z = w;
w = resto;
}
printf("%d",z);
}
int main(int argc, char **argv)
{
int x,y;
scanf("%d",&x);
scanf("%d",&y);
mdc(x,y);
return 0;
}
<file_sep>
#include <stdio.h>
void verificar (int a){
int soma = 0;
if (a > 0){
int i = 2;
for(i; i<a; i++){
printf ("%d \n",i);
soma = i + soma;
}
printf("%d \n",soma);
}
else {
printf ("NEGATIVO \n");
}
}
int main(int argc, char **argv)
{
int x;
scanf("%d",&x);
verificar(x);
return 0;
}
<file_sep>
#include <stdio.h>
int soma(int a, int b){
int resultado = a + b;
return resultado;
}
int main(int argc, char **argv)
{
int x,y;
scanf("%d",&x);
scanf("%d",&y);
printf("%d",soma(x,y));
return 0;
}
<file_sep>#include <stdio.h>
void contarlinhas(FILE * arquivo) {
char c;
int cont = 1;
while( (c = fgetc(arquivo))!= EOF ){
if (c == '\n') {
cont++;
}
}
printf("%d",cont);
}
int main(int argc, char **argv)
{
FILE *linhas;
linhas = fopen("linhas.txt", "r");
if(linhas == NULL)
printf("Erro, nao foi possivel abrir o arquivo\n");
else
contarlinhas(linhas);
return 0;
}
<file_sep>/*
* poema.c
*
* Copyright 2014 X-pLoDe <X-pLoDe@X-PLODE-PC>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
* MA 02110-1301, USA.
*
*
*/
#include <stdio.h>
int main(int argc, char **argv)
{
int ch; /* int so that EOF can be detected reliably */
FILE *arq;
arq = fopen("poema.txt", "r");
if(arq == NULL)
printf("Erro, nao foi possivel abrir o arquivo\n");
else
while( (ch=fgetc(arq))!= EOF )
putchar(ch);
fclose(arq);
return 0;
}
<file_sep>
#include <stdio.h>
float converter(float a){
float resultado = (((a - 32) * 5 ) / 9);
return resultado;
}
int main(int argc, char **argv)
{
float x;
scanf("%f",&x);
printf("%f", converter(x));
return 0;
}
<file_sep>
#include <ctype.h>
#include <string.h>
#include <stdio.h>
void palavras(char frase[]){
int i = 0;
int j = strlen(frase);
int cont = 0;
for (i;i<j;i++){
if(isspace(frase[i])){
cont++;
}
}
cont++;
printf ("%d",cont);
}
int main(int argc, char **argv)
{
char frase[999];
gets(frase);
palavras(frase);
return 0;
}
<file_sep>
#include <stdio.h>
void media(double a[], int b){
double media = 0;
int i = 0,contm = 0,contmn = 0;
for(i; i<b;i++){
media = a[i] + media;
}
media = media / b;
i = 0;
printf("%lf \n",media);
for(i; i<b;i++){
if(a[i] > media){
printf("%lf \n",a[i]);
contm++;
}
}
printf("%d MAIORES \n",contm);
i = 0;
for(i; i<b;i++){
if(a[i] < media){
printf("%lf \n",a[i]);
contmn++;
}
}
printf("%d MENORES \n",contmn);
}
int main(int argc, char **argv)
{
int x=0,i=0;
printf("DIGITE QUANTOS NUMEROS LER \n");
scanf("%d",&x);
double y[x];
for(i; i<x;i++){
scanf("%lf",&y[i]);
}
media(y,x);
return 0;
}
<file_sep>#include <stdio.h>
void diferenca(int i[], int a){
int diferenca= -99999999;
int x = 0;
for (x;x<a;x++){
if( (i[x] - i[x+1]) > diferenca) {
diferenca = i[x] - i[x+1];
}
}
printf("%d ccc \n",diferenca);
}
int main(int argc, char **argv)
{
int a[1000];
int i = 0;
int n = sizeof(a) / sizeof(a[0]);
for (i;i<n;i++){
scanf("%d",&a[i]);
}
diferenca(a,n);
return 0;
}
<file_sep>#include <stdio.h>
#include "divisores.h"
int gera_divisores(unsigned long int num, FILE * arquivo){
int i = 1;
fprintf(arquivo, "Divisores de %ld: \n", num);
for(i;i<=num;i++){
if(num % i == 0){
fprintf(arquivo, "%d \n", i);
}
}
return 0;
}
<file_sep>
#include <stdio.h>
void primo(int a){
int cont = 0;
int i = 2;
for (i;i<=a;i++){
if (a % i == 0) {
cont++;
}
}
if (cont < 2){
printf("PRIMO \n");
}
else { printf("NAO E PRIMO \n"); }
}
int main(int argc, char **argv)
{
int x;
scanf("%d",&x);
primo(x);
return 0;
}
<file_sep>
int somae (int x[], int n){
int soma = x[n];
if (n == 0){
return x[0];
}
else {
return soma + somae(x,n-1);
}
}
#include <stdio.h>
int main(int argc, char **argv)
{
int i[3];
i[0] = 2;
i[1] = 4;
i[2] = 1;
printf("%d",somae(i,3));
return 0;
}
<file_sep>
#include <stdio.h>
float media(float a, float b){
float media = ((a*2) + (b*3))/5;
return media;
}
int main(int argc, char **argv)
{
float x,y;
scanf("%f",&x);
scanf("%f",&y);
printf("%f \n", media(x,y));
return 0;
}
<file_sep>
#include <stdio.h>
void massa(double a, int x){
double massa = a;
int tempo = x;
int tempot = 0;
while (massa > 1){
massa = massa/2;
tempot = tempot + tempo;
}
printf ("%d \n",tempot);
}
int main(int argc, char **argv)
{
double x;
int y;
scanf("%lf",&x);
scanf("%d",&y);
massa(x,y);
return 0;
}
<file_sep>#include <ctype.h>
#include <string.h>
#include <stdio.h>
void palindromo(char palavra[]){
int i = 0, f = 0;
int j = strlen(palavra);
char x[j + 1];
for (i;i<j;i++) {
if(ispunct(palavra[i]) == 0){
x[f] = palavra[i];
f++;
}
}
x[f] = '\0'; /* terminate the filtered copy before treating it as a string */
int ss = f;
char k[ss + 1];
strcpy(k,x);
i = 0;
if (ss % 2 == 0) {
int z = ss /2;
int y = 1;
char aux;
for (i;i<z;i++){
aux = x[i];
x[i] = x[ss - y];
x[ss - y] = aux;
y++;
}
}
else {
int z = (ss - 1) / 2;
int y = 1;
char aux;
for (i;i<z;i++){
aux = x[i];
x[i] = x[ss - y];
x[ss - y] = aux;
y++;
}
}
if ( strcmp(x,k) == 0) {
printf("PALINDROMO \n");
}
else {
printf("NAO E PALINDROMO \n");
}
}
int main(int argc, char **argv)
{
char nome[999];
scanf("%s",nome);
palindromo(nome);
return 0;
}
<file_sep>
#include <stdio.h>
#include <stdlib.h>
struct ListaDuplaNo {
int x;
struct ListaDuplaNo *proximo,*anterior;
};
struct ListaDupla {
struct ListaDuplaNo *inicio,*fim;
};
void inserirMeio(struct ListaDupla * lista, int n){ /*ARRUMADO!!! */
struct ListaDuplaNo *aux; /* Criando uma Auxiliar*/
aux = lista->inicio; /* Apontando a aux para o inicio para percorrer*/
int cont = 0;
int x = 0;
while (aux != NULL){
aux = aux->proximo;
cont++; /* Contando quantos elementos tem */
}
if (cont % 2 == 0){ /* Checando a metade da lista*/
cont = cont/2 - 1;
}
else {
cont = (cont + 2)/2 - 1;
}
free(aux); /*Liberando Aux ja que não vai mais usar*/
struct ListaDuplaNo * aux2; /* Criando outra auxiliar para fazer outra busca*/
aux2 = lista->inicio;
while (x != cont){ /*Vai parar exatamente na metade*/
x++;
aux2 = aux2->proximo;
}
/* Adicionando o novo elemento na metade da lista*/
struct ListaDuplaNo *novo;
novo = (struct ListaDuplaNo*)malloc(sizeof(struct ListaDuplaNo));
novo->x = n;
novo->proximo = aux2->proximo;
novo->anterior = aux2;
aux2->proximo->anterior = novo;
aux2->proximo = novo;
}
void Inserir (struct ListaDupla * lista, int n){ /*Funcionando */
struct ListaDuplaNo *novo;
novo = (struct ListaDuplaNo*)malloc(sizeof(struct ListaDuplaNo));
novo->x = n;
novo->proximo = NULL;
novo->anterior = lista->fim;
lista->fim->proximo = novo;
lista->fim = novo;
}
void CriarLista (struct ListaDupla * lista, int n){ /*Funcionando */
struct ListaDuplaNo *novo;
novo = (struct ListaDuplaNo*)malloc(sizeof(struct ListaDuplaNo));
novo->x = n;
novo->proximo = NULL;
novo->anterior = NULL;
lista->inicio = novo;
lista->fim = novo;
}
void ListarElementos (struct ListaDupla * lista){ /*Funcionando */
struct ListaDuplaNo *aux;
aux = lista->inicio;
while(aux != NULL){
printf("NUMERO: %d \n",aux->x);
aux = aux->proximo;
}
}
int main(int argc, char **argv)
{
struct ListaDupla *lista;
lista = (struct ListaDupla*)malloc(sizeof(struct ListaDupla));
int op = 1;
int x;
while(op != 0){
printf("\n DIGITE 1 PARA CRIAR NOVA LISTA \n DIGITE 2 PARA INSERIR NOVO ELEMENTO NA LISTA \n DIGITE 3 PARA INSERIR NO MEIO DA LISTA \n DIGITE 4 PARA LISTAR OS ELEMENTOS \n DIGITE 0 PARA SAIR \n");
scanf("%d",&op);
switch(op) {
case 1:
printf("DIGITE O NUMERO DO PRIMEIRO ELEMENTO DA LISTA \n");
scanf("%d",&x);
CriarLista(lista,x); /*Funcionando */
break;
case 2:
printf("DIGITE O NUMERO A SER INSERIDO NA LISTA \n");
scanf("%d",&x);
Inserir(lista,x); /*Funcionando */
break;
case 3:
printf("DIGITE O NUMERO A SER INSERIDO NO MEIO DA LISTA \n");
scanf("%d",&x);
inserirMeio(lista,x); /*ARRUMADO!!!*/
break;
case 4:
ListarElementos(lista); /*Funcionando */
break;
};
}
return 0;
}
| d364c33efb7df8db36894e1ef79b41f992bbd70f | ["Markdown", "C"] | 34 | C | jeffersonxplode/C | 4c8b84844c20b26b55c728c56bed9a0a6402119c | 72b3dcc28fda24b8abe9c771b321f6fbfebbd35b | refs/heads/master |
<file_sep>//recursive version
#include<bits/stdc++.h>
#define maxn 1000
using namespace std;
int n;
struct node
{
int lson;
int rson;
}tree[maxn];
void pre_order(int x)//pre-order traversal
{
cout<<x<<" ";
if (tree[x].lson==-1&&tree[x].rson==-1) return;
if (tree[x].lson!=-1) pre_order(tree[x].lson);
if (tree[x].rson!=-1) pre_order(tree[x].rson);
}
void mid_order(int x)//in-order traversal
{
if (tree[x].lson==-1&&tree[x].rson==-1) {cout<<x<<" ";return;}
if (tree[x].lson!=-1) mid_order(tree[x].lson);
cout<<x<<" ";
if (tree[x].rson!=-1) mid_order(tree[x].rson);
}
void last_order(int x)//post-order traversal
{
if (tree[x].lson==-1&&tree[x].rson==-1) {cout<<x<<" ";return;}
if (tree[x].lson!=-1) last_order(tree[x].lson);
if (tree[x].rson!=-1) last_order(tree[x].rson);
cout<<x<<" ";
}
int main()
{
int F,S,root;
cin>>n;
for (int i=1;i<maxn;i++)
{
tree[i].lson=-1;
tree[i].rson=-1;
}
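//each edge is given as "side parent child": side==1 attaches child as the left son, otherwise as the right son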
for (int i=1;i<=n;i++)
{
int s_size;
scanf("%d%d%d",&s_size,&F,&S);
if (s_size==1)
tree[F].lson=S;
else tree[F].rson=S;
}
cin>>root;
cout<<"pre-order:";
pre_order(root);
cout<<endl;
cout<<"mid-order:";
mid_order(root);
cout<<endl;
cout<<"last-order:";
last_order(root);
}
//non-recursive versions using an explicit stack
/*
#include<bits/stdc++.h>
#define maxn 1000
using namespace std;
int n;
struct node
{
int num;
int lson;
int rson;
bool vis;
bool pri;
}tree[maxn];
stack<node> s;
void pre_order()//先序遍历
{
while(!s.empty())
{
if (!tree[s.top().num].vis)
{
cout<<s.top().num<<" ";
tree[s.top().num].vis=true;
}
if (s.top().lson==-1&&s.top().rson==-1) s.pop();
if (s.top().lson!=-1&&!tree[s.top().lson].vis) {//tree[s.top().lson].vis=true;
s.push(tree[s.top().lson]);continue;}
if (s.top().rson!=-1&&!tree[s.top().rson].vis) {//tree[s.top().rson].vis=true;
s.push(tree[s.top().rson]);continue;}
s.pop();
}
}
void mid_order()//中序遍历
{
while(!s.empty())
{
if (s.top().lson==-1&&s.top().rson==-1) {tree[s.top().num].vis=true;cout<<s.top().num;s.pop();}
if (s.top().lson!=-1&&!tree[s.top().lson].vis) {s.push(tree[s.top().lson]);continue;}
if (!tree[s.top().num].vis)
{
cout<<s.top().num<<" ";
tree[s.top().num].vis=true;
}
if (s.top().rson!=-1&&!tree[s.top().rson].vis) {s.push(tree[s.top().rson]);continue;}
s.pop();
}
}
void last_order()//后序遍历
{
while(!s.empty())
{
if (s.top().lson==-1&&s.top().rson==-1) {tree[s.top().num].vis=true;cout<<s.top().num;s.pop();}
if (s.top().lson!=-1&&!tree[s.top().lson].vis) {tree[s.top().lson].vis=true;s.push(tree[s.top().lson]);continue;}
if (s.top().rson!=-1&&!tree[s.top().rson].vis) {tree[s.top().rson].vis=true;s.push(tree[s.top().rson]);continue;}
cout<<s.top().num<<" ";
s.pop();
}
}
int main()
{
int F,S,root;
cin>>n;
for (int i=1;i<maxn;i++)
{
tree[i].lson=-1;
tree[i].rson=-1;
tree[i].vis=false;
}
for (int i=1;i<=n;i++)
{
int s_size;
scanf("%d%d%d",&s_size,&F,&S);
if (s_size==1)
tree[F].lson=S;
else tree[F].rson=S;
tree[F].num=F;
tree[S].num=S;
}
cin>>root;
while(!s.empty()) s.pop();
s.push(tree[root]);
cout<<"pre-order:";
pre_order();
cout<<endl;
for (int i=1;i<maxn;i++)
tree[i].vis=false;
tree[root].vis=true;
s.push(tree[root]);
cout<<"last-order:";
last_order();
cout<<endl;
for (int i=1;i<maxn;i++)
tree[i].vis=false;
s.push(tree[root]);
cout<<"mid-order:";
mid_order();
}*/
/*
#include<bits/stdc++.h>
using namespace std;
#define maxn 1000
#define inf 0x3f3f3f3f
struct node
{
char name;
node * lson;
node * bro;
bool vis;
};//孩子兄弟结构结点定义
struct parentnode
{
int fa;
char c;
node * point;
};//双亲数组结构结点定义
parentnode A[maxn];
int n;
int cnt=1;
node* trans()//将双亲数组结构转变成孩子兄弟树形结构
{
node * hp,*p1,*p2;
hp=(node*)malloc(sizeof(node));
hp->vis=false;
hp->name=A[1].c;
hp->bro=NULL;
A[1].point=hp;//根节点单独判断
for (int i=2;i<=n;i++)
{
if (A[i].fa!=A[i-1].fa)
{
p1=(node*)malloc(sizeof(node));
p1->vis=false;
p1->bro=NULL;
p1->lson=NULL;
p1->name=A[i].c;
A[i].point=p1;
A[A[i].fa].point->lson=p1;
//当前结点是新一层
}
else
{
p2=(node *)malloc(sizeof(node));
p2->vis=false;
p2->bro=NULL;
p2->lson=NULL;
p2->name=A[i].c;
A[i].point=p2;
p1->bro=p2;
p1=p2;//指针移动到兄弟结点
//当前结点层数不变
}
}
return hp;
}
void finddeep(node * p,int deep)//按照孩子-兄弟结构的指针进行遍历即可
{
if (!p->vis) {cout<<p->name<<" ";p->vis=true;}
if (p->bro==NULL&&p->lson==NULL)
{
cnt=max(cnt,deep);
return;
}
if (p->lson!=NULL) {finddeep(p->lson,deep+1);if (!p->vis) {cout<<p->name<<" ";p->vis=true;}}
if (p->bro!=NULL) {finddeep(p->bro,deep);if (!p->vis) {cout<<p->name<<" ";p->vis=true;}}
return;
}
bool cmp(parentnode a, parentnode b)
{
if (a.fa<b.fa) return true;
else return false;
}
int main()
{
node * Heap;
cin>>n;
A[0].fa=-inf;
for (int i=1;i<=n;i++)
{
scanf("%d %c",&A[i].fa,&A[i].c);//输入每个结点的父亲信息和该点的标号
}
sort(A+1,A+n+1,cmp);
Heap=trans();//返回根节点指针
finddeep(Heap,1);
cout<<endl<<cnt;
return 0;
}*/
/*
#include<bits/stdc++.h>
#define maxn 1000
using namespace std;
int n;
struct node
{
int lson;
int rson;
}tree[maxn];
queue<int> s;
void level_order()//level-order (breadth-first) traversal
{
while(!s.empty())
{
cout<<(s.front())<<" ";
if (tree[s.front()].lson!=-1)
{
// cout<<"push:"<<tree[s.back()].lson<<endl;
s.push(tree[s.front()].lson);
}
if (tree[s.front()].rson!=-1)
{
// cout<<"push:"<<tree[s.back()].lson<<endl;
s.push(tree[s.front()].rson);
}
s.pop();
}
}
int main()
{
int F,S,root;
cin>>n;
for (int i=1;i<maxn;i++)
{
tree[i].lson=-1;
tree[i].rson=-1;
}
for (int i=1;i<=n;i++)
{
int s_size;
scanf("%d%d%d",&s_size,&F,&S);
if (s_size==1)
tree[F].lson=S;
else tree[F].rson=S;
//tree[F].num=F;
//tree[S].num=S;
}
cout<<"root:";
cin>>root;
while(!s.empty()) s.pop();
cout<<"level-order:";
s.push(root);
level_order();
cout<<endl;
}*/<file_sep>#include<bits/stdc++.h>
using namespace std;
#define maxn 1000
int a[maxn];
int n;
int cnt[8][2]={0};
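// cnt[k][0] counts comparisons and cnt[k][1] counts moves for algorithm k
// (1 quick, 2 heap, 3 merge, 4 radix, 5 selection, 6 insertion, 7 Shell)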
void quicksort(int l,int r)
{
int i=l,j=r,mid=a[(i+j)/2];
do
{
while(a[i]<mid) {i++;cnt[1][0]++;}
while(a[j]>mid) {j--;cnt[1][0]++;}
if (i<=j)
{
swap(a[i],a[j]);
i++;
j--;
cnt[1][1]+=3;
}
}while(i<=j);
if (i<r) quicksort(i,r);
if (l<j) quicksort(l,j);
}
void maxheapify(int s,int e)
{
int dad=s;
int son=dad*2+1;
while(son<=e)
{
cnt[2][0]++;
if (son+1<=e && a[son]<a[son+1])
{son++;cnt[2][1]++;}
if (a[dad]>a[son])
return;
else
{
swap(a[dad],a[son]);
dad=son;
son=dad*2+1;
}
}
}
void heapsort(int len)
{
for (int i=len/2-1;i>=0;i--)
maxheapify(i,len-1);
for (int i=len-1;i>0;i--)
{
swap(a[0],a[i]);
cnt[2][1]+=3;
maxheapify(0,i-1);
}
}
void Merge(int A[],int TA[],int S,int MID,int E)
{
int i=S, j=MID+1, k=S;
while(i!=MID+1 && j!=E+1)
{
cnt[3][0]++;
if (A[i]>A[j])
TA[k++]=A[j++];
else
TA[k++]=A[i++];
cnt[3][1]++;
}
while(i!=MID+1)
{
cnt[3][1]++;
TA[k++]=A[i++];
}
while(j!=E+1)
{
cnt[3][1]++;
TA[k++]=A[j++];
}
for (i=S;i<=E;i++)
{
cnt[3][1]++;
A[i]=TA[i];
}
}
void Mergesort(int A[],int TA[],int s,int e)
{
int mid;
if (s<e)
{
mid=s+(e-s)/2;
Mergesort(A,TA,s,mid);
//cnt[3][1]+=2;
Mergesort(A,TA,mid+1,e);
//cnt[3][1]+=2;
Merge(A,TA,s,mid,e);
}
}
int getkey(int x,int m)
{
return (x%(int)pow(10,m)/(int)pow(10,m-1));
}
void radixsort(int len,int key)
{
queue<int>t[10];
for (int i=0;i<10;i++)
while(!t[i].empty())
t[i].pop();
for (int k=1;k<=key;k++)
{
for (int i=0;i<len;i++)
{
cnt[4][1]++;
t[getkey(a[i],k)].push(a[i]);
}
int cnts=-1;
for (int i=0;i<10;i++)
{
while(!t[i].empty())
{
cnt[4][1]++;
a[++cnts]=t[i].front();
t[i].pop();
cnt[4][1]++;
}
}
}
}
void selectsort(int len)
{
for (int i=0;i<len-1;i++)
{
cnt[5][1]++;
for (int j=i+1;j<len;j++)
{
cnt[5][1]++;
cnt[5][0]++;
if (a[i]>a[j]) {cnt[5][1]+=3;swap(a[i],a[j]);}
}
}
}
void insert_sort(int *array,int l)
{
int i,j;
int temp;
for(i=1;i<l;i++)
{
cnt[6][1]++;
temp=*(array+i);
cnt[6][1]+=2;
for(j=i;j>0&&*(array+j-1)>temp;j--)
{
cnt[6][0]+=2;
*(array+j)=*(array+j-1);
cnt[6][1]+=2;
}
*(array+j)=temp;
cnt[6][1]+=3;
}
}
void shellInsert(int array[],int n,int dk)
{
int i,j,temp;
for(i=dk;i<n;i++)//分别向每组的有序区域插入
{
cnt[7][1]++;
temp=array[i];
for(j=i-dk;(j>=i%dk)&&array[j]>temp;j-=dk)//比较与记录后移同时进行
{
cnt[7][0]+=2;
cnt[7][1]++;
array[j+dk]=array[j];
cnt[7][1]++;
}
cnt[7][0]++;
if(j!=i-dk)
{
array[j+dk]=temp;//插入
cnt[7][1]+=3;
}
}
}
//计算Hibbard增量
int dkHibbard(int t,int k)
{
return (int)(pow(2,t-k+1)-1);
}
//希尔排序
void shellSort(int array[],int n,int t)
{
void shellInsert(int array[],int n,int dk);
int i;
for(i=1;i<=t;i++)
{
shellInsert(array,n,dkHibbard(t,i));
cnt[7][1]++;
}
}
void print(string STR)
{
cout<<STR<<" Result:"<<endl;
for (int i=0;i<n;i++)
cout<<a[i]<<" ";
cout<<endl;
}
int main()
{
int b[maxn];
int c[maxn];
int keynum;
n=100;
for (int i=0;i<n;i++)
{
b[i]=rand()%1000;
cout<<b[i]<<" ";
a[i]=b[i];
}
quicksort(0,n-1);
print("quicksort");
for (int i=0;i<n;i++)
a[i]=b[i];
heapsort(n);
print("heapsort");
for (int i=0;i<n;i++)
a[i]=b[i];
Mergesort(a,c,0,n-1);
print("mergesort");
for (int i=0;i<n;i++)
a[i]=b[i];
cout<<"please input radixsort keynum:"<<endl;
cin>>keynum;
radixsort(n,keynum);
print("radixsort");
for (int i=0;i<n;i++)
a[i]=b[i];
insert_sort(a,n);
print("insertion sort");
for (int i=0;i<n;i++)
a[i]=b[i];
selectsort(n);
print("select sort");
for (int i=0;i<n;i++)
a[i]=b[i];
shellSort(a,n,(int)(log(n+1)/log(2)));
print("Shell");
cout<<"\t\t\t\t compare times"<<"\t"<<"move times"<<endl;
cout<<"quicksort:O(nlogn)-O(n^2):\t"<<cnt[1][0]<<"\t\t"<<cnt[1][1]<<endl;
cout<<"heapsort:O(nlogn)-O(nlogn):\t"<<cnt[2][0]<<"\t\t"<<cnt[2][1]<<endl;
cout<<"mergesort:O(nlogn)-O(nlogn):\t"<<cnt[3][0]<<"\t\t"<<cnt[3][1]<<endl;
cout<<"radixsort:O(d(n+rd))-O(d(n+rd)):"<<cnt[4][0]<<"\t\t"<<cnt[4][1]<<endl;
cout<<"insertion sort:O(n^2)-O(n^2):\t"<<cnt[6][0]<<"\t\t"<<cnt[6][1]<<endl;
cout<<"select sort:O(n^2)-O(n^2):\t"<<cnt[5][0]<<"\t\t"<<cnt[5][1]<<endl;
cout<<"shell sort:O(n^(1.5))-O(n^2):\t"<<cnt[7][0]<<"\t\t"<<cnt[7][1]<<endl;
return 0;
}
<file_sep>/*#include<bits/stdc++.h>
using namespace std;
struct node
{
int key;
node * next;
};
int n;
node *Build()
{
cout<<"Please input the number:";
cin>>n;
node *h=(node *)malloc(sizeof(node));
node *p1=h;
node *p2=NULL;
cin>>h->key;
for (int i=1;i<=n-1;i++)
{
p2= (node *)malloc(sizeof(node));
p1->next=p2;
cin>>p2->key;
p1=p2;
}
p1->next=h;
return h;
}
node * Match(node *hp,int km)
{
node *p=hp;
for (int i=1;i<=n;i++)
if (hp->key==km) return hp;
else hp=hp->next;
return NULL;
}
node * search(node* H,node* t,int K)
{
if (t->key==K) return t;
if (t->key>K) return Match(H,K);
else return Match(t,K);
}
void init()
{
int q;
node *head=Build();
node *t=head;
cout<<"please input the query number:";
cin>>q;
for (int i=1;i<=q;i++)
{
int Key;
cin>>Key;
node *t1=search(head,t,Key);
if (t1!=NULL)
{
t=t1;
cout<<"Hash position is:"<<t<<endl;
}
else cout<<"Not found!"<<endl;
}
}
int main()
{
init();
return 0;
}
*/
#include<bits/stdc++.h>
#define maxn 1000
using namespace std;
struct node
{
int num;
node * lson;
node * rson;
node * fa;
int left;
};
int n,m;
bool flag;
int number;
void insert(int x,node *hp)
{
if (x<hp->num)
{
if (hp->lson!=NULL)
{
insert(x,hp->lson);
return;
}
else
{
node*p1=(node *)malloc(sizeof(node));
hp->lson=p1;
hp->left=0;
p1->lson=NULL;p1->rson=NULL;p1->fa=hp;
p1->num=x;
return;
}
}
else
{
if (hp->rson!=NULL)
{
insert(x,hp->rson);
return;
}
else
{
node*p1=(node *)malloc(sizeof(node));
hp->rson=p1;
hp->left=0;
p1->lson=NULL;p1->rson=NULL;p1->fa=hp;
p1->num=x;
return;
}
}
}
void search(node *hp,int Min,int Max)
{
if (hp->lson!=NULL)
search(hp->lson,Min,Max);
if (hp->num>=Min && hp->num<=Max)
cout<<hp->num<<" ";
if (hp->rson!=NULL)
search(hp->rson,Min,Max);
}
void setup(node *hp)
{
if (hp->lson!=NULL)
setup(hp->lson);
number++;
hp->left=number;
if (hp->rson!=NULL)
setup(hp->rson);
}
void del(node *hp,int inx)
{
if (flag) return;
if (inx<hp->num)
del(hp->lson,inx);
if (inx>hp->num)
del(hp->rson,inx);
if (inx==hp->num)
{
flag=true;
if (hp->lson==NULL)
{
if (hp->rson!=NULL) /* splice the right subtree (if any) up to the parent; the original code dereferenced the NULL left child here */
hp->rson->fa=hp->fa;
if (hp->fa->lson==hp)
hp->fa->lson=hp->rson;
if (hp->fa->rson==hp)
hp->fa->rson=hp->rson;
}
if (hp->lson!=NULL)
{
node*p1=hp->lson;
if (p1->rson==NULL)
{
hp->lson->fa=hp->fa;
if (hp->fa->lson==hp)
hp->fa->lson=hp->lson;
if (hp->fa->rson==hp)
hp->fa->rson=hp->lson;
hp->lson->rson=hp->rson;
hp->rson->fa=hp->lson;
}
else
{
while(p1->rson!=NULL)
p1=p1->rson;
p1->fa->rson=p1->lson;
p1->lson->fa=p1->fa;
p1->lson=hp->lson;
p1->rson=hp->rson;
p1->fa=hp->fa;
if (hp->fa->lson==hp)
hp->fa->lson=p1;
if (hp->fa->rson==hp)
hp->fa->rson=p1;
hp->rson->fa=p1;
hp->lson->fa=p1;
}
free(hp);
}
}
return;
}
void init()
{
cout<<"please input the number:";
cin>>n;
int a;
node *root=(node *)malloc(sizeof(node));
node *h=(node *)malloc(sizeof(node));
h->lson=NULL;
h->rson=NULL;
h->fa=root;
cin>>h->num;
for (int i=1;i<n;i++)
{
cin>>a;
insert(a,h);
}
cout<<"please input the query number:";
cin>>m;
for (int i=0;i<m;i++)
{
int A,B;
cin>>A>>B;
search(h,A,B);
}
/*
cout<<"please input the delete number:";
cin>>m;
for (int i=0;i<m;i++)
{
int A;
cin>>A;
flag=false;
del(h,A);
if (flag) cout<<"deleted completed!"<<endl;
else cout<<"Not Found!";
}*/
}
int main()
{
init();
}
<file_sep># Chatroom 1.0
To communicate over a LAN, just change Host to the IP of the machine in the LAN where the server is running.
### Features
help : lists how to use every command
login nickname : log in with a user name
send message : send a message
help function : shows how to use the command named function
Example output:

<file_sep>#include <stdio.h>
#include <Status.h>
/* macro definitions */
#define MAX 256
/* type definitions */
typedef struct
{
int x;
int y;
int value;
}Node;
typedef Node SNode;
/* function prototypes */
Status Algo_5_19(int a[MAX][MAX], int row, int col, SNode p[MAX]);
void Min_row(int a[MAX][MAX], int col, int i, Node min[MAX]);
Status IsMax_col(int a[MAX][MAX], int row, Node v);
int main(int argc, char *argv[])
{
SNode p[MAX];
int row = 4;
int col = 4;
int a[MAX][MAX] = {{ 1, 1, 1, 1},
{1, 1, 1, 1},
{1, 1, 1, 1},
{ 1, 1, 1, 1}};
int i, j, k;
for(i=0; i<row; i++)
{
for(j=0; j<col; j++)
printf("%3d ", a[i][j]);
printf("\n");
}
printf("\n");
if(Algo_5_19(a, row, col, p))
{
printf("NO\n");
for(k=1; k<=p[0].value; k++)
printf("%d %d %d\n", p[k].x, p[k].y, p[k].value);
}
else
printf("NO\n");
printf("\n");
return 0;
}
Status Algo_5_19(int a[MAX][MAX], int row, int col, SNode p[MAX])
{
int i, k;
Node min[MAX];
p[0].value = 0;
for(i=0; i<row; i++)
{
Min_row(a, col, i, min);
for(k=1; k<=min[0].value; k++)
{
if(IsMax_col(a, row, min[k]))
{
p[0].value++;
p[p[0].value] = min[k];
}
}
}
if(p[0].value)
return TRUE;
else
return FALSE;
}
/* find the minimum value(s) of row i of matrix a */
void Min_row(int a[MAX][MAX], int col, int i, Node min[MAX])
{
int j;
min[0].value = 0;
for(j=0; j<col; j++)
{
if(!j || a[i][j]==min[j-1].value)
min[0].value++;
else
{
if(a[i][j]<min[j-1].value)
min[0].value = 1;
else
continue;
}
min[min[0].value].x = i;
min[min[0].value].y = j;
min[min[0].value].value = a[i][j];
}
}
/* check whether the value stored in v is the maximum of its column */
Status IsMax_col(int a[MAX][MAX], int row, Node v)
{
int i;
for(i=0; i<row; i++)
{
if(a[i][v.y]>v.value)
return FALSE;
}
return TRUE;
}
<file_sep>#include <bits/stdc++.h>
#define N 8
#define INF 999999999
using namespace std;
int dx[9]={0,2,2,-2,-2,1,1,-1,-1};
int dy[9]={0,-1,1,1,-1,-2,2,-2,2};
bool MAP[N+1][N+1];
int sign[N+1][N+1]={0};
void print()
{
for (int i=1;i<=N;i++)
{
for (int j=1;j<=N;j++)
cout<<sign[i][j]<<" ";
cout<<endl;
}
}
bool judge(int i,int j)
{
if (!MAP[i][j]&&i>0&&i<N+1&&j>0&&j<N+1)
return true;
else return false;
}
int judgepath(int x_,int y_)
{
int c;
int tag=0;
for (c = 1; c <= 8; c++) /* knight moves live in dx[1..8]; index 0 is a dummy entry */
{
if (judge(x_ + dx[c], y_ + dy[c]))
{
tag++;
}
}
return tag;
}
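// Warnsdorff-style heuristic: walk() always jumps to the reachable square whose own
// number of onward knight moves (computed by judgepath) is smallest.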
void walk(int x,int y,int cnt)
{
cout<<x<<" "<<y<<endl;
if (cnt==N*N)
{
print();
exit(0);
}
int MIN=INF,rec=-1,rec2=0;
for (int i=1;i<=8;i++)
{
if (!judge(x+dx[i],y+dy[i]))
continue;
int path=judgepath(x+dx[i],y+dy[i]);
if (MIN>path)
{
MIN=path;
if (rec) rec2=rec;
rec=i;
}
}
if (MIN!=INF)
{
sign[x+dx[rec]][y+dy[rec]]=cnt+1;
MAP[x+dx[rec]][y+dy[rec]]=true;
walk(x+dx[rec],y+dy[rec],cnt+1);
MAP[x+dx[rec]][y+dy[rec]]=false;
sign[x+dx[rec]][y+dy[rec]]=0;
}
return;
}
void init()
{
int x0,y0;
memset(MAP,false,sizeof(MAP)); /* memset takes (ptr, value, size) */
cin>>x0>>y0;
sign[x0][y0]=1;
MAP[x0][y0]=true;
walk(x0,y0,1);
}
int main()
{
init();
return 0;
}<file_sep>#include<bits/stdc++.h>
using namespace std;
const int maxn=50;
const int INF=0x3f3f3f3f;
struct node{int from,to,cost;}edge[maxn];
int par[maxn];
int gr[maxn][maxn];
bool visp[maxn];
bool acte[maxn][maxn];
bool vise[maxn][maxn];
int st,tot,n,m;
int find(int num)
{
return par[num]==num?num:par[num]=find(par[num]);
}
int cmp(const void *a,const void *b)
{
return(((node*)a)->cost-((node*)b)->cost);
}
void init()
{
int a,b,c;
memset(visp,false,sizeof(visp)); /* memset takes (ptr, value, size); the arguments were swapped */
memset(acte,false,sizeof(acte));
memset(vise,false,sizeof(vise));
cin>>n>>m;
for (int i=0;i<maxn;i++)
for (int j=0;j<maxn;j++)
gr[i][j]=INF;
cout<<"please input the (v,w) and value:"<<endl;
for (int i=1;i<=m;i++)
{
cin>>a>>b>>c;
gr[a][b]=c;
gr[b][a]=c;
}
cout<<"please input the start point:"<<endl;
cin>>st;
visp[st]=true;
for (int i=1;i<=n;i++)
if (visp[i]==false&&gr[st][i]!=INF) acte[st][i]=true;
tot=1;
}
void merge()
{
while (tot<n)
{
int l=INF,p,v,w;
for (int i=1;i<=n;i++)
for (int j=1;j<=n;j++)
if (gr[i][j]!=INF&& acte[i][j] && !(visp[i]&&visp[j]))
if (l>gr[i][j])
{ if (visp[i]&&!visp[j]) {p=j;l=gr[i][j];v=i;w=j;} if (!visp[i]&&visp[j]) {p=i;l=gr[i][j];v=i;w=j;}}
visp[p]=true;
cout<<p;
vise[v][w]=true;
tot++;
for (int i=1;i<=n;i++)
if (visp[i]==false&&gr[p][i]!=INF) acte[p][i]=true;
}
}
void prim()
{
init();
int ans=0;
merge();
for (int i=1;i<=n;i++)
for (int j=1;j<=n;j++)
if (vise[i][j])
{
cout<<i<<'-'<<j<<endl;
ans+=gr[i][j];
}
cout<<"total value:"<<ans;
}
int main()
{
cin>>n>>m;
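// Heuristic choice of MST algorithm: adjacency-matrix Prim costs roughly O(n^2) while
// Kruskal costs roughly O(m log m), so prefer Kruskal when n*n exceeds m*log(m).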
if (n*n>m*log(m))
{
cout<<"Use kruskal algorithm"<<endl;
for(int i=0;i<=n;i++)
par[i]=i;
for(int i=0;i<m;i++)
scanf("%d%d%d",&edge[i].from,&edge[i].to,&edge[i].cost);
qsort(edge,m,sizeof(node),cmp);
int cnt=0,res=0;
for(int i=0;i<m;i++)
{
int sa=find(edge[i].from),sb=find(edge[i].to);
if(sa==sb)
continue;
res+=edge[i].cost;
par[sa]=sb;
cnt++;
if(cnt==n-1)
break;
}
if(cnt>=n-1)
printf("%d\n",res);
else
printf("?\n");
}
else {
cout<<"Use prim algorithm"<<endl;
prim();
}
return 0;
}
/*
input:
6 9
1 2 6
1 3 3
2 3 2
2 4 5
3 4 3
4 6 3
4 5 2
3 5 4
5 6 5
<file_sep>#include<bits/stdc++.h>//Euler path ("one-stroke drawing")
#define for(i,x,p) for(int i=x;i<=p;++i)
#define maxn 100
using namespace std;
int gr[maxn][maxn];
int a[maxn]={0},tot=0,e=0,n;
void init()
{
cin>>n;
cout<<"please input the matrix:"<<endl;
for(i,1,n)
for(j,1,n)
cin>>gr[i][j];
}
int draw(int v)
{
int i,k=0;
if (tot==e) return 1;
for(i,1,n)
{
if (gr[v][i]==1)
{
k=1;
gr[v][i]=0;
gr[i][v]=0;
e+=2;
if (draw(i))
{
printf("%d->",i);
return 1;
}
else
{
gr[v][i]=1;
gr[i][v]=1;
e-=2;
k=0;
}
}
}
if (k==0) return 0;
}
int main()
{
int k=0,v=1; /* v defaults to vertex 1 when every vertex has even degree */
init();
for(i,1,n)
{
for (j,1,n)
if (gr[i][j]==1) a[i]++;
tot+=a[i];
if (a[i]%2==1)
{
k++;v=i;
}
}
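// An undirected graph has an Euler path only if it has 0 or 2 vertices of odd degree;
// when odd-degree vertices exist, the drawing must start from one of them (v).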
if (k>2) printf("No solution\n");
else
{
printf("%d->",v);
draw(v);//从v点开始一笔画
printf("%d",v);
}
return 0;
} <file_sep>from mpl_toolkits import mplot3d
import matplotlib.pyplot as plt
import matplotlib
import numpy as np
import math
sigma1=float(input('σ1:'))
mu1=float(input('μ1:'))
sigma2=float(input('σ2:'))
mu2=float(input('μ2:'))
rho=float(input('ρ:'))
x0=float(input('x0:'))
y0=float(input('y0:'))
x1=float(input('x1:'))
y1=float(input('y1:'))
def r(x, y):
return np.sqrt(x**2+y**2)
def p(xi,yi):
return np.log(1/(2*math.pi*sigma1*sigma2*math.sqrt(1-rho**2))*np.exp(-1/(2*(1-rho**2))*(((xi-mu1)/sigma1)**2+((yi-mu2)\
/sigma2)**2-2*rho*(xi-mu1)*(yi-mu2)/(sigma1*sigma2))))
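# Note: p(x, y) above is intended to be the log of the bivariate normal density, i.e.
#   f(x,y) = exp(-(((x-mu1)/s1)**2 - 2*rho*(x-mu1)*(y-mu2)/(s1*s2) + ((y-mu2)/s2)**2) / (2*(1-rho**2)))
#            / (2*pi*s1*s2*sqrt(1-rho**2))
# f(x, y) below then approximates exp(p) over the rectangle [x0, x1] x [y0, y1] by an
# area-weighted blend of p evaluated at the projections of (x, y) onto the four edges
# and at the four corners.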
def f(x,y):
g=0
A=(x1-x0)*(y1-y0)
Ai=[]
fq=[]
fp=[]
Ai=Ai+[(x1-x)*(y1-y)]+[(x-x0)*(y1-y)]+[(x-x0)*(y-y0)]+[(x1-x)*(y-y0)]
fq=fq+[p(x,y0)]+[p(x1,y)]+[p(x,y1)]+[p(x0,y)]
fp=fp+[p(x0,y0)]+[p(x1,y0)]+[p(x1,y1)]+[p(x0,y1)]
for i in range(4):
g=g+(Ai[i]+Ai[(i+1)%4])*fq[i]-Ai[i]*fp[i]
return np.exp(g/A)
x = np.linspace(x0+0.01,x1-0.01,100)
y = np.linspace(x0+0.01,y1-0.01,100)
X, Y = np.meshgrid(x, y)
Z = f(X,Y)
plt.figure(1)
ax = plt.axes(projection='3d')
ax.plot_surface(X, Y, Z, rstride=1, cstride=1, cmap='viridis', edgecolor='none')
ax.set_title('interp2 f(x,y)')
ax.set_xlabel('x')
ax.set_ylabel('y')
ax.set_zlabel('z')
x = np.linspace(x0+0.01,x1-0.01,100)
y = np.linspace(x0+0.01,y1-0.01,100)
X, Y = np.meshgrid(x, y)
Z = np.exp(p(X,Y))
plt.figure(2)
ax = plt.axes(projection='3d')
ax.plot_surface(X, Y, Z, rstride=1, cstride=1, cmap='viridis', edgecolor='none')
ax.set_title('function f(x,y)')
ax.set_xlabel('x')
ax.set_ylabel('y')
ax.set_zlabel('z')
plt.show()<file_sep>#include <bits/stdc++.h>//dijkstra关键点路径
#define INF 0x3f3f3f3f
using namespace std;
int map_[110][110];
bool visited[10010];
int dis[10010];
int n,m;
void dijstra(int u)
{
int i,j;
int start=u;
memset(visited,false,sizeof(visited));
visited[start]=true;
memset(dis,INF,sizeof(dis));
for (i=1;i<=n;++i)
{
dis[i]=min(dis[i],map_[start][i]);
}
for (i=1;i<=n-1;++i)
{
int minn=INF;
for (j=1;j<=n;++j)
if (!visited[j]&&minn>dis[j]) {minn=dis[j];start=j;}
visited[start]=1;
for (int j=1;j<=n;++j)
dis[j]=min(dis[j],dis[start]+map_[start][j]);
}
}
int main()
{
int a,b,c;
int i,st,en,im;
cout<<"Please input the point number and edge number:"<<endl;
cin>>n>>m;
cout<<"input the (v,w) and the value of the edge:"<<endl;
memset(map_,88,sizeof(map_));
for (i=1;i<=m;++i)
{
cin>>a>>b>>c;
map_[a][b]=c;
map_[b][a]=c;
}
for (i=1;i<n;++i)
map_[i][i]=0;
cout<<"please input the start and target point:"<<endl;
cin>>st>>en;
cout<<"please input the important point:"<<endl;
cin>>im;
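// shortest route through the mandatory waypoint = dist(st -> im) + dist(im -> en),
// computed with two separate Dijkstra runs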
dijstra(st);
int d1=im,d2;
cout<<st<<"-"<<im<<":"<<dis[im]<<endl;
d1=dis[im];
dijstra(im);
cout<<im<<"-"<<en<<":"<<dis[en]<<endl;
d2=dis[en];
cout<<"total:"<<d1+d2;
return 0;
}
<file_sep>#include <bits/stdc++.h>//dijkstra
#define INF 0x3f3f3f3f
using namespace std;
int map_[110][110];
bool visited[10010];
int dis[10010];
int n,m;
void dijstra(int u)
{
int i,j;
int start=u;
memset(visited,false,sizeof(visited));
visited[start]=true;
memset(dis,INF,sizeof(dis));
for (i=1;i<=n;++i)
{
dis[i]=min(dis[i],map_[start][i]);
}
for (i=1;i<=n-1;++i)
{
int minn=9999999;
for (j=1;j<=n;++j)
if (!visited[j]&&minn>dis[j]) {minn=dis[j];start=j;}
visited[start]=1;
for (int j=1;j<=n;++j)
dis[j]=min(dis[j],dis[start]+map_[start][j]);
}
}
int main()
{
int a,b,c;
int i,st,en;
cout<<"Please input the point number and edge number:"<<endl;
cin>>n>>m;
cout<<"input the (v,w) and the value of the edge:"<<endl;
memset(map_,88,sizeof(map_));
for (i=1;i<=m;++i)
{
cin>>a>>b>>c;
map_[a][b]=c;
map_[b][a]=c;
}
for (i=1;i<n;++i)
map_[i][i]=0;
cout<<"please input the start and target point:"<<endl;
cin>>st>>en;
dijstra(st);
cout<<dis[en];
return 0;
}
<file_sep>#include<bits/stdc++.h>//Prim
#define INF 0x3f3f3f3f
#define maxn 100
using namespace std;
int n,m,st,tot;
int gr[maxn][maxn];
bool visp[maxn];
bool acte[maxn][maxn];
bool vise[maxn][maxn];
void init()
{
int a,b,c;
memset(visp,false,sizeof(visp)); /* memset takes (ptr, value, size); the arguments were swapped */
memset(acte,false,sizeof(acte));
memset(vise,false,sizeof(vise));
cin>>n>>m;
for (int i=0;i<maxn;i++)
for (int j=0;j<maxn;j++)
gr[i][j]=INF;
cout<<"please input the (v,w) and value:"<<endl;
for (int i=1;i<=m;i++)
{
cin>>a>>b>>c;
gr[a][b]=c;
gr[b][a]=c;
}
cout<<"please input the start point:"<<endl;
cin>>st;
visp[st]=true;
for (int i=1;i<=n;i++)
if (visp[i]==false&&gr[st][i]!=INF) acte[st][i]=true;
tot=1;
}
void merge()
{
while (tot<n)
{
int l=INF,p,v,w;
for (int i=1;i<=n;i++)
for (int j=1;j<=n;j++)
if (gr[i][j]!=INF&& acte[i][j] && !(visp[i]&&visp[j]))
if (l>gr[i][j])
{ if (visp[i]&&!visp[j]) {p=j;l=gr[i][j];v=i;w=j;} if (!visp[i]&&visp[j]) {p=i;l=gr[i][j];v=i;w=j;}}
visp[p]=true;
cout<<p;
vise[v][w]=true;
tot++;
for (int i=1;i<=n;i++)
if (visp[i]==false&&gr[p][i]!=INF) acte[p][i]=true;
}
}
int main()
{
init();
int ans=0;
merge();
for (int i=1;i<=n;i++)
for (int j=1;j<=n;j++)
if (vise[i][j])
{
cout<<i<<'-'<<j<<endl;
ans+=gr[i][j];
}
cout<<"total value:"<<ans;
return 0;
}<file_sep>#include<bits/stdc++.h>
using namespace std;
struct tri
{
int x;
int y;
int ord;
int num;
}A[502],B[502];//A,B矩阵的系数矩阵
int nA,mA,nB,mB,numA,numB;
struct Matrix
{
int m[25][25];
int N;
int M;
tri data[502];
};
Matrix Multiply()
{
Matrix r1;
if (mA!=nB)
{cout<<"Multipy ERROR!"<<endl;return r1;}
int arpos[25]={0};
int brpos[25]={0};
int rrpos[25]={0};
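// arpos[i] / brpos[i] record where row i starts inside the triple lists of A and B
// (the classic "rpos" bookkeeping used for sparse-matrix multiplication)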
for (int i=1;i<=numA;i++)
{
if (arpos[A[i].x]==0) arpos[A[i].x]=i;
}
for (int i=1;i<nA;i++)
if (arpos[i]==0) arpos[i]=arpos[i+1];
if (arpos[nA]==0) arpos[nA]=numA+1;
for (int i=1;i<=numB;i++)
{
if (brpos[B[i].x]==0) brpos[B[i].x]=i;
}
for (int i=1;i<nB;i++)
if (brpos[i]==0) brpos[i]=brpos[i+1];
if (brpos[nB]==0) brpos[nB]=numB+1;
r1.N=nA;
r1.M=mB;
int numr1=0;
for (int row=1;row<=r1.N;row++)
{
int ctemp[25]={0};
int tp;
rrpos[row]=numr1+1;
if (row<r1.N) tp=arpos[row+1];
else tp=numA+1;
for (int p=arpos[row];p<tp;p++)
{
int brow=A[p].y;
int t;
if (brow<nB) t=brpos[brow+1];
else t=numB+1;
for (int q=brpos[brow];q<t;++q)
{
int col=B[q].y;
ctemp[col]+=(A[p].num*B[q].num);
}
}
for (int col=1;col<=r1.M;++col)
if (ctemp[col])
{
if (++numr1>r1.N*r1.M) cout<<"ERROR"<<endl;
r1.data[numr1].x=row;r1.data[numr1].y=col;r1.data[numr1].num=ctemp[col];
}
}
for (int i=1;i<=numr1;i++)
r1.m[r1.data[i].x][r1.data[i].y]=r1.data[i].num;
return r1;
}
Matrix Add()
{
Matrix r1;
if (nA!=nB||mA!=mB)
{cout<<"Add ERROR!"<<endl;return r1;}
for (int i=1;i<=numA;i++)
{
r1.m[A[i].x][A[i].y]=A[i].num;
}
for (int i=1;i<=numB;i++)
{
r1.m[B[i].x][B[i].y]+=B[i].num;
}
r1.N=nA;
r1.M=mA;
return r1;
}
Matrix Sub()
{
Matrix r1;
if (nA!=nB||mA!=mB)
{cout<<"Sub ERROR!"<<endl;return r1;}
for (int i=1;i<=numA;i++)
{
r1.m[A[i].x][A[i].y]=A[i].num;
}
for (int i=1;i<=numB;i++)
{
r1.m[B[i].x][B[i].y]-=B[i].num;
}
r1.N=nA;
r1.M=mA;
return r1;
}
void init()
{
cout<<"Please input nA,mA,tA:"<<endl;
cin>>nA>>mA>>numA;
for (int i=1;i<=numA;i++)
{
cin>>A[i].x>>A[i].y>>A[i].num;
//A[i].ord=(A[i].x-1)*mA+A[i].y;
}
cout<<"Please input nB,mB,tB:"<<endl;
cin>>nB>>mB>>numB;
for (int i=1;i<=numB;i++)
{
cin>>B[i].x>>B[i].y>>B[i].num;
//B[i].ord=(B[i].x-1)*mB+B[i].y;
}
}
int main()
{
Matrix r;
init();
int op;
cout<<"Please input the operation:"<<endl;
cin>>op;
if (op==1) r=Multiply();
if (op==2) r=Add();
if (op==3) r=Sub();
for (int i=1;i<=r.N;i++)
{
for (int j=1;j<=r.M;j++)
cout<<r.m[i][j]<<" ";
cout<<endl;
}
return 0;
}
<file_sep>#include <stdio.h>
#include <stdbool.h> /* bool/true/false are used below */
#define MAX 256
typedef struct
{
int x;
int y;
int value;
}Node;
typedef Node SNode;
bool Algo_5_19(int a[MAX][MAX], int row, int col, SNode p[MAX]);
void Min_row(int a[MAX][MAX], int col, int i, Node min[MAX]);
bool IsMax_col(int a[MAX][MAX], int row, Node v);
int main(int argc, char *argv[])
{
SNode p[MAX];
int row, col;
int a[MAX][MAX];
int i, j, k;
scanf("%d %d", &row, &col); /* this is a C file, so read with scanf instead of cin */
for(i=0; i<row; i++)
{
for(j=0; j<col; j++)
scanf("%d", &a[i][j]);
}
if(Algo_5_19(a, row, col, p))
{
printf("此矩阵中存在马鞍点...\n");
for(k=1; k<=p[0].value; k++)
printf("第 %d 行第 %d 列的马鞍点 %d\n", p[k].x, p[k].y, p[k].value);
}
else
printf("此矩阵中不存在马鞍点...\n");
printf("\n");
return 0;
}
bool Algo_5_19(int a[MAX][MAX], int row, int col, SNode p[MAX])
{
int i, k;
Node min[MAX];
p[0].value = 0;
for(i=0; i<row; i++)
{
Min_row(a, col, i, min);
for(k=1; k<=min[0].value; k++)
{
if(IsMax_col(a, row, min[k]))
{
p[0].value++;
p[p[0].value] = min[k];
}
}
}
if(p[0].value)
return true;
else
return false;
}
void Min_row(int a[MAX][MAX], int col, int i, Node min[MAX])
{
int j;
min[0].value = 0;
for(j=0; j<col; j++)
{
if(!j || a[i][j]==min[j-1].value)
min[0].value++;
else
{
if(a[i][j]<min[j-1].value)
min[0].value = 1;
else
continue;
}
min[min[0].value].x = i;
min[min[0].value].y = j;
min[min[0].value].value = a[i][j];
}
}
bool IsMax_col(int a[MAX][MAX], int row, Node v)
{
int i;
for(i=0; i<row; i++)
{
if(a[i][v.y]>v.value)
return false;
}
return true;
}
<file_sep># data-structure-homework<file_sep>#include<bits/stdc++.h>
#define maxn 1000
const int INF=0x3f3f3f3f;
using namespace std;
int n,m,q,ans=0,pth=INF,route=0;
int G[maxn][maxn];
bool vis[maxn][maxn];
int vrel[100];
bool rec[maxn][maxn];
string H[maxn];
void dfs(int x,int y)
{
if (x==y)
{
route=route+ans;
if (pth>ans)
{
pth=min(pth,ans);
for (int i=1;i<=n;i++)
for (int j=1;j<=n;j++)
rec[i][j]=vis[i][j];
}
return;
}
for (int i=1;i<=n;i++)
if (G[x][i]!=INF&&!vis[x][i])
{
vis[x][i]=true;
vis[i][x]=true;
ans=ans+G[x][i];
dfs(i,y);
ans=ans-G[x][i];
vis[x][i]=false;
vis[i][x]=false;
}
}
int Hash(string a)
{
int key=0;
for (int i=0;i<a.length();i++)
key=key+a[i];
key=key%100;
while (H[key]!="") key=(key+1)%100;
H[key]=a;
return key;
}
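// Hash stores a string at (sum of character codes mod 100), resolving collisions by
// linear probing; Index walks forward from the same starting slot to find where a key was stored.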
int Index(string a)
{
int k=0;
for (int i=0;i<a.length();i++)
k=k+a[i];
k=k%100;
while(H[k]!=a) k++;
return k;
}
void printmap(int x)
{
cout<<x;
for (int i=1;i<=n;i++)
if (rec[x][i])
{
cout<<"->";
rec[x][i]=0;
rec[i][x]=0;
printmap(i);
break;
}
}
void init()
{
vrel[55]=4;
vrel[34]=4;
vrel[36]=3;
vrel[49]=3;
vrel[62]=8;
vrel[63]=8;
vrel[27]=5;
vrel[41]=5;
vrel[57]=15;
vrel[70]=30;
vrel[32]=16;
vrel[33]=12;
vrel[39]=23;
vrel[64]=23;
vrel[70]=16;
for (int i=0;i<maxn;i++)
for (int j=0;j<maxn;j++)
{
vis[i][j]=false;
G[i][j]=INF;
}
cout<<"please input the vertex and edge number:";
cin>>n>>m;
for (int i=1;i<=m;i++)
{
int pre,aft;
string v;
cout<<"please add <v1,v2,relate>:";
scanf("%d%d ",&pre,&aft);
getline(cin,v);
//cout<<Index(v);
G[pre][aft]=vrel[Index(v)];
G[aft][pre]=vrel[Index(v)];
}
cout<<"please input the query times:";
cin>>q;
for (int i=1;i<=q;i++)
{
int s,e;
cout<<"start vertex and terminated vertex:";
cin>>s>>e;
ans=0;
route=0;
pth=INF;
memset(vis,false,sizeof(vis));
dfs(s,e);
if (route)
{
cout<<"friend distance:"<<route<<endl;
cout<<"strongest path:"<<pth<<endl;
printmap(s);
}
else cout<<"No relationship!"<<endl;
}
}
int main()
{
string a[15]={"mother","father","son","dauther","boyfriend","girlfriend","wife","husband","classmate","stranger","teacher","friend","boss","employee","student"};
for (int i=0;i<15;i++)
Hash(a[i]);
init();
return 0;
}
<file_sep>#include<bits/stdc++.h>
using namespace std;
const int maxn=1e5+5;
struct node{int from,to,cost;}edge[maxn];
int par[maxn];
int find(int num)
{
return par[num]==num?num:par[num]=find(par[num]);
}
int cmp(const void *a,const void *b)
{
return(((node*)a)->cost-((node*)b)->cost);
}
int main()
{
int n,m,i,j;
while(scanf("%d%d",&n,&m)!=EOF&&n)
{
for(i=0;i<=m;i++)
par[i]=i;
for(i=0;i<n;i++)
scanf("%d%d%d",&edge[i].from,&edge[i].to,&edge[i].cost);
qsort(edge,n,sizeof(node),cmp);
int cnt=0,res=0;
for(i=0;i<n;i++)
{
int sa=find(edge[i].from),sb=find(edge[i].to);
if(sa==sb)
continue;
res+=edge[i].cost;
par[sa]=sb;
cnt++;
if(cnt==m-1)
break;
}
if(cnt>=m-1)
printf("%d\n",res);
else
printf("?\n");
}
return 0;
}
<file_sep>#include <bits/stdc++.h>//reachability counting: number of vertices reachable from each vertex
using namespace std;
const int maxn=30000+7;
int head[maxn],ver[maxn];
int Next[maxn];
int cnt,tot;
int deg[maxn],a[maxn];
int n,m;
bitset<maxn> c[maxn];
void add(int x,int y)
{
ver[++tot]=y,Next[tot]=head[x];
head[x]=tot;
deg[y]++;
}
void toposort()
{
queue<int>q;
for (int i=1;i<=n;++i)
if (deg[i]==0) q.push(i);
while(q.size())
{
int x=q.front();
q.pop();
a[++cnt]=x;
for (int i=head[x];i;i=Next[i])
{
int y=ver[i];
deg[y]--;
if (deg[y]==0) q.push(y);
}
}
}
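// solve(): visit vertices in reverse topological order; c[x] is a bitset of everything
// reachable from x, formed as x itself OR-ed with the bitsets of x's direct successors.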
void solve()
{
int x,y;
for (int i=cnt;i>=1;i--)
{
x=a[i];
c[x][x]=1;
for (int j=head[x];j;j=Next[j]) {
int y=ver[j];
c[x]|=c[y];
}
}
}
int main()
{
int x,y;
scanf("%d%d",&n,&m);
while (m--){
scanf("%d%d",&x,&y);
add(x,y);
}
toposort();
solve();
for (int i=1;i<=n;i++)
printf("%d\n",c[i].count());
return 0;
}<file_sep>#include<bits/stdc++.h>
#define maxn 1000
using namespace std;
int n;
struct node
{
int num;
int lson;
int rson;
bool vis;
bool pri;
}tree[maxn];
stack<node> s;
void pre_order()//先序遍历
{
while(!s.empty())
{
if (!tree[s.top().num].vis)
{
cout<<s.top().num;
tree[s.top().num].vis=true;
}
if (s.top().lson==-1&&s.top().rson==-1) s.pop();
if (s.top().lson!=-1&&!tree[s.top().lson].vis) {//tree[s.top().lson].vis=true;
s.push(tree[s.top().lson]);continue;}
if (s.top().rson!=-1&&!tree[s.top().rson].vis) {//tree[s.top().rson].vis=true;
s.push(tree[s.top().rson]);continue;}
s.pop();
}
}
void mid_order()//中序遍历
{
while(!s.empty())
{
if (s.top().lson==-1&&s.top().rson==-1) {tree[s.top().num].vis=true;cout<<s.top().num;s.pop();}
if (s.top().lson!=-1&&!tree[s.top().lson].vis) {s.push(tree[s.top().lson]);continue;}
if (!tree[s.top().num].vis)
{
cout<<s.top().num;
tree[s.top().num].vis=true;
}
if (s.top().rson!=-1&&!tree[s.top().rson].vis) {s.push(tree[s.top().rson]);continue;}
s.pop();
}
}
void last_order()//后序遍历
{
while(!s.empty())
{
if (s.top().lson==-1&&s.top().rson==-1) {tree[s.top().num].vis=true;cout<<s.top().num;s.pop();}
if (s.top().lson!=-1&&!tree[s.top().lson].vis) {tree[s.top().lson].vis=true;s.push(tree[s.top().lson]);continue;}
if (s.top().rson!=-1&&!tree[s.top().rson].vis) {tree[s.top().rson].vis=true;s.push(tree[s.top().rson]);continue;}
cout<<s.top().num;
s.pop();
}
}
int main()
{
int F,S,root;
cin>>n;
for (int i=1;i<maxn;i++)
{
tree[i].lson=-1;
tree[i].rson=-1;
tree[i].vis=false;
}
for (int i=1;i<=n;i++)
{
int s_size;
scanf("%d%d%d",&s_size,&F,&S);
if (s_size==1)
tree[F].lson=S;
else tree[F].rson=S;
tree[F].num=F;
tree[S].num=S;
}
cin>>root;
while(!s.empty()) s.pop();
s.push(tree[root]);
cout<<"pre-order:";
pre_order();
cout<<endl;
for (int i=1;i<maxn;i++)
tree[i].vis=false;
tree[root].vis=true;
s.push(tree[root]);
cout<<"last-order:";
last_order();
cout<<endl;
for (int i=1;i<maxn;i++)
tree[i].vis=false;
s.push(tree[root]);
cout<<"mid-order:";
mid_order();
}<file_sep>#include<bits/stdc++.h>
using namespace std;
#define maxn 1000
string S;
int n;
void getnext(string b, int *nextval)
{
int len = b.length();
int j = 0 , k = -1 ;
nextval[0] = -1 ;
while(j < len)//when matching several, possibly overlapping, occurrences do not stop at len-1: next[len] (one past the pattern end) is needed by the following search
{
if(k == -1 || b[k] == b[j])
{
k++;
j++;
// nextval optimisation of the plain next array
if(b[k] != b[j])
nextval[j] = k ;
else
nextval[j] = nextval[k];
}
else
{
k = nextval[k];
}
}
}
void Replace(string &S,string b,string a)
{
int flag[maxn]={0};
int nextval[10009];
getnext(b, nextval); // S, b (pattern) and a (replacement) come in as parameters; the extra getline calls that re-read them from stdin were removed
int lena = a.length() , lenb = b.length();
int i = 0 , j = 0;
int ans = 0 ,d=lena-lenb;
while(i < (int)S.length()) // scan the text S, not the replacement string
{
if(j == -1 || S[i] == b[j])
{
i++ ;
j++ ;
}
else
{
j = nextval[j];
}
if(j == lenb)
{
flag[++ans]=i+1+d*(ans-1);
j = nextval[j] ;
}
}
printf("%d\n" , ans);
for (int i=1;i<=ans;i++)
S.replace(flag[i]-1,lenb,a);
}
int main()
{
string A,B;
cout<<"please input the S string:"<<endl;
getline(cin,S);
cout<<"please input the operate number:"<<endl;
cin>>n;
getchar();
for (int i=1;i<=n;i++)
{
getline(cin,A);
getline(cin,B);
Replace(S,A,B);
}
return 0 ;
}<file_sep>#include<stdio.h>
#include<math.h>
#define MAXNUM 10
int main()
{
void shellSort(int array[],int n,int t);//t is the number of sorting passes
int array[MAXNUM],i;
for(i=0;i<MAXNUM;i++)
scanf("%d",&array[i]);
shellSort(array,MAXNUM,(int)(log(MAXNUM+1)/log(2)));//the number of passes is the integer part of log2(n+1)
for(i=0;i<MAXNUM;i++)
printf("%d ",array[i]);
printf("\n");
}
//根据当前增量进行插入排序
void shellInsert(int array[],int n,int dk)
{
int i,j,temp;
for(i=dk;i<n;i++)//分别向每组的有序区域插入
{
temp=array[i];
for(j=i-dk;(j>=i%dk)&&array[j]>temp;j-=dk)//比较与记录后移同时进行
array[j+dk]=array[j];
if(j!=i-dk)
array[j+dk]=temp;//插入
}
}
//计算Hibbard增量
int dkHibbard(int t,int k)
{
return (int)(pow(2,t-k+1)-1);
}
//希尔排序
void shellSort(int array[],int n,int t)
{
void shellInsert(int array[],int n,int dk);
int i;
for(i=1;i<=t;i++)
shellInsert(array,n,dkHibbard(t,i));
}
//written this way for readability; in practice the three functions above should be merged into one.
| a3a5205fc6eaba2bbacb4eb660e636397f4c9f14 | ["Markdown", "C", "Python", "C++"] | 21 | C++ | centurion-crawler/data-structure-homework | 2f47687aebdfbff7fb44c4e87cd23a3abc91920d | e53f5554e97cd36325cedd65ca6f82c6933c54bd | refs/heads/master |
<file_sep>from datetime import datetime, timedelta
from nose.tools import raises, assert_items_equal, eq_
from forge import get_files_and_intervals, get_timefile_exact, round_timefile,\
get_timefile, mp3_join
eight = datetime(2014, 5, 30, 20)
nine = datetime(2014, 5, 30, 21)
ten = datetime(2014, 5, 30, 22)
def minutes(n):
return timedelta(minutes=n)
def seconds(n):
return timedelta(seconds=n)
## timefile
def test_timefile_exact():
eq_(get_timefile_exact(eight),
'2014-05/30/rec-2014-05-30-20-00-00-ror.mp3')
## Rounding
def test_rounding_similarity():
eq_(round_timefile(eight), round_timefile(eight+minutes(20)))
assert round_timefile(eight) != round_timefile(nine)
def test_rounding_value():
eq_(round_timefile(eight), eight)
eq_(round_timefile(eight + minutes(20)), eight)
## Rounding + timefile
def test_timefile_alreadyround():
eq_(get_timefile(eight),
'2014-05/30/rec-2014-05-30-20-00-00-ror.mp3')
def test_timefile_toround():
eq_(get_timefile(eight + minutes(20)),
'2014-05/30/rec-2014-05-30-20-00-00-ror.mp3')
## Intervals
@raises(ValueError)
def test_intervals_same():
tuple(get_files_and_intervals(eight, eight))
@raises(ValueError)
def test_intervals_before():
tuple(get_files_and_intervals(nine, eight))
def test_intervals_full_1():
res = list(get_files_and_intervals(eight, nine-seconds(1)))
eq_(len(res), 1)
eq_(res[0][1], 0)
eq_(res[0][2], 0)
def test_intervals_partial_1():
res = list(get_files_and_intervals(eight, nine-minutes(10)))
eq_(len(res), 1)
eq_(res[0][1], 0)
eq_(res[0][2], 10*60 - 1)
def test_intervals_exact_2():
res = list(get_files_and_intervals(eight, nine))
eq_(len(res), 2)
eq_(res[0][1], 0)
eq_(res[0][2], 0)
eq_(res[1][1], 0)
eq_(res[1][2], 3599)
def test_intervals_partial_2():
res = list(get_files_and_intervals(eight, nine + minutes(50)))
eq_(len(res), 2)
eq_(res[0][1], 0)
eq_(res[0][2], 0)
eq_(res[1][1], 0)
eq_(res[1][2], 599)
def test_intervals_full_2():
res = list(get_files_and_intervals(eight,
nine + minutes(59) + seconds(59)))
eq_(len(res), 2)
eq_(res[0][1], 0)
eq_(res[0][2], 0)
eq_(res[1][1], 0)
eq_(res[1][2], 0)
def test_intervals_exact_3():
res = list(get_files_and_intervals(eight, ten))
eq_(len(res), 3)
eq_(res[0][1], 0)
eq_(res[0][2], 0)
eq_(res[1][1], 0)
eq_(res[1][2], 0)
eq_(res[2][1], 0)
eq_(res[2][2], 3599)
def test_intervals_partial_3():
res = list(get_files_and_intervals(eight, ten+minutes(50)))
eq_(len(res), 3)
eq_(res[0][1], 0)
eq_(res[0][2], 0)
eq_(res[1][1], 0)
eq_(res[1][2], 0)
eq_(res[2][1], 0)
eq_(res[2][2], 599)
def test_intervals_full_3():
res = list(get_files_and_intervals(eight, ten+minutes(59) + seconds(59)))
eq_(len(res), 3)
eq_(res[0][1], 0)
eq_(res[0][2], 0)
eq_(res[1][1], 0)
eq_(res[1][2], 0)
eq_(res[2][1], 0)
eq_(res[2][2], 0)
def test_intervals_middle_1():
res = list(get_files_and_intervals(eight + minutes(20),
nine - minutes(20)))
eq_(len(res), 1)
eq_(res[0][1], 20*60)
eq_(res[0][2], 20*60-1)
def test_intervals_left_2():
res = list(get_files_and_intervals(eight+minutes(30), nine))
eq_(len(res), 2)
eq_(res[0][1], 30*60)
eq_(res[0][2], 0)
eq_(res[1][1], 0)
eq_(res[1][2], 3599)
# MP3 Join
def test_mp3_1():
eq_(' '.join(mp3_join((('a', 0, 0),), 'foo.mp3')),
'ffmpeg -i concat:a -codec:a copy foo.mp3')
def test_mp3_1_left():
eq_(' '.join(mp3_join((('a', 160, 0),), 'foo.mp3')),
'ffmpeg -i concat:a -codec:a copy -ss 160 foo.mp3')
def test_mp3_1_right():
eq_(' '.join(mp3_join((('a', 0, 1600),), 'foo.mp3')),
'ffmpeg -i concat:a -codec:a copy -to 2000 foo.mp3')
def test_mp3_1_leftright():
eq_(' '.join(mp3_join((('a', 160, 1600),), 'foo.mp3')),
'ffmpeg -i concat:a -codec:a copy -ss 160 -to 2000 foo.mp3')
def test_mp3_2():
eq_(' '.join(mp3_join((('a', 0, 0), ('b', 0, 0)), 'foo.mp3')),
'ffmpeg -i concat:a|b -codec:a copy foo.mp3')
def test_mp3_2_leftright():
eq_(' '.join(mp3_join((('a', 1000, 0), ('b', 0, 1600)), 'foo.mp3')),
'ffmpeg -i concat:a|b -codec:a copy -ss 1000 -to 5600 foo.mp3')
<file_sep>/*global $*/
//TODO: move to a separate file(?)
var config = {
polling_interval: 500,
datetimeformat: function(d) {
if(Math.abs(new Date() - d) > (3*60*60*1000)) {
return d.toLocaleString();
}
return d.toLocaleTimeString();
}
};
var API = {
create: function() {
return $.ajax('/api/create', {
method: 'POST',
dataType: 'json'
});
},
stop: function(rec) {
return $.post('/api/update/' + rec.id, {
starttime: rec.starttime
});
},
update: function(id, data) {
return $.post('/api/update/' + id, data);
},
generate: function(rec) {
return $.post('/api/generate', {
id: rec.id
});
},
get_ongoing: function() {
return $.getJSON('/api/get/ongoing');
}
};
$.widget("ror.countclock", {
options: {
since: null,
to: null
},
_create: function() {
this._update();
    //TODO: add a counter of elapsed seconds/minutes
},
_setOption: function(key, value) {
this.options[key] = value;
this._update();
},
_update: function() {
if(this.options.since !== null) {
if(this.options.to === null) {
this.element.text("Registrando da " +
config.datetimeformat(this.options.since)
);
} else {
this.element.text("Registrando da " +
config.datetimeformat(this.options.since) +
" a " +
config.datetimeformat(this.options.to)
);
}
} else {
this.element.text('');
}
}
});
$.widget("ror.ongoingrec", {
options: {
rec: null,
state: 0,
filename: null,
/*0 = ongoing, 1 = encoding, 2 = ready to download*/
},
_create: function() {
"use strict";
//convert a Rec into a <tr>
var widget = this;
var rec = this.options.rec;
var view = this.element.data('rec', rec).addClass('ongoing-rec').append(
$('<td/>').append(
$('<input/>').attr('placeholder', 'Nome trasmissione')
)
).append( $('<td class="ongoingrec-time"/>').countclock()).append(
$('<td/>').append($('<a/>')
.addClass('pure-button pure-button-large'))
);
this._update();
view.on("change", "input", function(evt) {
console.log('change', evt);
var prevrec = widget.options.rec;
prevrec.name = $(evt.target).val();
$(evt.target).parents('tr.ongoing-rec').data('rec', prevrec);
widget._trigger("change", evt,
{rec: rec, widget: widget, changed: {name: rec.name}}
);
});
view.on("click", ".rec-stop", function(evt) {
widget._trigger("stop", evt, {rec: rec, widget: widget});
});
return view;
},
_setOption: function(key, value) {
this.options[key] = value;
if(key === 'state') {
if(value < 2) {
this.options.filename = null;
}
}
this._update();
},
_update: function() {
var rec = this.options.rec;
this.element.find('input').val(rec.name);
this.element.find(':ror-countclock').countclock("option", "since",
rec.starttime !== null ? new Date(rec.starttime*1000) : null
);
if(this.options.state > 0) {
this.element.find(':ror-countclock').countclock("option", "to",
rec.endtime !== null ? new Date(rec.endtime*1000) : null
);
} else {
this.element.find(':ror-countclock').countclock("option", "to", null);
}
switch(this.options.state) {
case 0:
this.element.find('a').removeClass('pure-button-disabled rec-encoding rec-download')
.addClass("rec-stop").html(
$('<i/>').addClass('fa fa-stop')).append(' Stop');
break;
case 1:
this.element.find('a').removeClass('rec-stop rec-download')
.addClass("pure-button-disabled rec-encoding").html(
$('<i/>').addClass('fa fa-clock-o')).append(' Aspetta');
break;
case 2:
this.element.find('a').removeClass('pure-button-disabled rec-stop rec-encoding')
.addClass("rec-download")
.prop('href', this.options.filename)
.html(
$('<i/>').addClass('fa fa-download').css('color',
'green')).append(' Scarica');
break;
}
}
});
function poll_job(job_id, callback) {
$.getJSON('/api/jobs/' + job_id)
.done(function(data) {
if(data.job_status !== 'WIP') {
console.log("polling completed for job[" + job_id + "]", data);
callback(data);
} else {
setTimeout(function() { poll_job(job_id, callback); },
config.polling_interval);
}
});
}
function add_new_rec() {
//progress()
return API.create()
.done(function(res) {
      //move on to the second screen
$('#rec-inizia').remove();
$('#rec-normal').show();
show_ongoing([res.rec]);
})
.fail(function() {
alert("C'e' stato qualche problema nella comunicazione col server");
});
}
function stop_rec(rec, widget) {
"use strict";
var xhr = API.stop(rec);
xhr.done(function(res_update) {
if(res_update.status !== true) {
console.error(res_update.status);
return;
}
widget.option("rec", res_update.rec);
var xhr = API.generate(rec)
.done(function(res_gen) {
//TODO: start polling on res.job_id
widget.option("state", 1);
poll_job(res_gen.job_id, function(data) {
if(data.job_status !== 'DONE') {
console.error("Job failed!", data);
} else {
widget.option("filename", res_gen.result);
widget.option("state", 2);
}
});
});
return xhr;
});
return xhr; //API.stop
}
function show_ongoing(ongoing_recs) {
return ongoing_recs.map(function(rec) {
var viewrec = $('<tr/>').ongoingrec({rec: rec});
viewrec.on("ongoingrecstop", function(evt, data) {
stop_rec(data.rec, data.widget);
}).on("ongoingrecchange", function(evt, data) {
    //TODO: update the name on the server
API.update(data.rec.id, data.rec);
});
$('#ongoing-recs-table tbody').prepend(viewrec);
return viewrec;
});
}
$(function() {
"use strict";
/*global getKeys*/
//TODO: get-ongoing
API.get_ongoing()
.done(function(recs) {
$('.add-new-rec').click(add_new_rec);
console.log(recs);
if(getKeys(recs).length !== 0) {
$('#rec-inizia').remove();
$('#rec-normal').show();
show_ongoing(getKeys(recs).map(function(id) { console.log(id); return recs[id]; }));
}
});
});
//POLYFILL for Object.keys
function getKeys(obj) {
var keys = [];
var key;
for(key in obj) {
if(obj.hasOwnProperty(key)) {
keys.push(key);
}
}
return keys;
}
/* vim: set ts=2 sw=2 noet fdm=indent: */
<file_sep>SQLAlchemy==0.8.3
bottle==0.11.6
wsgiref==0.1.2
<file_sep>OUTPUT_DIR='output'
HOST='localhost'
PORT='8000'
DEBUG=True
DB_URI='sqlite:///techrec.db'
AUDIO_OUTPUT='output/'
AUDIO_INPUT='rec/'
<file_sep>from datetime import datetime, timedelta
import os.path
from subprocess import Popen
from config_manager import get_config
def get_timefile_exact(time):
'''
time is of type `datetime`; it is not "rounded" to match the real file;
that work is done in get_timefile(time)
'''
return os.path.join(
get_config()['AUDIO_INPUT'],
time.strftime('%Y-%m/%d/rec-%Y-%m-%d-%H-%M-%S-ror.mp3')
)
def round_timefile(exact):
'''
    This will round the datetime down to the hour, so that it matches the file organization structure
'''
return datetime(exact.year, exact.month, exact.day, exact.hour)
def get_timefile(exact):
return get_timefile_exact(round_timefile(exact))
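# Example (illustration): round_timefile(datetime(2014, 5, 30, 20, 47)) returns
# datetime(2014, 5, 30, 20, 0), so any instant inside an hour maps to the
# hour-long archive file that covers it.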
def get_files_and_intervals(start, end, rounder=round_timefile):
'''
both arguments are datetime objects
    returns an iterator whose elements are (hour, start_cut, end_cut), where
    hour is the rounded datetime that identifies the hour-long archive file
Cuts are expressed in seconds
'''
print '%s < %s' % (start, end)
if end <= start:
raise ValueError("end < start!")
while start <= end:
begin = rounder(start)
start_cut = (start - begin).total_seconds()
if end < begin + timedelta(seconds=3599):
end_cut = (begin + timedelta(seconds=3599) - end).total_seconds()
else:
end_cut = 0
yield (begin, start_cut, end_cut)
start = begin + timedelta(hours=1)
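# Example (illustration): for start = datetime(2014, 5, 30, 20, 0) and
# end = datetime(2014, 5, 30, 20, 50) the generator above yields a single
# tuple (datetime(2014, 5, 30, 20, 0), 0.0, 599.0): nothing to skip at the
# head of the file, 599 seconds to drop from its tail.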
def mp3_join(named_intervals, target):
'''
Note that these are NOT the intervals returned by get_files_and_intervals,
as they do not supply a filename, but only a datetime.
What we want in input is basically the same thing, but with get_timefile()
applied on the first element
    This function makes the (quite usual) assumption that the only start_cut
    (if any) is on the first file, and the only end_cut (if any) is on the
    last file
'''
ffmpeg = 'ffmpeg' # binary name
startskip = None
endskip = None
files = []
for (filename, start_cut, end_cut) in named_intervals:
# this happens only one time, and only at the first iteration
if start_cut:
assert startskip is None
startskip = start_cut
        # this happens only one time, and only at the last iteration
if end_cut:
assert endskip is None
endskip = end_cut
assert '|' not in filename
files.append(filename)
cmdline = [ffmpeg, '-i', 'concat:%s' % '|'.join(files), '-acodec',
'copy']
if startskip is not None:
cmdline += ['-ss', str(startskip)]
if endskip is not None:
        cmdline += ['-t', str(len(files)*3600 - ((startskip or 0) + endskip))]
cmdline += [target]
cmdline += ['-loglevel', 'warning']
return cmdline
def create_mp3(start, end, outfile, options={}, **kwargs):
p = Popen(mp3_join([(get_timefile(begin), start_cut, end_cut)
for begin, start_cut, end_cut
in get_files_and_intervals(start, end)],
outfile))
p.wait()
if p.returncode != 0:
raise OSError("return code was %d" % p.returncode)
return True
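# Usage sketch (illustration only): print the ffmpeg command line built for a
# cut spanning two hourly files.  'a.mp3' and 'b.mp3' are placeholder names
# standing in for real archive files; skip the first 30 minutes of the first
# one and drop the last 45 minutes of the second one.
if __name__ == '__main__':
    print mp3_join([('a.mp3', 1800, 0), ('b.mp3', 0, 2700)], 'demo.mp3')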
<file_sep>/*
TODO:
    fetch the current state !!!
*/
$(document).ready(function(){
$("#searchform").submit(
function (event) {
event.preventDefault();
dataString = $(this).serialize();
var request = $.getJSON('/api/search', dataString);
$("#searchresult").html(" ");
request.done( function(data) {
$.each(data, function(key, val) {
console.log("Extract " + key );
var divstring = "<div class=\"searchresult\" id=\""+ rs_trxarea(key) +"\"> </div>";
$("#searchresult").append( divstring );
// var str = newformstr( key ); // new form
var str = "";
str += "<div class=\"namevalues\">"+val.name+" - <a href=\"\">Scarica</a> <a href=\"\" id=\"delete-"+val.id+"\">Cancella</a></div>";
str += "<div class=\"namevalues\">RECID: "+val.recid+" ID: "+ val.id + " Active " + val.active + "</div>";
str += "<div class=\"timevalues\">["+val.starttime+" >>> "+val.endtime+"]</div>"
$("#"+rs_trxarea(key)).html(str);
$("#delete-"+val.id).click(function(evt) {
evt.preventDefault();
recDelete(val.recid, rs_trxarea(key) );
}
); // End of delete link handler
});
});
});
$("#searchform").submit();
});
// vim: set ts=4 sw=4 noet:
<file_sep>#!/bin/bash
# TODO: move the variables into a configuration file together with the helper functions
# Liquidsoap's archive
W_F_BASEDIR="/rec/ror/"
# file to handle
W_F_FIFODIR="/tmp/rorrec/"
W_F_LOGDIR="/var/log/techrec/"
function err {
echo -e $1
exit
}
function err2file {
echo -e "$1" >> "$2"
}
if test -z "`which ffmpeg`"; then
err "Install ffmpeg"
fi
if test -z "`which inotifywait`"; then
	err "Install inotify-tools"
fi
# EXAMPLE: /rec/ror/2012-11/14/rec-2012-11-14-10-00-00-ror.mp3
function get_basedir {
	# $1 year
	# $2 month
	# $3 day
echo "${W_F_BASEDIR}/${1}-${2}/${3}/"
}
function get_filename {
	# $1 year
	# $2 month
	# $3 day
	# $4 hour
echo "`get_basedir ${1} ${2} ${3}`/rec-${1}-${2}-${3}-${4}-00-00-ror.mp3"
}
function mp3extractor {
echo "COMPUTE MP3extator with $1"
sleep 2 # support inotify latency ..
source $1
# s="$1"
# e="$2"
# outfile="$3"
	# Create dir for files
outdir="`dirname ${outfile}`"
mkdir -p ${outdir}/ &>/dev/null
chmod g+w -R ${outdir}
logfile="${outfile}.log"
echo "S: $s -- E: $e -- OUTFILE:${outfile} -- LOGFILE:${logfile}"
if test -z "${s}" -o -z "${e}" -o -z "${outfile}"; then
err "$0 <starttime> <endtime> <outfile>\n\t$0 2012-11-14-10-20 2012-11-14-12-12 file.mp3\n"
fi
sa=`date -d "${s}" +%Y`
sm=`date -d "${s}" +%m`
sg=`date -d "${s}" +%d`
so=`date -d "${s}" +%H`
si=`date -d "${s}" +%M`
s_u=`date -d "${s}" +%s`
ea=`date -d "${e}" +%Y`
em=`date -d "${e}" +%m`
eg=`date -d "${e}" +%d`
eo=`date -d "${e}" +%H`
ei=`date -d "${e}" +%M`
e_u=`date -d "${e}" +%s`
if test ${s_u} -gt ${e_u}; then
err2file "Start TIME >> END TIME" "${logfile}"
err "Start TIME >> END TIME"
fi
# echo "STARTTIME ${sa} ${sm} ${sg} ${so} ${si} ${s_u}"
# echo "ENDTIME ${ea} ${em} ${eg} ${eo} ${ei} ${e_u}"
# check starttime and endtime dir
sdir="`get_basedir ${sa} ${sm} ${sg}`"
edir="`get_basedir ${ea} ${em} ${eg}`"
test ! -d "${sdir}" && err "starttime dir (${sdir}) error"
test ! -d "${edir}" && err "endtime dir (${edir}) error"
sf=`get_filename ${sa} ${sm} ${sg} ${so}`
ef=`get_filename ${ea} ${em} ${eg} ${eo}`
duration_u=`expr ${e_u} - ${s_u}`
echo ""
echo -e "\tStart file ${sf}"
echo -e "\tEnd file ${ef}"
echo -e "\tDuration: ${duration_u} seconds"
if test ${duration_u} -ge `expr 120 \* 60`; then
err2file "MP3 richiesto > 2 ore..." "${logfile}"
err "$0 works only with two files!"
fi
	# If start == end, a single command is enough
if test "${sf}" = "${ef}" ; then
duration=`expr ${ei} - ${si}`
echo "INIZIO = FINE .. durata (${ei} - ${si}) ${duration}"
cmd="ffmpeg -i ${sf} -acodec copy -t 00:${duration}:00 -ss 00:${si}:00 ${outfile}"
echo "EXEC: ${cmd}"
${cmd}
ret=$?
echo "EXEC RET: $ret"
err2file "CMD: $cmd -- RET: $ret" "${logfile}"
[ $ret -ne 0 ] && err2file "ERRORE INIZIO=FINE" "${logfile}"
rm $1
exit
fi
	intro="/tmp/intro-`basename "${outfile}"`.mp3" # add some randomness for concurrent executions
coda="/tmp/coda-`basename "${outfile}"`.mp3"
echo ""
echo "Compute intro.."
cmd="ffmpeg -i ${sf} -acodec copy -ss 00:${si}:00 -t 00:`expr 60 - ${si}`:00 ${intro}"
echo "EXEC: ${cmd}"
${cmd}
ret=$?
err2file "CMD: $cmd -- RET: $ret" "${logfile}"
[ $ret -ne 0 ] && err "ERRORE ESTRAZIONE INTRO"
echo "Compute end.."
cmd="ffmpeg -i ${ef} -acodec copy -ss 00:00:00 -t 00:${ei}:00 ${coda}"
echo "EXEC: ${cmd}"
${cmd}
ret=$?
err2file "CMD: $cmd -- RET: $ret" "${logfile}"
[ $ret -ne 0 ] && err "ERRORE ESTRAZIONE CODA"
# MERGE
ffmpeg -i concat:${intro}\|${coda} -acodec copy ${outfile}
ret=$?
err2file "CMD: $cmd -- RET: $ret" "${logfile}"
rm ${intro} ${coda}
	[ $ret -ne 0 ] && err2file "ERRORE CONCAT" "${logfile}"
# DELETE FIFO FILES::::
rm $1
exit
}
#
#
# ::: MAIN :::
#
#
# TODO: handle the permissions problem
mkdir -p ${W_F_LOGDIR} ${W_F_FIFODIR} &> /dev/null
chown www-data -R ${W_F_LOGDIR} ${W_F_FIFODIR}
if test ! -d "${W_F_BASEDIR}"; then
err "No AUdio file dir (${W_F_BASEDIR})"
fi
s="$1"
e="$2"
outfile="$3"
# err "$0 <starttime> <endtime> <outfile>\n\t$0 2012-11-14-10-20 2012-11-14-12-12 file.mp3\n
if test ! -z "${s}" -a ! -z "${e}" -a ! -z "${outfile}" ; then
echo "direct call"
echo -e "s=\"${s}\"\ne=\"${e}\"\noutfile=\"${outfile}\"\n" > /tmp/tmpfile
mp3extractor /tmp/tmpfile
exit
fi
echo "No input parameter.. inotify mode"
echo "es: $0 \"2013/04/11 11:25\" \"2013/04/11 11:30\" outfile.mp3"
# example:
# mp3wrapper "2012/11/14 10:20" "2012/11/14 10:25"
while [ 1 = 1 ]; do
res=`inotifywait ${W_F_FIFODIR} 2> /dev/null`
echo ${res}
if test `echo ${res} | grep -c CREATE` -eq 0; then
echo "No relevant task - ${res}"
continue
fi
newfile=`echo ${res} | grep CREATE | sed -e 's/\ CREATE\ //g'`
echo "Newfile: ${newfile}"
mp3extractor "${newfile}" >> ${W_F_LOGDIR}/access.log &
done
exit
<file_sep>import logging
import sys
try:
from sqlalchemy import create_engine, Column, Integer, String, DateTime
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
except:
sys.exit("No SQLAlchemy.")
PAGESIZE = 10
"""
This module describes a single record (the Rec class) and the
records manager (the RecDB class)
"""
Base = declarative_base()
class Rec(Base):
'''Entry on the DB'''
__tablename__ = 'rec'
id = Column(Integer, primary_key=True)
name = Column(String, nullable=True)
starttime = Column(DateTime, nullable=True)
endtime = Column(DateTime, nullable=True)
filename = Column(String, nullable=True)
def __init__(self, name="", starttime=None, endtime=None,
filename=None):
self.name = name
self.starttime = starttime
self.endtime = endtime
self.filename = filename
def serialize(self):
'''json-friendly encoding'''
return {'id': self.id,
'name': self.name,
'starttime': self.starttime,
'endtime': self.endtime,
'filename': self.filename
}
def __repr__(self):
contents = "id:'%s',name:'%s',Start: '%s',End: '%s'" % \
(self.id, self.name, self.starttime, self.endtime)
if self.filename is not None:
contents += ",Filename: '%s'" % self.filename
return "<Rec(%s)>" % contents
class RecDB:
def __init__(self, uri):
self.engine = create_engine(uri, echo=False)
self.conn = self.engine.connect()
self.log = logging.getLogger(name=self.__class__.__name__)
logging.getLogger('sqlalchemy.engine').setLevel(logging.FATAL)
logging.getLogger('sqlalchemy.engine.base.Engine')\
.setLevel(logging.FATAL)
logging.getLogger('sqlalchemy.dialects').setLevel(logging.FATAL)
logging.getLogger('sqlalchemy.pool').setLevel(logging.FATAL)
logging.getLogger('sqlalchemy.orm').setLevel(logging.FATAL)
Base.metadata.create_all(self.engine) # create Database
Session = sessionmaker(bind=self.engine)
self.session = Session()
self.err = ""
def add(self, simplerecord):
print self.session.add( simplerecord )
self.commit()
self.log.info("New Record: %s" % simplerecord)
return ( simplerecord )
def update(self, id, rec):
## TODO: rlist = results list
_rlist = self._search(_id=id)
if not len(_rlist) == 1:
raise ValueError('Too many recs with id=%s' % id)
self.log.debug("DB:: Update request %s:%s " % (id, rec))
self.log.debug("DB:: Update: data before %s" % _rlist[0])
# 2013-11-24 22:22:42
_rlist[0].starttime = rec["starttime"]
_rlist[0].endtime = rec["endtime"]
if 'name' in rec:
_rlist[0].name = rec["name"]
self.log.debug("DB:: Update: data AFTER %s" % _rlist[0])
self.commit()
self.log.debug("DB:: Update complete")
return _rlist[0]
def delete(self, recid):
        _rlist = self._search(_id=recid)
if len(_rlist) == 0:
self.log.info("DB: Delete: no record found!")
self.err = "No rec found"
return False
if len(_rlist) > 1:
            self.log.warning("DB: Delete: multiple records found!")
self.err = "multiple ID Found %s" % (_rlist)
return False
self.session.delete(_rlist[0])
logging.info("DB: Delete: delete complete")
self.commit()
return True
def commit(self):
logging.info("DB: Commit!!")
self.session.commit()
def get_all(self, page=0, page_size=PAGESIZE):
return self._search(page=page, page_size=page_size)
def get_ongoing(self, page=0, page_size=PAGESIZE):
query = self._query_page(self._query_ongoing(), page, page_size)
return query.all()
def _query_ongoing(self, query=None):
if query is None:
query = self.session.query(Rec)
return query.filter(Rec.filename == None)
def _query_page(self, query, page=0, page_size=PAGESIZE):
if page_size:
page_size = int(page_size)
query = query.limit(page_size)
if page:
query = query.offset(page*page_size)
return query
def _query_generic(self, query, _id=None, name=None, starttime=None,
endtime=None):
if _id is not None:
query = query.filter_by(id=_id)
if name is not None:
query = query.filter(Rec.name.like("%"+name+"%"))
if starttime is not None:
_st = starttime
query = query.filter(Rec.starttime > _st)
if endtime is not None:
_et = endtime
query = query.filter(Rec.endtime < _et)
return query
def _search(self, _id=None, name=None, starttime=None,
endtime=None, page=0, page_size=PAGESIZE):
self.log.debug(
"DB: Search => id:%s name:%s starttime:%s endtime=%s" %
(_id, name, starttime, endtime))
query = self.session.query(Rec)
query = self._query_generic(query, _id, name, starttime,
endtime)
query = self._query_page(query, page, page_size)
self.log.debug("Searching: %s" % str(query))
ret = query.all()
return ret
def get_err(self):
print "DB error: %s" % (self.err)
t = self.err
self.err = ""
return t
if __name__ == "__main__":
from datetime import datetime
def printall(queryres):
for record in queryres:
print "Record: %s" % record
db = RecDB()
_mytime = datetime(2014,05,23,15,12,17)
_endtime = datetime(2014,05,24,17,45,17)
a = Rec(name="Mimmo1", starttime=_mytime, endtime=_endtime)
printall( db._search() )
sys.exit("End test job")
# a = Rec(name="Mimmo1", starttime=_mytime, endtime=None)
print "Aggiunto", db.add( a )
printall( db.get_all(page_size=5,page=0) )
print "Mimmo "
printall( db._search(name="Mimmo1"))
print "Search"
printall( db._search(name="Mimmo1",starttime=datetime(2014,05,24,15,16,1) ))
a = db.get_by_id(5)
a.start()
db.delete(1)
db.delete(2)
db.delete(4)
db.delete(1)
printall( db._search() )
<file_sep>console.log("Loading...");
function trx_startbut( code ) { return "startbutton-"+code; }
function trx_stopbut( code ) { return "stopbutton-"+code; }
function trx_downbut( code ) { return "downloadbutton-"+code; }
function trx_endbut( code ) { return "endbutton-"+code; }
function trx_logarea( code ) { return "logarea-"+code; }
function rs_button( code ) { return "button"+code; }
function rs_trxarea( code ) { return "recarea-"+code; }
function rs_trxname( code ) { return "name"; }
function rs_buttonarea( code ) { return "butarea-"+code; }
function rs_inputstart( code ) { return "starttime"; }
function rs_inputend( code ) { return "endtime"; }
function rs_formid(code) { return "form-"+code; }
function rs_dellink(code) { return "dellink-"+code;}
function rs_id(code) { return code; }
var txt_start = "Inizia";
var txt_stop = "Ferma";
var txt_download = "Scarica";
var srvaddr = "/";
var almostone = false;
var noplusbotton = true;
var rec_name_default = "";
/*
TODO: change the logic
When the first button is pressed, do the creation;
for every other button, always and only perform an UPDATE
*/
/**
* Perform Ajax async loading
**/
function newformstr ( recid , butflag=false )
{
var formid = rs_formid( recid );
var str = "<form id=\""+formid+"\" name=\""+formid+"\" action=\"#\">";
if (butflag) {
str = str + "<input type=\"button\" name=\""+trx_startbut(recid)+"\" id=\""+trx_startbut(recid)+"\" ";
str = str + " class=\"recbutton\" value=\"Inizia\" />";
str = str + "<input type=\"button\" name=\""+trx_stopbut(recid)+"\" id=\""+trx_stopbut(recid)+"\" ";
str = str + " class=\"recbutton\" value=\"Stop\" />";
str = str + "<input type=\"submit\" name=\""+trx_downbut(recid)+"\" id=\""+trx_downbut(recid)+"\" ";
str = str + " class=\"recbutton\" value=\"Salva\" />";
str = str + "<input type=\"submit\" name=\""+trx_endbut(recid)+"\" id=\""+trx_endbut(recid)+"\" ";
str = str + " class=\"recbutton\" value=\"Download\" />";
}
str = str + "<input type=\"hidden\" id=\"recid\" name=\"recid\" value=\""+recid+"\" />";
str = str + "<input type=\"text\" id=\""+rs_trxname(recid)+"\" name=\""+rs_trxname(recid)+"\" />";
str = str + "<input type=\"text\" id=\""+rs_inputstart(recid)+"\" name=\""+rs_inputstart(recid)+"\" />";
str = str + "<input type=\"text\" id=\""+rs_inputend(recid)+"\" name=\""+rs_inputend(recid)+"\" />";
if (! butflag) {
str = str + "<input type=\"button\" name=\""+trx_downbut(recid)+"\" id=\""+trx_downbut(recid)+"\" ";
str = str + " class=\"downloadbutton\" value=\"scarica\" />";
}
/*
str = str + "<input type=\"text\" id=\"name\" name=\"name\" />";
str = str + "<input type=\"text\" id=\"starttime\" name=\"starttime\" />";
str = str + "<input type=\"text\" id=\"endtime\" name=\"endtime\" /> ";
*/
str = str + "</form>";
return str;
}
/**
* GetActive Recs
**/
function rec_active( recid ) {
dataString = "";
var request = RecAjax("search", dataString);
request.done( function(data) {
$.each(data, function(key, val) {
console.log("Key " + key + " > VAL " + val );
$("#"+trx_logarea( recid )).append( "Key " + key + " > VAL " + val + "<br>" );
});
console.log("Req OK: "+ data);
// console.log("request"+ req);
ChangeState(recid, trx_downbut(recid) , trx_endbut(recid));
});
}
/**
* New record
**/
function rec_new( )
{
var myDate = new Date()
console.log("New ID "+ myDate.getTime());
var recid = "rec-"+ myDate.getTime();
console.log("[rec_new] New Rec " + recid);
$("#buttonscontainer").append( "<div id=\""+rs_trxarea(recid)+"\" class=\"recarea\"> </div>" );
$("#"+rs_trxarea(recid)).append( "<div id=\""+rs_buttonarea(recid)+"\" class=\"buttonarea\"> </div>" );
console.log("[rec_new"+recid+"] add div (TRXArea, ButtonArea) ok " );
var formid = rs_formid( recid );
var str = newformstr(recid, butflag=true);
$("#"+rs_buttonarea(recid)).append( str );
$("#"+trx_stopbut(recid)).hide();
$("#"+trx_downbut(recid)).hide();
$("#"+trx_endbut(recid)).hide();
console.log("[rec_new "+recid+"] Form OK");
$("#"+rs_buttonarea(recid)).append( "<div class=\"dellinkarea\" > <a href=\"#\" id="+rs_dellink(recid)+"> cancella</a> </div>" );
// INSERT AND POPULATE BUTTON AREA
$("#"+rs_trxarea(recid)).append( "<div id=\""+trx_logarea(recid)+"\" class=\"logarea\"> Nuova trasmissione </div>" );
// Bind the Delete Links
$("#"+rs_dellink(recid)).click(function(){
console.log("Remove " + rs_trxarea(recid) + "[ID"+recid+"]");
// $("#"+rs_trxarea(recid)).remove();
recDelete (recid,rs_trxarea(recid));
});
	// FORM SUBMIT: THE REC IS STOPPED AND MUST BE PROCESSED
$("#"+formid).submit(function(event){
// Immediately, mark the end time (stop action)
ChangeState(recid, trx_downbut(recid) , trx_endbut(recid));
// Force a Name
while (true) {
if ( $("#"+rs_trxname(recid)).val() == "" )
{
var tmpname = prompt("Nessun nome di trasmissione!!!");
$("#"+rs_trxname(recid)).val(tmpname);
$("#"+trx_logarea(recid)).append("Titolo: <b>"+ tmpname +"</b> <br/>");
}
else { break; }
}
event.preventDefault();
// Update data (send to server) in order to save some information
recUpdate(recid);
recStart(recid);
}); // End of form SUBMIT
// Bind the STOP button
$("#"+trx_stopbut(recid)).click( function(event){
event.preventDefault();
ChangeState(recid, trx_stopbut(recid) , trx_downbut(recid));
recUpdate(recid);
}); // End of STOP button
// Bind the START button
$("#"+trx_startbut(recid)).click( function(event){
// Immediately, mark the start time (start action) and send it to Server
ChangeState(recid, trx_startbut(recid) , trx_stopbut(recid));
event.preventDefault();
recNew( recid );
}); // End of START button
console.log("New form has been built.");
}
/* Delete Record */
function recDelete ( recid, targetarea ) {
var formid = rs_formid( recid );
var dataString = "recid="+recid
console.log("Del rec: "+dataString);
var req_del = RecAjax("delete", dataString);
req_del.done (function(data) {
$.each(data, function(del_key, del_val) {
console.log("K:V " + del_key +":"+del_val );
if (del_key == "message") {
$("#"+targetarea).fadeOut( 200, function() { $(this).remove(); });
			console.log("delete area "+targetarea);
}
if (del_key == "error") {
alert("Impossibile cancellare elemento:\n" + del_val );
}
});
});
}
/* New Record */
function recNew ( recid ) {
var formid = rs_formid( recid );
var dataString = $("#"+formid).serialize();
console.log("New rec: "+dataString);
var request = RecAjax("create", dataString);
request.done( function(data) {
$.each(data, function(key, val) {
console.log("Received (K:V) ("+key+":"+val+")") ;
if (key == "msg") {
$("#"+trx_logarea(recid)).html("Nuova Registrazione </br> (recid:"+recid+") </br>");
$("#"+trx_logarea(recid)).append("Inizio: "+ $("#"+rs_inputstart(recid)).val() +"<br/>");
}
if (key == "error") {
$("#"+trx_logarea( recid )).html("Errore: impossibile creare una nuova registrazione"+val+" </ br>");
}
});
} );
return request;
}
/* Update Record */
function recUpdate( recid ) {
var formid = rs_formid( recid );
var dataString = $("#"+formid).serialize();
console.log("Sending Ajax Update request: "+ dataString);
//event.preventDefault();
var request = RecAjax("update", dataString );
request.done( function(data) {
$.each(data, function(key, val) {
console.log("recUpdate receive (k:v) ("+key+":"+val+")" );
if (key == "message") {
var str = "";
str += "<b>RecID</b> "+ recid + "</br>"
str += "<b>nome</b> "+ $("#"+rs_trxname(recid)).val() + "</br>"
str += "<b>Inizio</b> "+ $("#"+rs_inputstart(recid)).val() + "</br>"
str += "<b>Fine</b> "+ $("#"+rs_inputend(recid)).val() + "</br>"
$("#"+trx_logarea(recid)).html( str );
// if all elements have been recorded
if ($("#"+rs_trxname(recid)).val() != "") {
$("#"+trx_logarea(recid)).append( "<b>In Elaborazione</b>" );
}
}
if (key == "error") {
$("#"+trx_logarea( recid )).append( "Error:" + val +"<br>" );
}
}); // end of each
}); // end of request.done
}
/*
*
* AJAX REQUEST
*
*/
function RecAjax(apipath, dataString ) {
var srv = srvaddr + "api/" + apipath ;
var request = $.ajax({
type: "POST",
cache: false,
url: srv,
data: dataString,
dataType: "json"
});
request.fail(function (jqXHR, textStatus, errorThrown){
console.error("The following error occured: "+ jqXHR.status +"-"+ textStatus + "-" + errorThrown );
if (jqXHR.status == 0 && jqXHR.readyState === 4)
{
alert("Errore di connessione, impossibile inviare i dati al server "+ srv);
} else {
alert("Error: "+jqXHR.status +"\nTextStatus: "+ textStatus + "\n Ready State "+jqXHR.readyState+"\n" + errorThrown );
}
});
return request;
}
/*
 * GetNow (date formatter)
*/
function getnow()
{
var myDate = new Date()
var displayDate = myDate.getFullYear() + '/' + (myDate.getMonth()+1) + '/' + myDate.getDate();
displayDate = displayDate +' '+ myDate.getHours()+':'+myDate.getMinutes()+':'+myDate.getSeconds();
return displayDate;
}
/*
FUNCTION: CHANGE STATE (gui)
*/
function ChangeState(recid, from, to) {
console.log("ChangeState: " + from + " --> " + to );
$("#"+from).css("display", "none");
$("#"+to).css("display", "inline");
// take the date
var displayDate = getnow();
if ( from == trx_startbut(recid) ) {
$("#"+rs_inputstart(recid)).val( displayDate );
console.log("ChangeState: set "+rs_inputstart(recid)+ " to "+ displayDate )
}
if ( from == trx_stopbut(recid) ) {
$("#"+rs_inputend(recid)).val( displayDate );
console.log("ChangeState: set '"+rs_inputend(recid)+ "' to "+ displayDate )
}
if ( from == trx_downbut(recid) ) {
$("input[type=submit]").attr("disabled", "disabled");
console.log("ChangeState: set '"+rs_inputend(recid)+ "' to "+ displayDate );
}
} // End function ChangeState
// vim: set ts=4 sw=4 et:
<file_sep>TECHREC
=======
A Python2 web application that assists radio speakers in recording their shows.
At the moment, it relies on some details that are specific to our radio (like
the directory/format of the continuous recording).
Implementation details
======================
It is based on bottle, to get a minimal framework. Simple APIs are offered
through it, and the static site uses them.
Here are some examples of API usage
Create
--------
starttime-rec-1385231288390: 2013/11/23 19:32:49
endtime-rec-1385231288390: 2013/11/23 19:32:49
recid: rec-1385231288390
name-rec-1385231288390: adasd
op: new
Update
-------
starttime-rec-1385231288390: 2013/11/23 19:32:49
endtime-rec-1385231288390: 2013/11/23 19:32:49
recid: rec-1385231288390
name-rec-1385231288390: adasd
op: update
Delete
------
recid: rec-1385231288390
op: delete
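
Example (HTTP API)
------------------
A minimal sketch of driving the current API from Python. The routes are the
ones exposed by server.py, host and port are the defaults from
default_config.py; the requests library (not listed in requirements.txt) and
the show name are assumptions used only for illustration.

    import requests

    BASE = 'http://localhost:8000/api'

    # open a new recording; the server records the start time
    rec = requests.post(BASE + '/create').json()['rec']

    # stop it and give it a name (times travel as unix timestamps)
    requests.post(BASE + '/update/%s' % rec['id'],
                  data={'starttime': rec['starttime'], 'name': 'example show'})

    # ask the server to cut the mp3, then poll the job until it is DONE
    job = requests.post(BASE + '/generate', data={'id': rec['id']}).json()
    status = requests.get(BASE + '/jobs/%s' % job['job_id']).json()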
<file_sep>import os
from datetime import datetime
import logging
logger = logging.getLogger('server')
from functools import partial
from bottle import Bottle, request, static_file, redirect, abort, response
from techrec import Rec, RecDB
from processqueue import get_process_queue
from forge import create_mp3
from config_manager import get_config
def date_read(s):
return datetime.fromtimestamp(int(s))
def date_write(dt):
return dt.strftime('%s')
def rec_sanitize(rec):
d = rec.serialize()
d['starttime'] = date_write(d['starttime'])
d['endtime'] = date_write(d['endtime'])
return d
class DateApp(Bottle):
'''
This application will expose some date-related functions; it is intended to
be used when you need to know the server's time on the browser
'''
def __init__(self):
Bottle.__init__(self)
self.route('/help', callback=self.help)
self.route('/date', callback=self.date)
self.route('/custom', callback=self.custom)
def date(self):
n = datetime.now()
return {
'unix': n.strftime('%s'),
'isoformat': n.isoformat(),
'ctime': n.ctime()
}
def custom(self):
n = datetime.now()
if 'strftime' not in request.query:
abort(400, 'Need argument "strftime"')
response.content_type = 'text/plain'
return n.strftime(request.query['strftime'])
def help(self):
response.content_type = 'text/plain'
return \
'/date : get JSON dict containing multiple formats of now()\n' + \
'/custom?strftime=FORMAT : get now().strftime(FORMAT)'
class RecAPI(Bottle):
def __init__(self):
Bottle.__init__(self)
self._route()
self.db = RecDB(get_config()['DB_URI'])
def _route(self):
self.post('/create', callback=self.create)
self.post('/delete', callback=self.delete)
self.post('/update/<recid:int>', callback=self.update)
self.post('/generate', callback=self.generate)
self.get('/help', callback=self.help)
self.get('/', callback=self.help)
self.get('/get/search', callback=self.search)
self.get('/get/ongoing', callback=self.get_ongoing)
self.get('/jobs', callback=self.running_jobs)
self.get('/jobs/<job_id:int>', callback=self.check_job)
def create(self):
req = dict(request.POST.allitems())
ret = {}
print "Server:: Create request %s " % req
starttime = datetime.now()
name = ""
endtime = datetime.now()
print "Starttime %s EndTime %s" %\
(starttime, endtime)
rec = Rec(name=name,
starttime=starttime,
endtime=endtime)
ret = self.db.add(rec)
return self.rec_msg("Nuova registrazione creata! (id:%d)" % ret.id,
rec=rec_sanitize(rec))
def delete(self):
req = dict(request.POST.allitems())
logging.info("Server: request delete %s " % (req))
if 'id' not in req:
return self.rec_err("No valid ID")
if self.db.delete(req["id"]):
return self.rec_msg("DELETE OK")
else:
return self.rec_err("DELETE error: %s" % (self.db.get_err()))
def update(self, recid):
req = dict(request.POST.allitems())
newrec = {}
now = datetime.now()
if 'starttime' not in req:
newrec['starttime'] = now
else:
newrec['starttime'] = date_read(req['starttime'])
if "endtime" not in req:
newrec['endtime'] = now
else:
newrec['endtime'] = date_read(req['endtime'])
if 'name' in req:
newrec["name"] = req["name"]
try:
            logger.info("before update")
result_rec = self.db.update(recid, newrec)
            logger.info("after update")
except Exception as exc:
return self.rec_err("Errore Aggiornamento", exception=exc)
return self.rec_msg("Aggiornamento completato!",
rec=rec_sanitize(result_rec))
def generate(self):
        # fetch the rec in question
recid = dict(request.POST.allitems())['id']
rec = self.db._search(_id=recid)[0]
        if rec.filename is not None and os.path.exists(rec.filename):
return {'status': 'ready',
'message': 'The file has already been generated at %s' %
rec.filename,
                    'rec': rec_sanitize(rec)
}
rec.filename = 'ror-%s-%s.mp3' % \
(rec.starttime.strftime('%y%m%d_%H%M'),
filter(lambda c: c.isalpha(), rec.name))
self.db.update(rec.id, rec.serialize())
job_id = get_process_queue().submit(
create_mp3,
start=rec.starttime,
end=rec.endtime,
outfile=os.path.join(get_config()['AUDIO_OUTPUT'], rec.filename))
print "SUBMITTED: %d" % job_id
return self.rec_msg("Aggiornamento completato!",
job_id=job_id,
result='/output/' + rec.filename,
rec=rec_sanitize(rec))
def check_job(self, job_id):
try:
job = get_process_queue().check_job(job_id)
except ValueError:
abort(400, 'job_id not valid')
def ret(status):
return {'job_status': status, 'job_id': job_id}
if job is True:
return ret('DONE')
if job is False:
abort(404, 'No such job has ever been spawned')
else:
if job.ready():
try:
res = job.get()
return res
except Exception as exc:
r = ret('FAILED')
r['exception'] = str(exc)
return r
return ret('WIP')
def running_jobs(self):
res = {}
res['last_job_id'] = get_process_queue().last_job_id
res['running'] = get_process_queue().jobs.keys()
return res
def search(self, args=None):
req = dict()
req.update(request.GET.allitems())
print "Search request: %s" % (req)
values = self.db._search(**req)
from pprint import pprint
logger.debug("Returned Values %s" %
pprint([r.serialize() for r in values]))
ret = {}
for rec in values:
ret[rec.id] = rec_sanitize(rec)
logging.info("Return: %s" % ret)
return ret
def get_ongoing(self):
return {rec.id: rec_sanitize(rec)
for rec in self.db.get_ongoing()}
# @route('/help')
def help(self):
return "<h1>help</h1><hr/>\
<h2>/get, /get/, /get/<id> </h2>\
<h3>Get Info about rec identified by ID </h3>\
\
<h2>/search, /search/, /search/<key>/<value></h2>\
<h3>Search rec that match key/value (or get all)</h3>\
\
<h2>/delete/<id> </h2>\
<h3>Delete rec identified by ID </h3>\
<h2>/update </h2>\
<h3>Not implemented.</h3>"
# JSON UTILS
def rec_msg(self, msg, status=True, **kwargs):
d = {"message": msg, "status": status}
d.update(kwargs)
return d
def rec_err(self, msg, **kwargs):
return self.rec_msg(msg, status=False, **kwargs)
class RecServer:
def __init__(self):
self._app = Bottle()
self._route()
self.db = RecDB(get_config()['DB_URI'])
def _route(self):
### This is the API part of the app
# TODO: move to namespace /api/
# TODO: create a "sub-application"
## Static part of the site
self._app.route('/output/<filepath:path>',
callback=lambda filepath:
static_file(filepath,
root=get_config()['AUDIO_OUTPUT']))
self._app.route('/static/<filepath:path>',
callback=lambda filepath: static_file(filepath,
root='static/'))
self._app.route('/', callback=lambda: redirect('/new.html'))
self._app.route('/new.html',
callback=partial(static_file, 'new.html',
root='pages/'))
self._app.route('/tempo.html',
callback=partial(static_file, 'tempo.html',
root='pages/'))
if __name__ == "__main__":
configs = ['default_config.py']
if 'TECHREC_CONFIG' in os.environ:
for conf in os.environ['TECHREC_CONFIG'].split(':'):
if not conf:
continue
path = os.path.realpath(conf)
if not os.path.exists(path):
logger.warn("Configuration file '%s' does not exist; skipping"
% path)
continue
configs.append(path)
os.chdir(os.path.dirname(os.path.realpath(__file__)))
for conf in configs:
get_config().from_pyfile(conf)
c = RecServer()
c._app.mount('/date', DateApp())
c._app.mount('/api', RecAPI())
c._app.run(host=get_config()['HOST'], port=get_config()['PORT'],
debug=get_config()['DEBUG'])
|
807ca91fac731327c447d43ed1dbf77b3b4b228f
|
[
"JavaScript",
"Markdown",
"Python",
"Text",
"Shell"
] | 11
|
Python
|
vinc3nt/techrec
|
3c707bb47fe310d15cbc58434cb1bbaeb62edd15
|
4f2eee71f3248a2265a74ba0108456b613fd18a3
|
refs/heads/master
|
<repo_name>DJL0504/Git<file_sep>/aisile-parent/aisile-sellergoods-service/src/main/java/com/aisile/sellergoods/service/impl/SpeciflcationServiceImpl.java
package com.aisile.sellergoods.service.impl;
import java.util.List;
import java.util.Map;
import org.springframework.beans.factory.annotation.Autowired;
import com.aisile.mapper.TbSpecificationMapper;
import com.aisile.mapper.TbSpecificationOptionMapper;
import com.aisile.pojo.TbSpecification;
import com.aisile.pojo.TbSpecificationExample;
import com.aisile.pojo.TbSpecificationExample.Criteria;
import com.aisile.pojo.TbSpecificationOption;
import com.aisile.pojo.TbSpecificationOptionExample;
import com.aisile.pojo.entity.PageResult;
import com.aisile.pojo.entity.group.Specification;
import com.aisile.sellergoods.service.SpecificationService;
import com.alibaba.dubbo.config.annotation.Service;
import com.github.pagehelper.Page;
import com.github.pagehelper.PageHelper;
@Service
public class SpeciflcationServiceImpl implements SpecificationService {
@Autowired
private TbSpecificationMapper tbSpecificationMapper;
@Autowired
private TbSpecificationOptionMapper tbSpecificationOptionMapper;
@Override
public List<TbSpecification> findAll() {
return tbSpecificationMapper.selectByExample(null);
}
@Override
public PageResult findPage(int pageNum, int pageSize) {
PageHelper.startPage(pageNum, pageSize);
Page<TbSpecification> pageinfo = (Page<TbSpecification>) tbSpecificationMapper.selectByExample(null);
return new PageResult(pageinfo.getTotal(), pageinfo.getResult());
}
@Override
public PageResult findSearch(int pageNum, int pageSize, TbSpecification tbSpecification) {
PageHelper.startPage(pageNum, pageSize);
		// multi-criteria query
TbSpecificationExample example = new TbSpecificationExample();
		// start building the criteria
Criteria criteria = example.createCriteria();
if(tbSpecification.getSpecName() != null && !tbSpecification.getSpecName().equals("")) {
criteria.andSpecNameLike("%"+tbSpecification.getSpecName()+"%");
}
Page<TbSpecification> pageinfo = (Page<TbSpecification>) tbSpecificationMapper.selectByExample(example);
return new PageResult(pageinfo.getTotal(), pageinfo.getResult());
}
@Override
public void add(Specification specification) {
		// insert the specification name record
TbSpecification tbspe = specification.getSpecification();
tbSpecificationMapper.insert(tbspe);
		// insert each specification option in a loop
for (TbSpecificationOption option : specification.getSpecificationOptionList()) {
option.setSpecId(tbspe.getId());
tbSpecificationOptionMapper.insert(option);
}
}
@Override
public void update(Specification specification) {
		// update the specification name record
TbSpecification tbspe = specification.getSpecification();
tbSpecificationMapper.updateByPrimaryKey(tbspe);
		// delete the existing options that belong to this specification
TbSpecificationOptionExample optionExample = new TbSpecificationOptionExample();
com.aisile.pojo.TbSpecificationOptionExample.Criteria criteria = optionExample.createCriteria();
criteria.andSpecIdEqualTo(tbspe.getId());
tbSpecificationOptionMapper.deleteByExample(optionExample);
		// re-insert each specification option in a loop
for (TbSpecificationOption option : specification.getSpecificationOptionList()) {
option.setSpecId(tbspe.getId());
tbSpecificationOptionMapper.insert(option);
}
}
@Override
public Specification findOne(Long id) {
		// the specification entity
TbSpecification tbspe = tbSpecificationMapper.selectByPrimaryKey(id);
		// query the option rows by the specification id
TbSpecificationOptionExample optionExample = new TbSpecificationOptionExample();
com.aisile.pojo.TbSpecificationOptionExample.Criteria criteria = optionExample.createCriteria();
criteria.andSpecIdEqualTo(tbspe.getId());
List<TbSpecificationOption> list = tbSpecificationOptionMapper.selectByExample(optionExample);
Specification specification = new Specification();
specification.setSpecification(tbspe);
specification.setSpecificationOptionList(list);
return specification;
}
@Override
public void delete(Long[] ids) {
for (Long id : ids) {
			// delete each specification name record in the loop
tbSpecificationMapper.deleteByPrimaryKey(id);
			// delete the options that belong to this specification
TbSpecificationOptionExample optionExample = new TbSpecificationOptionExample();
com.aisile.pojo.TbSpecificationOptionExample.Criteria criteria = optionExample.createCriteria();
			criteria.andSpecIdEqualTo(id); // filter by the specification id
tbSpecificationOptionMapper.deleteByExample(optionExample);
}
}
@Override
public List<Map> selectOptionList() {
return tbSpecificationMapper.selectOptionList();
}
}
<file_sep>/aisile-parent/aisile-manager-web/src/main/java/com/aisile/manager/controller/GoodsController.java
package com.aisile.manager.controller;
import java.util.List;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import com.aisile.pojo.TbGoods;
import com.aisile.pojo.entity.PageResult;
import com.aisile.pojo.entity.Result;
import com.aisile.pojo.entity.group.Goods;
import com.aisile.sellergoods.service.GoodsService;
import com.alibaba.dubbo.config.annotation.Reference;
@RestController
@RequestMapping("/goods")
public class GoodsController {
@Reference
private GoodsService goodsService;
@RequestMapping("/findAll")
public List<TbGoods> findAll(){
return goodsService.findAll();
}
@RequestMapping("/findPage")
public PageResult findPage( int page,int rows){
return goodsService.findPage(page, rows);
}
@RequestMapping("/findSearch")
public PageResult findSearch(int page,int rows,@RequestBody TbGoods tbGoods) {
/*String sellerName = SecurityContextHolder.getContext().getAuthentication().getName();
tbGoods.setSellerId(sellerName);*/
return goodsService.findSearch(page, rows, tbGoods);
}
@RequestMapping("/add")
public Result add(@RequestBody Goods goods ){
try {
			// get the seller id from the authenticated user
String seler_id = SecurityContextHolder.getContext().getAuthentication().getName();
goods.getGoods().setSellerId(seler_id);
goodsService.add(goods);
return new Result(true, "添加成功");
} catch (Exception e) {
System.out.println(e.getMessage());
return new Result(false, "添加失败");
}
}
@RequestMapping("/update")
public Result update(@RequestBody Goods goods) {
try {
			// check whether this goods record belongs to the current seller
Goods goods2 = goodsService.findOne(goods.getGoods().getId());
			// get the id of the currently logged-in seller
String sellerId = SecurityContextHolder.getContext().getAuthentication().getName();
			// if the submitted seller id is not the logged-in user's id, this is an illegal operation
if (!goods2.getGoods().getSellerId().equals(sellerId) || !goods.getGoods().getSellerId().equals(sellerId)) {
return new Result(false, "非法操作");
}
goodsService.update(goods);
return new Result(true, "修改成功");
} catch (Exception e) {
System.out.println(e.getMessage());
return new Result(false, "修改失败");
}
}
@RequestMapping("/findOne")
public Goods findOne(Long id) {
return goodsService.findOne(id);
}
@RequestMapping("/dele")
public Result delete(Long [] ids) {
try {
goodsService.delete(ids);
return new Result(true, "删除成功");
} catch (Exception e) {
System.out.println(e.getMessage());
return new Result(false, "删除失败");
}
}
	// reject (audit)
@RequestMapping("/turnDown")
public Result turnDown(Long [] ids) {
try {
goodsService.turnDown(ids);
return new Result(true, "驳回成功");
} catch (Exception e) {
System.out.println(e.getMessage());
return new Result(false, "驳回失败");
}
}
	// approve (audit passed)
@RequestMapping("/openGo")
public Result openGo(Long [] ids) {
try {
goodsService.openGo(ids);
return new Result(true, "审核成功");
} catch (Exception e) {
System.out.println(e.getMessage());
return new Result(false, "审核失败");
}
}
}
<file_sep>/aisile-parent/aisile-shop-web/src/main/webapp/js/service/itemCatService.js
// wraps all calls to the backend; requires $http
app.service('itemCatService',function($http){
	// query all records
this.findAll = function(){
return $http.get('../itemCat/findAll.do');
}
	// paging
this.findPage = function(page,rows){
return $http.get('../itemCat/findPage.do?page='+page+"&rows="+rows);
}
	// paged query with search criteria
this.findSearch = function(page,rows,searchEntity){
return $http.post('../itemCat/findSearch.do?page='+page+"&rows="+rows,searchEntity);
}
	// update
this.update = function(entity){
return $http.post('../itemCat/update.do',entity);
}
	// add
this.add = function(entity){
return $http.post('../itemCat/add.do',entity);
}
	// find one object by id
this.findOne = function(id){
return $http.post('../itemCat/findOne.do?id='+id);
}
	// delete
this.dele = function(selectIds){
return $http.get('../itemCat/dele.do?ids='+selectIds);
}
	// find the list by parent id
this.findAllByParentId = function(id){
return $http.get('../itemCat/findAllByParentId.do?parentId='+id);
}
})<file_sep>/aisile-parent/aisile-sellergoods-interface/src/main/java/com/aisile/sellergoods/service/SellerService.java
package com.aisile.sellergoods.service;
import java.util.List;
import com.aisile.pojo.TbSeller;
import com.aisile.pojo.entity.PageResult;
import com.aisile.pojo.entity.Result;
public interface SellerService {
public List<TbSeller> findAll();
public PageResult findPage( int page,int rows);
public PageResult findSearch(int page,int rows,TbSeller tbSeller);
public void add(TbSeller tbSeller );
public void update(TbSeller tbSeller);
public TbSeller findOne(String id);
public Result delete(Long [] ids);
public void updateStatus(TbSeller tbSeller, String status);
}
<file_sep>/aisile-parent/aisile-sellergoods-interface/src/main/java/com/aisile/sellergoods/service/SpecificationService.java
package com.aisile.sellergoods.service;
import java.util.List;
import java.util.Map;
import com.aisile.pojo.TbSpecification;
import com.aisile.pojo.entity.PageResult;
import com.aisile.pojo.entity.group.Specification;
public interface SpecificationService {
/**
	 * Query all records
* @return
*/
public List<TbSpecification> findAll();
/**
	 * Paging query
* @param pageNum
* @param pageSize
* @return
*/
public PageResult findPage(int pageNum,int pageSize);
/**
	 * Fuzzy (keyword) search
* @param pageNum
* @param pageSize
* @param tbBrand
* @return
*/
public PageResult findSearch(int pageNum,int pageSize,TbSpecification tbSpecification);
/**
	 * Add method
* @param tnBrand
*/
public void add(Specification specification);
/**
	 * Update method
* @param tbBrand
*/
public void update(Specification specification);
/**
	 * Echo back for editing; returns a single object
* @param id
* @return
*/
public Specification findOne(Long id);
/**
	 * Delete (batch delete)
* @param ids
*/
public void delete(Long [] ids);
/**
	 * Dropdown list of specification names
* @return
*/
public List<Map> selectOptionList();
}
<file_sep>/aisile-parent/aisile-manager-web/src/main/java/com/aisile/manager/controller/ContentController.java
package com.aisile.manager.controller;
import java.util.List;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import com.aisile.content.service.ContentService;
import com.aisile.pojo.TbContent;
import com.aisile.pojo.TbSeller;
import com.aisile.pojo.entity.PageResult;
import com.aisile.pojo.entity.Result;
import com.alibaba.dubbo.config.annotation.Reference;
@RestController
@RequestMapping("/content")
public class ContentController {
@Reference
private ContentService contentService;
@RequestMapping("/findAll")
public List<TbContent> findAll(){
return contentService.findAll();
}
@RequestMapping("/findPage")
public PageResult findPage( int page,int rows){
return contentService.findPage(page, rows);
}
@RequestMapping("/findSearch")
public PageResult findSearch(int page,int rows,@RequestBody TbContent tbContent) {
return contentService.findSearch(page, rows, tbContent);
}
@RequestMapping("/add")
public Result add(@RequestBody TbContent tbContent ){
try {
contentService.add(tbContent);
return new Result(true, "添加成功");
} catch (Exception e) {
System.out.println(e.getMessage());
return new Result(false, "添加失败");
}
}
@RequestMapping("/update")
public Result update(@RequestBody TbContent tbContent) {
try {
contentService.update(tbContent);
return new Result(true, "修改成功");
} catch (Exception e) {
System.out.println(e.getMessage());
return new Result(false, "修改失败");
}
}
@RequestMapping("/updateStatus")
public Result updateStatus(@RequestBody TbSeller tbSeller,String status) {
try {
contentService.updateStatus(tbSeller,status);
return new Result(true, "修改成功");
} catch (Exception e) {
System.out.println(e.getMessage());
return new Result(false, "修改失败");
}
}
@RequestMapping("/findOne")
public TbContent findOne(Long id) {
return contentService.findOne(id);
}
@RequestMapping("/dele")
public Result delete(Long [] ids) {
try {
contentService.delete(ids);
return new Result(true, "删除成功");
} catch (Exception e) {
System.out.println(e.getMessage());
return new Result(false, "删除失败");
}
}
@RequestMapping("/shield")
public Result shield(Long[] ids) {
try {
contentService.shield(ids);
return new Result(true, "屏蔽成功");
} catch (Exception e) {
System.out.println(e.getMessage());
return new Result(false, "屏蔽失败");
}
}
@RequestMapping("/openq")
public Result openq(Long[] ids) {
try {
contentService.openq(ids);
return new Result(true, "开启成功");
} catch (Exception e) {
System.out.println(e.getMessage());
return new Result(false, "开启失败");
}
}
}
<file_sep>/aisile-parent/aisile-content-interface/src/main/java/com/aisile/content/service/ContentService.java
package com.aisile.content.service;
import java.util.List;
import com.aisile.pojo.TbContent;
import com.aisile.pojo.TbSeller;
import com.aisile.pojo.entity.PageResult;
public interface ContentService {
public List<TbContent> findAll();
public PageResult findPage( int page,int rows);
public PageResult findSearch(int page,int rows,TbContent tbContent);
public void add(TbContent tbContent );
public void update(TbContent tbContent);
public TbContent findOne(Long id);
public void delete(Long [] ids);
public void updateStatus(TbSeller tbSeller, String status);
/**
	 * Query the content list by ad category id
* @param key
* @return
*/
public List<TbContent> findByCategoryId(Long categoryId);
	// disable (shield)
public void shield(Long [] ids);
	// enable
public void openq(Long[] ids);
}
<file_sep>/aisile-parent/aisile-manager-web/src/main/webapp/js/service/contentCategoryService.js
// wraps all calls to the backend; requires $http
app.service('contentCategoryService',function($http){
	// query all records
this.findAll = function(){
return $http.get('../contentCategory/findAll.do');
}
	// paging
this.findPage = function(page,rows){
return $http.get('../contentCategory/findPage.do?page='+page+"&rows="+rows);
}
	// paged query with search criteria
this.findSearch = function(page,rows,searchEntity){
return $http.post('../contentCategory/findSearch.do?page='+page+"&rows="+rows,searchEntity);
}
	// update
this.update = function(entity){
return $http.post('../contentCategory/update.do',entity);
}
	// add
this.add = function(entity){
return $http.post('../contentCategory/add.do',entity);
}
	// find one object by id
this.findOne = function(id){
return $http.post('../contentCategory/findOne.do?id='+id);
}
	// delete
this.dele = function(selectIds){
return $http.get('../contentCategory/dele.do?ids='+selectIds);
}
	// find the list by parent id
this.findAllByParentId = function(id){
return $http.get('../contentCategory/findAllByParentId.do?parentId='+id);
}
})<file_sep>/aisile-parent/aisile-portal-web/src/main/webapp/js/base_pagination.js
var app = angular.module('aisile',['pagination']);<file_sep>/aisile-parent/aisile-sellergoods-service/src/main/java/com/aisile/sellergoods/service/impl/ItemCatServiceImpl.java
package com.aisile.sellergoods.service.impl;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import com.aisile.mapper.TbItemCatMapper;
import com.aisile.pojo.TbItemCat;
import com.aisile.pojo.TbItemCatExample;
import com.aisile.pojo.TbItemCatExample.Criteria;
import com.aisile.pojo.entity.PageResult;
import com.aisile.sellergoods.service.ItemCatService;
import com.alibaba.dubbo.config.annotation.Service;
@Service
public class ItemCatServiceImpl implements ItemCatService {
@Autowired
private TbItemCatMapper itemCatMapper;
@Override
public List<TbItemCat> findAll() {
return itemCatMapper.selectByExample(null);
}
@Override
public PageResult findPage(int pageNum, int pageSize) {
// TODO Auto-generated method stub
return null;
}
@Override
public PageResult findSearch(int pageNum, int pageSize, TbItemCat tbItemCat) {
// TODO Auto-generated method stub
return null;
}
@Override
public void add(TbItemCat tbItemCat) {
itemCatMapper.insert(tbItemCat);
}
@Override
public void update(TbItemCat tbItemCat) {
itemCatMapper.updateByPrimaryKey(tbItemCat);
}
@Override
public TbItemCat findOne(Long id) {
return itemCatMapper.selectByPrimaryKey(id);
}
@Override
public List<TbItemCat> findAllByParentId(Long parentId) {
TbItemCatExample catExample = new TbItemCatExample();
catExample.createCriteria().andParentIdEqualTo(parentId);
return itemCatMapper.selectByExample(catExample);
}
@Override
public List<TbItemCat> findOne1(Long id) {
TbItemCatExample catExample = new TbItemCatExample();
catExample.createCriteria().andParentIdEqualTo(id);
return itemCatMapper.selectByExample(catExample);
}
@Override
public boolean delete(Long id) {
		// delete the category identified by this id
		TbItemCatExample catExample = new TbItemCatExample(); // create the example object
		Criteria criteria = catExample.createCriteria(); // start building the criteria
criteria.andParentIdEqualTo(id);
int count = itemCatMapper.countByExample(catExample);
if(count == 0) {
			// count == 0 means this node has no children and can be deleted; do not return yet, the caller's loop continues
itemCatMapper.deleteByPrimaryKey(id);
}else if(count != 0){
			// count != 0 means it cannot be deleted; return immediately so the caller can report it
return false;
}
		// when the deletion completes, return true so the caller's for loop reports success
return true;
}
}
<file_sep>/aisile-parent/aisile-manager-web/src/main/webapp/js/service/contentService.js
// wraps all calls to the backend; requires $http
app.service('contentService',function($http){
	// query all records
this.findAll = function(){
return $http.get('../content/findAll.do');
}
	// paging
this.findPage = function(page,rows){
return $http.get('../content/findPage.do?page='+page+"&rows="+rows);
}
	// paged query with search criteria
this.findSearch = function(page,rows,searchEntity){
return $http.post('../content/findSearch.do?page='+page+"&rows="+rows,searchEntity);
}
	// update
this.update = function(entity){
return $http.post('../content/update.do',entity);
}
	// update (used for the review/audit status)
this.updateStatus = function(entity,status){
return $http.post('../content/updateStatus.do?status='+status,entity);
}
	// add
this.add = function(entity){
return $http.post('../content/add.do',entity);
}
	// find one object by id
this.findOne = function(id){
return $http.post('../content/findOne.do?id='+id);
}
	// delete
this.dele = function(selectIds){
return $http.get('../content/dele.do?ids='+selectIds);
}
	// disable (shield)
this.shield = function(selectIds){
return $http.get('../content/shield.do?ids='+selectIds);
}
	// enable
this.openq = function(selectIds){
return $http.get('../content/openq.do?ids='+selectIds);
}
	// dropdown list
this.selectOptionList = function(){
return $http.get('../content/selectOptionList.do');
}
})<file_sep>/aisile-parent/aisile-sellergoods-interface/src/main/java/com/aisile/sellergoods/service/GoodsService.java
package com.aisile.sellergoods.service;
import java.util.List;
import com.aisile.pojo.TbGoods;
import com.aisile.pojo.entity.PageResult;
import com.aisile.pojo.entity.group.Goods;
public interface GoodsService {
/**
	 * Query all records
* @return
*/
public List<TbGoods> findAll();
/**
	 * Paging query
* @param pageNum
* @param pageSize
* @return
*/
public PageResult findPage(int pageNum,int pageSize);
/**
	 * Fuzzy (keyword) search
* @param pageNum
* @param pageSize
* @param TbGoods
* @return
*/
public PageResult findSearch(int pageNum,int pageSize,TbGoods tbGoods);
/**
	 * Add method
	 * Uses the composite Goods class
* @param Goods
*/
public void add(Goods goods);
/**
	 * Update method
* @param TbGoods
*/
public void update(Goods goods);
/**
* 回显 返回的是一个对象
* @param id
* @return
*/
public Goods findOne(Long id);
/**
* 删除(批量删除)
* @param ids
*/
public void delete(Long [] ids);
/**
* 驳回
* @param ids
*/
public void turnDown(Long[] ids);
/**
* 申请通过
* @param ids
*/
public void openGo(Long[] ids);
/**
* 上下架处理
* @param ids
* @param status
*/
public void upDownFrame(Long [] ids,String status);
}
<file_sep>/aisile-parent/aisile-manager-web/src/main/webapp/js/base.js
var app = angular.module('aisile',[]);<file_sep>/aisile-parent/aisile-manager-web/src/main/java/com/aisile/manager/controller/ContentCategoryController.java
package com.aisile.manager.controller;
import java.util.List;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import com.aisile.content.service.ContentCategoryService;
import com.aisile.pojo.TbContentCategory;
import com.aisile.pojo.TbItemCat;
import com.aisile.pojo.entity.PageResult;
import com.aisile.pojo.entity.Result;
import com.alibaba.dubbo.config.annotation.Reference;
@RestController
@RequestMapping("/contentCategory")
public class ContentCategoryController {
@Reference
private ContentCategoryService contentCategoryService;
@RequestMapping("/findAll")
public List<TbContentCategory> findAll(){
return contentCategoryService.findAll();
}
@RequestMapping("/findPage")
public PageResult findPage( int page,int rows){
return contentCategoryService.findPage(page, rows);
}
@RequestMapping("/findSearch")
public PageResult findSearch(int page,int rows,@RequestBody TbContentCategory tbContentCategory) {
return contentCategoryService.findSearch(page, rows, tbContentCategory);
}
@RequestMapping("/add")
public Result add(@RequestBody TbItemCat tbItemCat) {
try {
contentCategoryService.add(tbItemCat);
return new Result(true, "添加成功");
} catch (Exception e) {
System.out.println(e.getMessage());
return new Result(false, "添加失败");
}
}
@RequestMapping("/update")
public Result update(@RequestBody TbItemCat tbItemCat) {
try {
contentCategoryService.update(tbItemCat);
return new Result(true, "修改成功");
} catch (Exception e) {
System.out.println(e.getMessage());
return new Result(false, "修改失败");
}
}
@RequestMapping("/findOne")
public TbItemCat findOne(Long id) {
return contentCategoryService.findOne(id);
}
@RequestMapping("/dele")
public Result delete(Long [] ids) {
try {
for (Long id : ids) {
contentCategoryService.delete(id);
}
return new Result(true, "删除成功");
} catch (Exception e) {
System.out.println(e.getMessage());
return new Result(false, "删除失败");
}
}
@RequestMapping("findAllByParentId")
public List<TbItemCat> findAllByParentId(Long parentId){
return contentCategoryService.findAllByParentId(parentId);
}
}
<file_sep>/aisile-parent/aisile-sellergoods-interface/src/main/java/com/aisile/sellergoods/service/TypeTemplateService.java
package com.aisile.sellergoods.service;
import java.util.List;
import java.util.Map;
import com.aisile.pojo.TbTypeTemplate;
import com.aisile.pojo.entity.PageResult;
public interface TypeTemplateService {
/**
 * Query all type templates
 * @return
 */
public List<TbTypeTemplate> findAll();
/**
 * Pagination
 * @param pageNum
 * @param pageSize
 * @return
 */
public PageResult findPage(int pageNum,int pageSize);
/**
 * Fuzzy (conditional) search with pagination
 * @param pageNum
 * @param pageSize
 * @param tbTypeTemplate
 * @return
 */
public PageResult findSearch(int pageNum,int pageSize,TbTypeTemplate tbTypeTemplate);
/**
 * Add
 * @param tbTypeTemplate
 */
public void add(TbTypeTemplate tbTypeTemplate);
/**
 * Update
 * @param tbTypeTemplate
 */
public void update(TbTypeTemplate tbTypeTemplate);
/**
 * Load one record for echoing back to the page; returns a single object
 * @param id
 * @return
 */
public TbTypeTemplate findOne(Long id);
/**
 * Batch delete
 * @param ids
 */
public void delete(Long [] ids);
/**
 * Builds the specification option list for a template
 */
public List<Map> findOptionsList(Long id);
}
<file_sep>/aisile-parent/aisile-shop-web/src/main/java/com/aisile/shop/controller/TypeTemplateController.java
package com.aisile.shop.controller;
import java.util.List;
import java.util.Map;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import com.aisile.pojo.TbTypeTemplate;
import com.aisile.pojo.entity.PageResult;
import com.aisile.pojo.entity.Result;
import com.aisile.sellergoods.service.TypeTemplateService;
import com.alibaba.dubbo.config.annotation.Reference;
@RestController
@RequestMapping("/typeTemplate")
public class TypeTemplateController {
@Reference
private TypeTemplateService typeTemplateService;
@RequestMapping("/findAll")
public List<TbTypeTemplate> findAll(){
return typeTemplateService.findAll();
}
@RequestMapping("/findPage")
public PageResult findPage( int page,int rows){
return typeTemplateService.findPage(page, rows);
}
@RequestMapping("/findSearch")
public PageResult findSearch(int page,int rows,@RequestBody TbTypeTemplate tbTypeTemplate) {
return typeTemplateService.findSearch(page, rows, tbTypeTemplate);
}
@RequestMapping("/add")
public Result add(@RequestBody TbTypeTemplate tbTypeTemplate) {
try {
typeTemplateService.add(tbTypeTemplate);
return new Result(true, "添加成功");
} catch (Exception e) {
System.out.println(e.getMessage());
return new Result(false, "添加失败");
}
}
@RequestMapping("/update")
public Result update(@RequestBody TbTypeTemplate tbTypeTemplate) {
try {
typeTemplateService.update(tbTypeTemplate);
return new Result(true, "修改成功");
} catch (Exception e) {
System.out.println(e.getMessage());
return new Result(false, "修改失败");
}
}
@RequestMapping("/findOne")
public TbTypeTemplate findOne(Long id) {
//look up the record with this id from the backend
return typeTemplateService.findOne(id);
}
@RequestMapping("/dele")
public Result delete(Long [] ids) {
try {
typeTemplateService.delete(ids);
return new Result(true, "删除成功");
} catch (Exception e) {
System.out.println(e.getMessage());
return new Result(false, "删除失败");
}
}
@RequestMapping("/findSpecList")
public List<Map> findOptionsList(Long id){
return typeTemplateService.findOptionsList(id);
}
}
<file_sep>/aisile-parent/aisile-sellergoods-service/src/main/java/com/aisile/sellergoods/service/impl/GoodsServiceImpl.java
package com.aisile.sellergoods.service.impl;
import java.util.Date;
import java.util.List;
import java.util.Map;
import org.springframework.beans.factory.annotation.Autowired;
import com.aisile.mapper.TbBrandMapper;
import com.aisile.mapper.TbGoodsDescMapper;
import com.aisile.mapper.TbGoodsMapper;
import com.aisile.mapper.TbItemCatMapper;
import com.aisile.mapper.TbItemMapper;
import com.aisile.mapper.TbSellerMapper;
import com.aisile.pojo.TbGoods;
import com.aisile.pojo.TbGoodsDesc;
import com.aisile.pojo.TbGoodsExample;
import com.aisile.pojo.TbGoodsExample.Criteria;
import com.aisile.pojo.TbItem;
import com.aisile.pojo.TbItemExample;
import com.aisile.pojo.TbSeller;
import com.aisile.pojo.entity.PageResult;
import com.aisile.pojo.entity.group.Goods;
import com.aisile.sellergoods.service.GoodsService;
import com.alibaba.dubbo.config.annotation.Service;
import com.alibaba.fastjson.JSON;
import com.github.pagehelper.Page;
import com.github.pagehelper.PageHelper;
@Service
public class GoodsServiceImpl implements GoodsService {
@Autowired
private TbGoodsMapper tbGoodsMapper;
@Autowired
private TbGoodsDescMapper tbGoodsDescMapper;
@Autowired
private TbItemMapper tbItemMapper;
@Autowired
private TbBrandMapper tbBrandMapper;
@Autowired
private TbItemCatMapper tbItemCatMapper;
@Autowired
private TbSellerMapper tbSellerMapper;
private void setItemValue(Goods goods,TbItem tbItem) {
tbItem.setCategoryid(goods.getGoods().getCategory3Id());//3rd-level category id
tbItem.setCreateTime(new Date());
tbItem.setUpdateTime(new Date());
tbItem.setGoodsId(goods.getGoods().getId());//id of the parent goods (SPU)
TbSeller seller = tbSellerMapper.selectByPrimaryKey(goods.getGoods().getSellerId());
tbItem.setSeller(seller.getNickName());//seller name
tbItem.setSellerId(seller.getSellerId());//seller id
tbItem.setBrand(tbBrandMapper.selectByPrimaryKey(goods.getGoods().getBrandId()).getName());
tbItem.setCategory(tbItemCatMapper.selectByPrimaryKey(goods.getGoods().getCategory3Id()).getName());
//use the first uploaded item image, if any, as the SKU image
List<Map> imgs = JSON.parseArray(goods.getGoodsDesc().getItemImages(), Map.class);
if (imgs != null) {
if (imgs.size()>0) {
tbItem.setImage((String)imgs.get(0).get("url"));
}
}
}
@Override
public List<TbGoods> findAll() {
// TODO Auto-generated method stub
return null;
}
@Override
public PageResult findPage(int pageNum, int pageSize) {
// TODO Auto-generated method stub
return null;
}
@Override
public PageResult findSearch(int pageNum, int pageSize, TbGoods tbGoods) {
//pagination
PageHelper.startPage(pageNum, pageSize);
TbGoodsExample example = new TbGoodsExample();
Criteria criteria = example.createCriteria();
/*//query only the current seller's goods by seller id
if (tbGoods.getSellerId() != null && tbGoods.getSellerId().length()>0) {
criteria.andSellerIdEqualTo(tbGoods.getSellerId());
}*/
if (tbGoods.getAuditStatus() != null && tbGoods.getAuditStatus().length()>0) {
criteria.andAuditStatusEqualTo(tbGoods.getAuditStatus());
}
if (tbGoods.getGoodsName() != null && tbGoods.getGoodsName().length()>0) {
criteria.andGoodsNameLike("%"+tbGoods.getGoodsName()+"%");
}
Page<TbGoods> pageinfo = (Page<TbGoods>) tbGoodsMapper.selectByExample(example);
return new PageResult(pageinfo.getTotal(), pageinfo.getResult());
}
@Override
public void add(Goods goods) {
//save the goods base row (SPU)
goods.getGoods().setAuditStatus("0");//not yet audited
goods.getGoods().setIsMarketable("0");//off the shelf
tbGoodsMapper.insert(goods.getGoods());
//the generated goods id is now available on the inserted object
//save the goods description row (SPU desc)
goods.getGoodsDesc().setGoodsId(goods.getGoods().getId());
tbGoodsDescMapper.insert(goods.getGoodsDesc());
//save the SKU list
saveItemList(goods);
}
private void saveItemList(Goods goods) {
List<TbItem> list = goods.getItemList();
if("1".equals(goods.getGoods().getIsEnableSpec())) {//如果启动了规格
for (TbItem tbItem : list) {
//需要哪些属性
//商品品牌. 根据规格拼接
String title = goods.getGoods().getGoodsName();
Map<String, Object> specmap = JSON.parseObject(tbItem.getSpec());
for (String maps : specmap.keySet()) {
title+=","+specmap.get(maps);
}
tbItem.setTitle(title);
setItemValue(goods, tbItem);
tbItemMapper.insert(tbItem);
}
}else {//没有启动规格 但是有默认的一个sku
TbItem tbItem = new TbItem();
//需要哪些属性
//商品品牌. 根据规格拼接
String title = goods.getGoods().getGoodsName();
tbItem.setTitle(title);//标题
tbItem.setPrice(goods.getGoods().getPrice());//价格
tbItem.setStatus("1");//状态
tbItem.setIsDefault("1");//默认
tbItem.setNum(9999);//库存
tbItem.setSpec("{}");//表中表的默认规范
setItemValue(goods, tbItem);
tbItemMapper.insert(tbItem);
}
}
@Override
public void update(Goods goods) {
goods.getGoods().setAuditStatus("0");//设置未申请状态:如果经过修改的商品,需要重新审核.
tbGoodsMapper.updateByPrimaryKey(goods.getGoods());//保存商品表
tbGoodsDescMapper.updateByPrimaryKey(goods.getGoodsDesc());//保存商品扩展
//删除原有的sku列表数据
TbItemExample example = new TbItemExample();
com.aisile.pojo.TbItemExample.Criteria criteria = example.createCriteria();
criteria.andGoodsIdEqualTo(goods.getGoods().getId());
tbItemMapper.deleteByExample(example);
//添加新的sku列表
saveItemList(goods);
}
@Override
public Goods findOne(Long id) {
Goods goods = new Goods();
//query the goods base info (SPU)
TbGoods tbGoods = tbGoodsMapper.selectByPrimaryKey(id);
goods.setGoods(tbGoods);
//query the goods description (SPU desc)
TbGoodsDesc tbGoodsDesc = tbGoodsDescMapper.selectByPrimaryKey(id);
goods.setGoodsDesc(tbGoodsDesc);
//query the goods' SKU list
TbItemExample example = new TbItemExample();
example.createCriteria().andGoodsIdEqualTo(id);
List<TbItem> list = tbItemMapper.selectByExample(example);
goods.setItemList(list);
return goods;
}
@Override
public void delete(Long[] ids) {
for (Long id : ids) {
tbGoodsMapper.deleteByPrimaryKey(id);
}
}
@Override
public void turnDown(Long[] ids) {
for (Long id : ids) {
TbGoods tbGoods = tbGoodsMapper.selectByPrimaryKey(id);
tbGoods.setAuditStatus("3");
tbGoodsMapper.updateByPrimaryKey(tbGoods);
}
}
@Override
public void openGo(Long[] ids) {
for (Long id : ids) {
TbGoods tbGoods = tbGoodsMapper.selectByPrimaryKey(id);
tbGoods.setAuditStatus("2");
tbGoodsMapper.updateByPrimaryKey(tbGoods);
}
}
@Override
public void upDownFrame(Long [] ids,String status ) {
for (Long id : ids) {
TbGoods tbGoods = tbGoodsMapper.selectByPrimaryKey(id);
tbGoods.setIsMarketable(status);
tbGoodsMapper.updateByPrimaryKey(tbGoods);
}
}
}
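// --- Editor's note: a hypothetical, minimal sketch of the SKU-title concatenation performed in
// --- saveItemList(Goods) above. It is not part of the project; a plain java.util.Map replaces the
// --- fastjson spec map, and SkuTitleSketch/buildTitle are made-up names used only for illustration.
class SkuTitleSketch {
    public static void main(String[] args) {
        // in the service the map comes from JSON.parseObject(tbItem.getSpec()); here it is built by hand
        java.util.Map<String, Object> spec = new java.util.LinkedHashMap<>();
        spec.put("colour", "black");
        spec.put("memory", "64G");
        System.out.println(buildTitle("Phone X", spec)); // prints: Phone X,black,64G
    }
    static String buildTitle(String goodsName, java.util.Map<String, Object> spec) {
        StringBuilder title = new StringBuilder(goodsName);
        for (String key : spec.keySet()) {
            title.append(",").append(spec.get(key));      // same ","+value concatenation as the service loop
        }
        return title.toString();
    }
}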
<file_sep>/aisile-parent/aisile-manager-web/src/main/webapp/js/service/typeTemplateService.js
//Wraps all calls to the backend; requires $http
app.service('typeTemplateService',function($http){
//query all records
this.findAll = function(){
return $http.get('../typeTemplate/findAll.do');
}
//pagination
this.findPage = function(page,rows){
return $http.get('../typeTemplate/findPage.do?page='+page+"&rows="+rows);
}
//pagination with search criteria
this.findSearch = function(page,rows,searchEntity){
return $http.post('../typeTemplate/findSearch.do?page='+page+"&rows="+rows,searchEntity);
}
//update
this.update = function(entity){
return $http.post('../typeTemplate/update.do',entity);
}
//add
this.add = function(entity){
return $http.post('../typeTemplate/add.do',entity);
}
//find one object by id
this.findOne = function(id){
return $http.post('../typeTemplate/findOne.do?id='+id);
}
//delete
this.dele = function(selectIds){
return $http.get('../typeTemplate/dele.do?ids='+selectIds);
}
})<file_sep>/aisile-parent/aisile-manager-web/src/main/webapp/js/controller/goodsController.js
app.controller('goodsController', function($scope,$location,$http,goodsService,$controller,itemCatService) {
//inherit the shared baseController
$controller('baseController',{$scope:$scope});
//initial list
$scope.list = [];
$scope.findAll = function(){
//load the list data and bind it to the page
goodsService.findAll().success(
function(response){
$scope.list = response;
}
)
}
//pagination
$scope.findPage = function(page,rows){
goodsService.findPage(page,rows).success(
function(response){
$scope.list = response.rows;
$scope.paginationConf.totalItems = response.total;
}
)
}
//pagination with search criteria
$scope.searchEntity = {};
$scope.findSearch = function(page,rows){
goodsService.findSearch(page,rows,$scope.searchEntity).success(
function(response){
console.log(response);
$scope.list = response.rows;
$scope.paginationConf.totalItems = response.total;
}
)
}
$scope.status=['未申请','申请中','审核通过','已驳回'];
/**
 * How can the category names be displayed instead of raw ids?
 * Option 1: write a join query on the backend so the response already contains the category name.
 * Option 2: query the backend asynchronously from the frontend and build an id-to-name lookup map.
 */
//build the category id-to-name map (option 2)
$scope.itemCatList = [];
$scope.findItemCatList = function(){
itemCatService.findAll().success(function(response){
for (var i = 0; i < response.length; i++) {
$scope.itemCatList[response[i].id] = response[i].name;
}
})
}
//approve the selected goods (audit passed)
$scope.openGo = function(){
goodsService.openGo($scope.selectIds).success(function(response){
if(response.success){
alert(response.message);
$scope.reloadList();//reload the list
}else{
alert(response.message);
}
})
}
//reject the selected goods
$scope.turnDown = function(){
goodsService.turnDown($scope.selectIds).success(function(response){
if(response.success){
alert(response.message);
$scope.reloadList();//reload the list
}else{
alert(response.message);
}
})
}
//load one record for display
$scope.findOne = function(){
var id = $location.search()['id'];//read the id from the query string
console.log(id);
if(id == null){
return ;
}
goodsService.findOne(id).success(function(response){
$scope.entity = response;
})
}
//check whether a spec option is selected
$scope.checkAttributeValue = function(specName,optionName){
var items = $scope.entity.goodsDesc.specificationItems;
var object = $scope.searchObjectByKey(items,'attributeName',specName);
if(object == null){
return false;
}else{
if(object.attributeValue.indexOf(optionName)>=0){
return true;
}else{
return false;
}
}
}
//confirm and delete
$scope.del = function(){
swal({
title : '确定删除吗?',
text : '你将无法恢复它!',
type : 'warning',
showCancelButton : true,
confirmButtonColor : '#3085d6',
cancelButtonColor : '#d33',
confirmButtonText : '确定!',
cancelButtonText : '取消!',
confirmButtonClass : 'btn btn-success',
cancelButtonClass : 'btn btn-danger'
}).then(function(isConfirm){
if(isConfirm.value == true) {
goodsService.dele($scope.selectIds).success(function(response){
if(response.success){
$scope.reloadList();//reload the list
swal({
title:response.message,
text:'哈哈(2秒后自动关闭)!',
timer:2000,
type:'success'
});
}else{
swal({
title:response.message,
text:'哈哈(2秒后自动关闭)!',
timer:2000,
type:'error'
});
}
})
}else{
swal({
title:'错误',
text:'请至少选中一条数据进行删除,(2秒后自动关闭)!',
timer:2000,
type:'error'
});
}
})
}
});<file_sep>/aisile-parent/aisile-content-service/src/main/java/com/aisile/content/service/impl/ContentCategorySwerviceImpl.java
package com.aisile.content.service.impl;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import com.aisile.content.service.ContentCategoryService;
import com.aisile.mapper.TbContentCategoryMapper;
import com.aisile.pojo.TbContentCategory;
import com.aisile.pojo.TbContentCategoryExample;
import com.aisile.pojo.TbContentCategoryExample.Criteria;
import com.aisile.pojo.TbItemCat;
import com.aisile.pojo.entity.PageResult;
import com.alibaba.dubbo.config.annotation.Service;
import com.github.pagehelper.Page;
import com.github.pagehelper.PageHelper;
@Service
public class ContentCategorySwerviceImpl implements ContentCategoryService {
@Autowired
private TbContentCategoryMapper tbContentCategoryMapper;
@Override
public PageResult findSearch(int pageNum, int pageSize, TbContentCategory tbContentCategory) {
PageHelper.startPage(pageNum, pageSize);
TbContentCategoryExample example = new TbContentCategoryExample();
Criteria createCriteria = example.createCriteria();
if(tbContentCategory.getName() != null && !tbContentCategory.getName().equals("")) {
createCriteria.andNameLike("%"+tbContentCategory.getName()+"%");
}
Page<TbContentCategory> pageinfo = (Page<TbContentCategory>) tbContentCategoryMapper.selectByExample(example);
return new PageResult(pageinfo.getTotal(), pageinfo.getResult());
}
@Override
public void add(TbItemCat tbItemCat) {
// TODO Auto-generated method stub
}
@Override
public void update(TbItemCat tbItemCat) {
// TODO Auto-generated method stub
}
@Override
public TbItemCat findOne(Long id) {
// TODO Auto-generated method stub
return null;
}
@Override
public boolean delete(Long id) {
// TODO Auto-generated method stub
return false;
}
@Override
public List<TbItemCat> findAllByParentId(Long parentId) {
// TODO Auto-generated method stub
return null;
}
@Override
public List<TbItemCat> findOne1(Long id) {
// TODO Auto-generated method stub
return null;
}
@Override
public List<TbContentCategory> findAll() {
return tbContentCategoryMapper.selectByExample(null);
}
@Override
public PageResult findPage(int pageNum, int pageSize) {
// TODO Auto-generated method stub
return null;
}
}
<file_sep>/aisile-parent/aisile-content-service/src/main/java/com/aisile/content/service/impl/ContentServiceImpl.java
package com.aisile.content.service.impl;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.test.context.ContextConfiguration;
import com.aisile.content.service.ContentService;
import com.aisile.mapper.TbContentMapper;
import com.aisile.pojo.TbContent;
import com.aisile.pojo.TbContentExample;
import com.aisile.pojo.TbContentExample.Criteria;
import com.aisile.pojo.TbSeller;
import com.aisile.pojo.entity.PageResult;
import com.alibaba.dubbo.config.annotation.Service;
import com.github.pagehelper.Page;
import com.github.pagehelper.PageHelper;
@Service
@ContextConfiguration(locations="classpath:spring/applicationContext-redis.xml")
public class ContentServiceImpl implements ContentService {
@Autowired
private TbContentMapper tbContentMapper;
//inject the Spring Data Redis template
@Autowired
private RedisTemplate redisTemplate;
@Override
public List<TbContent> findAll() {
return tbContentMapper.selectByExample(null);
}
@Override
public PageResult findPage(int page, int rows) {
// TODO Auto-generated method stub
return null;
}
@Override
public PageResult findSearch(int page, int rows, TbContent tbContent) {
PageHelper.startPage(page, rows);
TbContentExample example = new TbContentExample();
Criteria criteria = example.createCriteria();
Page<TbContent> pageinof = (Page<TbContent>) tbContentMapper.selectByExample(example);
return new PageResult(pageinof.getTotal(), pageinof.getResult());
}
@Override
public void add(TbContent tbContent) {
if(tbContent.getStatus() != null) {
tbContentMapper.insert(tbContent);
}else {
tbContent.setStatus("0");
tbContentMapper.insert(tbContent);
}
//evict the cached content list for this category
redisTemplate.boundHashOps("content").delete(tbContent.getCategoryId());
}
@Override
public void update(TbContent tbContent) {
//look up the category id the content belonged to before the update
Long categoryId = tbContentMapper.selectByPrimaryKey(tbContent.getId()).getCategoryId();
//evict the cached list of the original category
redisTemplate.boundHashOps("content").delete(categoryId);
tbContentMapper.updateByPrimaryKey(tbContent);
//if the content moved to a different category, evict that category's cached list as well
if(categoryId.longValue()!=tbContent.getCategoryId().longValue()){
redisTemplate.boundHashOps("content").delete(tbContent.getCategoryId());
}
}
@Override
public TbContent findOne(Long id) {
return tbContentMapper.selectByPrimaryKey(id);
}
@Override
public void delete(Long[] ids) {
for (Long id : ids) {
//look up the category id of the content being deleted
Long categoryId = tbContentMapper.selectByPrimaryKey(id).getCategoryId();
//evict that category's cached list
redisTemplate.boundHashOps("content").delete(categoryId);
tbContentMapper.deleteByPrimaryKey(id);
}
}
@Override
public void updateStatus(TbSeller tbSeller, String status) {
// TODO Auto-generated method stub
}
/**
 * Query the content (advertisement) list by category id
 */
@Override
public List<TbContent> findByCategoryId(Long categoryId) {
//check the cache first: if the list is already cached, return it directly; otherwise load it from the
//database and put it into the cache (a standalone sketch of this cache-aside pattern follows this class)
List<TbContent> contentList = (List<TbContent>) redisTemplate.boundHashOps("content").get(categoryId);
if(contentList != null) {
System.out.println("served content list from cache");
return contentList;
}else {
TbContentExample example = new TbContentExample();
Criteria criteria = example.createCriteria();
criteria.andCategoryIdEqualTo(categoryId);
criteria.andStatusEqualTo("1");//开启默认
example.setOrderByClause("sort_order");//排序
contentList = tbContentMapper.selectByExample(example);
redisTemplate.boundHashOps("content").put(categoryId, contentList);//加入缓存 缓存同步
}
return contentList;
}
@Override
public void shield(Long[] ids) {
for (Long id : ids) {
TbContent content = tbContentMapper.selectByPrimaryKey(id);
content.setStatus("0");
tbContentMapper.updateByPrimaryKey(content);
}
}
@Override
public void openq(Long[] ids) {
for (Long id : ids) {
TbContent content = tbContentMapper.selectByPrimaryKey(id);
content.setStatus("1");
tbContentMapper.updateByPrimaryKey(content);
}
}
}
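// --- Editor's note: a hypothetical, minimal sketch of the cache-aside pattern used by findByCategoryId(Long)
// --- above: read the cache first, fall back to the database on a miss, then populate the cache, and evict the
// --- entry whenever a category's content changes. It is not part of the project; an in-memory map stands in
// --- for the Redis hash and the names below are made up purely for illustration.
class CacheAsideSketch {
    private final java.util.Map<Long, java.util.List<String>> cache = new java.util.HashMap<>();
    public java.util.List<String> findByCategoryId(Long categoryId) {
        java.util.List<String> contentList = cache.get(categoryId);   // 1. try the cache
        if (contentList != null) {
            return contentList;                                       // cache hit
        }
        contentList = loadFromDatabase(categoryId);                   // 2. miss: load from the "database"
        cache.put(categoryId, contentList);                           // 3. fill the cache for later reads
        return contentList;
    }
    public void evict(Long categoryId) {
        cache.remove(categoryId);                                     // mirrors boundHashOps("content").delete(categoryId)
    }
    private java.util.List<String> loadFromDatabase(Long categoryId) {
        java.util.List<String> rows = new java.util.ArrayList<>();
        rows.add("banner for category " + categoryId);                // stub rows instead of tbContentMapper
        return rows;
    }
    public static void main(String[] args) {
        CacheAsideSketch sketch = new CacheAsideSketch();
        System.out.println(sketch.findByCategoryId(1L));              // miss, then cached
        System.out.println(sketch.findByCategoryId(1L));              // hit
    }
}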
<file_sep>/aisile-parent/aisile-shop-web/src/main/webapp/js/controller/baseController.js
//shared parent controller with logic common to all list pages
app.controller('baseController', function($scope) {
// reload the current page of the list
$scope.reloadList = function(){
//parameterized method call in angularjs
/* $scope.findPage($scope.paginationConf.currentPage,
$scope.paginationConf.itemsPerPage) */
$scope.findSearch($scope.paginationConf.currentPage,
$scope.paginationConf.itemsPerPage)
}
// configuration for the pagination widget
$scope.paginationConf = {
currentPage: 1,
totalItems: 10,
itemsPerPage: 10,
perPageOptions: [1,2,10, 20, 30, 40, 50],
onChange: function(){
$scope.reloadList();//reload the list
}
}
// maintain the list of selected ids (used for batch delete)
$scope.selectIds = [];//collection of selected ids
$scope.updateSelections = function($event,id){//$event gives access to the checkbox element
//if the checkbox is checked, push the id into the array; otherwise remove it by index
if($event.target.checked){//checked: add to the array
$scope.selectIds.push(id);
}else{//unchecked: remove from the array
var idx = $scope.selectIds.indexOf(id);
$scope.selectIds.splice(idx,1);
}
}
//extract one property from each element of a JSON array string and return the values joined with commas
$scope.jsonToString=function(jsonString,key){
var json=JSON.parse(jsonString);//parse the JSON string into an object
var value="";
for(var i=0;i<json.length;i++){
if(i>0){
value+=","
}
value+=json[i][key];
}
return value;
}
//find an object in a collection by the value of a given key
$scope.searchObjectByKey=function(list,key,keyValue){
for(var i=0;i<list.length;i++){
if(list[i][key]==keyValue){
return list[i];
}
}
return null;
}
});<file_sep>/aisile-parent/aisile-shop-web/src/main/webapp/js/service/goodsService.js
//Wraps all calls to the backend; requires $http
app.service('goodsService',function($http){
//query all records
this.findAll = function(){
return $http.get('../goods/findAll.do');
}
//pagination
this.findPage = function(page,rows){
return $http.get('../goods/findPage.do?page='+page+"&rows="+rows);
}
//pagination with search criteria
this.findSearch = function(page,rows,searchEntity){
return $http.post('../goods/findSearch.do?page='+page+"&rows="+rows,searchEntity);
}
//update
this.update = function(entity){
return $http.post('../goods/update.do',entity);
}
//add
this.add = function(entity){
return $http.post('../goods/add.do',entity);
}
//find one object by id
this.findOne = function(id){
return $http.post('../goods/findOne.do?id='+id);
}
//delete
this.dele = function(selectIds){
return $http.get('../goods/dele.do?ids='+selectIds);
}
//put the selected goods on or off the shelf
this.upDownFrame = function(selectIds,status){
return $http.get('../goods/upDownFrame.do?ids='+selectIds+"&status="+status);
}
//drop-down list options
this.selectOptionList = function(){
return $http.get('../goods/selectOptionList.do');
}
})<file_sep>/aisile-parent/aisile-shop-web/src/main/webapp/js/controller/goodsController.js
app.controller('goodsController', function($scope,$location,$http,goodsService,$controller,uploadService,itemCatService,typeTemplateService) {
//inherit the shared baseController
$controller('baseController',{$scope:$scope});
//initial page entity and list
$scope.entity = {goodsDesc:{itemImages:[],specificationItems:[]}};
$scope.list = [];
$scope.findAll = function(){
//load the list data and bind it to the page
goodsService.findAll().success(
function(response){
$scope.list = response;
}
)
}
//on/off shelf handling (this could be extended so that only goods that passed the audit, or that are currently off the shelf, can change state)
$scope.upDownFrame = function(status){
goodsService.upDownFrame($scope.selectIds,status).success(function(response){
if(response.success){
alert(response.message);
$scope.reloadList();
$scope.selectIds = [];
}else{
alert(response.message);
}
})
}
//upload an image file
$scope.uploadFile = function(){
uploadService.uploadFile().success(
function(response){
if(response.success){//if the upload succeeded, the response message carries the file url
//assign the url to the image entity so the picture can be echoed back on the page
$scope.image_entity.url=response.message;
}else{
alert(response.message);
}
}
)
}
//page entity structure (already initialized above)
//$scope.entity = {goods:{},goodsDesc:{itemImages:[]}};
//add an image to the image list
$scope.add_image_entity = function(){
$scope.entity.goodsDesc.itemImages.push($scope.image_entity);
}
//remove an image from the list by its index
$scope.remove_image_entity = function(index){
$scope.entity.goodsDesc.itemImages.splice(index,1);
}
//load the 1st-level categories
$scope.selectItemCat1List = function(){
itemCatService.findAllByParentId(0).success(
function(response){
$scope.itemCat1List = response;
})
};
//load the 2nd-level categories: $watch monitors entity.goods.category1Id and fires when it changes
$scope.$watch('entity.goods.category1Id',function(newVal,oldVal){
itemCatService.findAllByParentId(newVal).success(
function(response){
$scope.itemCat2List = response;
})
})
//load the 3rd-level categories
$scope.$watch('entity.goods.category2Id',function(newVal,oldVal){
itemCatService.findAllByParentId(newVal).success(
function(response){
$scope.itemCat3List = response;
})
})
//watch the 3rd-level category to obtain the type template id
$scope.$watch('entity.goods.category3Id',function(newVal,oldVal){
itemCatService.findOne(newVal).success(
function(response){
//switch to the template bound to the selected category
$scope.entity.goods.typeTemplateId = response.typeId;//update the template id
})
})
//once the template id changes, load the brand list, the custom (extended) attributes and the specification list
//the template id is set after the 3rd-level category is chosen
$scope.$watch('entity.goods.typeTemplateId',function(newVal,oldVal){
typeTemplateService.findOne(newVal).success(function(response){
//the response is a single object; the brand list has to be pulled out of it
$scope.typeTemplate = response;//the type template
$scope.typeTemplate.brandIds = JSON.parse(response.brandIds);
if($location.search()['id'] == null){
$scope.entity.goodsDesc.customAttributeItems = JSON.parse(response.customAttributeItems);
}
})
//load the specification list for this template
typeTemplateService.findSpecList(newVal).success(function(response){
$scope.specList = response;
})
})
//keep entity.goodsDesc.specificationItems (a JSON structure) in sync with the selected spec options
$scope.updateSpecAttribute = function($event,name,value){
//searchObjectByKey takes: the collection to search, the property name to match, and the value to look for
var object = $scope.searchObjectByKey($scope.entity.goodsDesc.specificationItems,'attributeName',name);
if(object!=null){
//the attribute already has an entry: add or remove the option depending on the checkbox state
if($event.target.checked){
//checked: add the option value
object.attributeValue.push(value);
}else{//unchecked: remove the option value
object.attributeValue.splice(object.attributeValue.indexOf(value),1);
//if no options are left for this attribute, drop the whole entry
if(object.attributeValue.length == 0){
$scope.entity.goodsDesc.specificationItems.splice($scope.entity.goodsDesc.specificationItems.indexOf(object),1);
}
}
}else{
//no entry yet for this attribute: create one
$scope.entity.goodsDesc.specificationItems.push(
{"attributeName":name,"attributeValue":[value]});
}
}
//generate the SKU list as the cartesian product of the selected spec options
$scope.createItemlist = function(){
//start with a single default row
$scope.entity.itemList=[{spec:{},price:0,num:99999,status:'0',isDefault:'0' } ]
//the spec options the user selected
var items= $scope.entity.goodsDesc.specificationItems;
//expand the list once per spec attribute
for (var i = 0; i < items.length; i++) {
$scope.entity.itemList = addColumn($scope.entity.itemList,items[i].attributeName,items[i].attributeValue);
}
}
//expand the row list with one more spec column: every existing row is duplicated once per option value
addColumn=function(list,columnName,conlumnValues){
var newList=[];//the expanded collection
for(var i=0;i<list.length;i++){
var oldRow= list[i];
for(var j=0;j<conlumnValues.length;j++){
var newRow= JSON.parse( JSON.stringify( oldRow ) );//deep clone
newRow.spec[columnName]=conlumnValues[j];
newList.push(newRow);
}
}
return newList;
}
//pagination
$scope.findPage = function(page,rows){
goodsService.findPage(page,rows).success(
function(response){
$scope.list = response.rows;
$scope.paginationConf.totalItems = response.total;
}
)
}
//pagination with search criteria
$scope.searchEntity = {};
$scope.findSearch = function(page,rows){
goodsService.findSearch(page,rows,$scope.searchEntity).success(
function(response){
$scope.list = response.rows;
$scope.paginationConf.totalItems = response.total;
}
)
}
$scope.status=['未申请','申请中','审核通过','已驳回'];
/**
 * How can the category names be displayed instead of raw ids?
 * Option 1: write a join query on the backend so the response already contains the category name.
 * Option 2: query the backend asynchronously from the frontend and build an id-to-name lookup map.
 */
//build the category id-to-name map (option 2)
$scope.itemCatList = [];
$scope.findItemCatList = function(){
itemCatService.findAll().success(function(response){
for (var i = 0; i < response.length; i++) {
$scope.itemCatList[response[i].id] = response[i].name;
}
})
}
//save: add when there is no id, update otherwise
$scope.save = function(){
//pull the rich-text description out of the editor into the entity
$scope.entity.goodsDesc.introduction = editor.html();
var serviceObject = {};
if($scope.entity.goods.id !=null){
serviceObject = goodsService.update($scope.entity);
}else{
serviceObject = goodsService.add($scope.entity)
}
serviceObject.success(function(response){
if(response.success){
$scope.entity={};
editor.html("");
swal({
title:response.message,
text:'添加成功(2秒后自动关闭)!',
timer:2000,
type:'success'
});
}else{
swal({
title:response.message,
text:'哈哈(2秒后自动关闭)!',
timer:2000,
type:'error'
});
}
})
}
//add (redirects to the seller login page on success)
$scope.add = function(){
goodsService.add($scope.entity).success(function(response){
if(response.success){
//on success, jump to the login page
location.href="shoplogin.html";
}else{
alert(response.message);
}
})
}
//load one record for display/editing
$scope.findOne = function(){
var id = $location.search()['id'];//read the id from the query string
console.log(id);
if(id == null){
return ;
}
goodsService.findOne(id).success(function(response){
$scope.entity = response;
//put the description back into the rich-text editor
editor.html($scope.entity.goodsDesc.introduction);
//echo the goods images
$scope.entity.goodsDesc.itemImages = JSON.parse($scope.entity.goodsDesc.itemImages);
//echo the custom (extended) attributes
$scope.entity.goodsDesc.customAttributeItems = JSON.parse($scope.entity.goodsDesc.customAttributeItems);
//echo the specification details
$scope.entity.goodsDesc.specificationItems = JSON.parse($scope.entity.goodsDesc.specificationItems);
//parse the spec JSON of every SKU in the list
for (var i = 0; i < $scope.entity.itemList.length; i++) {
$scope.entity.itemList[i].spec = JSON.parse($scope.entity.itemList[i].spec);
}
})
}
//check whether a spec option is selected
$scope.checkAttributeValue = function(specName,optionName){
var items = $scope.entity.goodsDesc.specificationItems;
var object = $scope.searchObjectByKey(items,'attributeName',specName);
if(object == null){
return false;
}else{
if(object.attributeValue.indexOf(optionName)>=0){
return true;
}else{
return false;
}
}
}
//confirm and delete
$scope.del = function(){
swal({
title : '确定删除吗?',
text : '你将无法恢复它!',
type : 'warning',
showCancelButton : true,
confirmButtonColor : '#3085d6',
cancelButtonColor : '#d33',
confirmButtonText : '确定!',
cancelButtonText : '取消!',
confirmButtonClass : 'btn btn-success',
cancelButtonClass : 'btn btn-danger'
}).then(function(isConfirm){
if(isConfirm.value == true) {
goodsService.dele($scope.selectIds).success(function(response){
if(response.success){
$scope.reloadList();//reload the list
swal({
title:response.message,
text:'哈哈(2秒后自动关闭)!',
timer:2000,
type:'success'
});
}else{
swal({
title:response.message,
text:'哈哈(2秒后自动关闭)!',
timer:2000,
type:'error'
});
}
})
}else{
swal({
title:'错误',
text:'请至少选中一条数据进行删除,(2秒后自动关闭)!',
timer:2000,
type:'error'
});
}
})
}
});<file_sep>/aisile-parent/aisile-manager-web/src/main/java/com/aisile/manager/controller/BrandController.java
package com.aisile.manager.controller;
import java.util.List;
import java.util.Map;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import com.aisile.pojo.TbBrand;
import com.aisile.pojo.entity.PageResult;
import com.aisile.pojo.entity.Result;
import com.aisile.sellergoods.service.BrandService;
import com.alibaba.dubbo.config.annotation.Reference;
@RestController
@RequestMapping("/brand")
public class BrandController {
@Reference
private BrandService brandService;
@RequestMapping("/findAll")
public List<TbBrand> findAll(){
return brandService.geTbBrands();
}
@RequestMapping("/findPage")
public PageResult findPage( int page,int rows){
return brandService.findPage(page, rows);
}
@RequestMapping("/findSearch")
public PageResult findSearch(int page,int rows,@RequestBody TbBrand tbBrand) {
return brandService.findSearch(page, rows, tbBrand);
}
@RequestMapping("/add")
public Result add(@RequestBody TbBrand tbBrand) {
try {
brandService.add(tbBrand);
return new Result(true, "添加成功");
} catch (Exception e) {
System.out.println(e.getMessage());
return new Result(false, "添加失败");
}
}
@RequestMapping("/update")
public Result update(@RequestBody TbBrand tbBrand) {
try {
brandService.update(tbBrand);
return new Result(true, "修改成功");
} catch (Exception e) {
System.out.println(e.getMessage());
return new Result(false, "修改失败");
}
}
@RequestMapping("/findOne")
public TbBrand findOne(Long id) {
return brandService.findOne(id);
}
@RequestMapping("/dele")
public Result delete(Long [] ids) {
try {
brandService.delete(ids);
return new Result(true, "删除成功");
} catch (Exception e) {
System.out.println(e.getMessage());
return new Result(false, "删除失败");
}
}
@RequestMapping("/selectOptionList")
public List<Map> selectOptionList() {
return brandService.selectOptionList();
}
}
<file_sep>/1601u_shop/src/com/bw/beans/Shop.java
package com.bw.beans;
//main product table information
public class Shop {
private int sid;
private String sname;
private String brief;
private Pai pai;
private String price;
private Lei lei;
private Integer page;
private Integer rows;
public int getSid() {
return sid;
}
public void setSid(int sid) {
this.sid = sid;
}
public String getSname() {
return sname;
}
public void setSname(String sname) {
this.sname = sname;
}
public String getBrief() {
return brief;
}
public void setBrief(String brief) {
this.brief = brief;
}
public String getPrice() {
return price;
}
public void setPrice(String price) {
this.price = price;
}
public Pai getPai() {
return pai;
}
public void setPai(Pai pai) {
this.pai = pai;
}
public Lei getLei() {
return lei;
}
public void setLei(Lei lei) {
this.lei = lei;
}
@Override
public String toString() {
return "Shop [sid=" + sid + ", sname=" + sname + ", brief=" + brief + ", pai=" + pai + ", price=" + price
+ ", lei=" + lei + ", page=" + page + ", rows=" + rows + "]";
}
public Integer getPage() {
return page;
}
public void setPage(Integer page) {
this.page = page;
}
public Integer getRows() {
return rows;
}
public void setRows(Integer rows) {
this.rows = rows;
}
}
<file_sep>/aisile-parent/aisile-manager-web/src/main/webapp/js/controller/typeTemplateController.js
app.controller('typeTemplateController', function($scope,typeTemplateService,brandService,specificationService,$controller) {
//inherit the shared baseController
$controller('baseController',{$scope:$scope});
$scope.list = [];
//load the full template list
$scope.findAll = function(){
//load the list data and bind it to the page
typeTemplateService.findAll().success(
function(response){
$scope.list = response;
}
)
}
//add a row to the custom attribute table
$scope.addTableRow = function(){
//append an empty row
$scope.entity.customAttributeItems.push({});
}
//delete a row
$scope.deleteTableRow = function(index){
//splice takes two arguments: the start index and the number of elements to remove
$scope.entity.customAttributeItems.splice(index,1);
}
//pagination
$scope.findPage = function(page,rows){
typeTemplateService.findPage(page,rows).success(
function(response){
$scope.list = response.rows;
$scope.paginationConf.totalItems = response.total;
}
)
}
//pagination with search criteria
$scope.searchEntity = {};
$scope.findSearch = function(page,rows){
typeTemplateService.findSearch(page,rows,$scope.searchEntity).success(
function(response){
$scope.list = response.rows;
$scope.paginationConf.totalItems = response.total;
}
)
}
//save: add when there is no id, update otherwise
$scope.save = function(){
if($scope.entity.id != null){
typeTemplateService.update($scope.entity).success(function(response){
if(response.success){
$scope.reloadList();//reload the list
alert(response.message);
}else{
alert(response.message);
}
})
}else{
typeTemplateService.add($scope.entity).success(function(response){
if(response.success){
$scope.reloadList();//reload the list
alert(response.message);
}else{
alert(response.message);
}
})
}
}
//load one record for display/editing
$scope.findOne = function(id){
typeTemplateService.findOne(id).success(function(response){
$scope.entity = response;
//the select2 controls need JSON objects for echoing, so the JSON string fields have to be parsed first
//the original note lists eval(), $parseJson() and JSON.parse() as ways to turn the string into a JSON object
$scope.entity.brandIds = JSON.parse($scope.entity.brandIds);//brand list
$scope.entity.specIds = JSON.parse($scope.entity.specIds);//specification list
$scope.entity.customAttributeItems = JSON.parse($scope.entity.customAttributeItems);//custom (extended) attributes
})
}
//confirm and delete
$scope.del = function(){
swal({
title : '确定删除吗?',
text : '你将无法恢复它!',
type : 'warning',
showCancelButton : true,
confirmButtonColor : '#3085d6',
cancelButtonColor : '#d33',
confirmButtonText : '确定!',
cancelButtonText : '取消!',
confirmButtonClass : 'btn btn-success',
cancelButtonClass : 'btn btn-danger'
}).then(function(isConfirm){
if(isConfirm.value == true) {
typeTemplateService.dele($scope.selectIds).success(function(response){
if(response.success){
$scope.reloadList();//reload the list
swal({
title:response.message,
text:'哈哈(2秒后自动关闭)!',
timer:2000,
type:'success'
});
}else{
swal({
title:response.message,
text:'哈哈(2秒后自动关闭)!',
timer:2000,
type:'error'
});
}
})
}else{
swal({
title:'错误',
text:'请至少选中一条数据进行删除,(2秒后自动关闭)!',
timer:2000,
type:'error'
});
}
})
}
//brandList model for the select2 control
$scope.brandList = {data:[]};
//load the brand options
$scope.findBrandList= function(){
brandService.selectOptionList().success(function(response){
$scope.brandList = {data:response};
})
}
//specList model for the select2 control
$scope.specList = {data:[]};
//load the specification options
$scope.findSpecList= function(){
specificationService.selectOptionList().success(function(response){
$scope.specList = {data:response};
})
}
});<file_sep>/aisile-parent/aisile-manager-web/src/main/webapp/js/service/goodsService.js
//Wraps all calls to the backend; requires $http
app.service('goodsService',function($http){
//query all records
this.findAll = function(){
return $http.get('../goods/findAll.do');
}
//pagination
this.findPage = function(page,rows){
return $http.get('../goods/findPage.do?page='+page+"&rows="+rows);
}
//pagination with search criteria
this.findSearch = function(page,rows,searchEntity){
return $http.post('../goods/findSearch.do?page='+page+"&rows="+rows,searchEntity);
}
//update
this.update = function(entity){
return $http.post('../goods/update.do',entity);
}
//add
this.add = function(entity){
return $http.post('../goods/add.do',entity);
}
//find one object by id
this.findOne = function(id){
return $http.post('../goods/findOne.do?id='+id);
}
//delete
this.dele = function(selectIds){
return $http.get('../goods/dele.do?ids='+selectIds);
}
//reject the audit request
this.turnDown = function(selectIds){
return $http.get('../goods/turnDown.do?ids='+selectIds);
}
//approve the audit request
this.openGo = function(selectIds){
return $http.get('../goods/openGo.do?ids='+selectIds);
}
//drop-down list options
this.selectOptionList = function(){
return $http.get('../goods/selectOptionList.do');
}
})
| bdb5aff0c56781c43837e404d77807673e35f9ef | ["JavaScript", "Java"] | 28 | Java | DJL0504/Git | 720607a145401362d7c29c1cc56b7f7e0c1ad520 | 65de5be7cb9d6e107b54b78ce9ca513369996949 | refs/heads/main |
<repo_name>shabna21/shabna_keralatourism_ictkerala<file_sep>/index.html
<!DOCTYPE html>
<html lang="en">
<head>
<title>Kerala Tourism</title>
<link rel="stylesheet" href="./css/css/style.css">
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.0.0-beta1/dist/css/bootstrap.min.css" rel="stylesheet" integrity="<KEY>" crossorigin="anonymous">
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.0.0-beta1/dist/js/bootstrap.bundle.min.js" integrity="<KEY>" crossorigin="anonymous"></script>
</head>
<body background="./images/assign pics/bg2.jpg">
<nav class="navbar">
<img class="logo" src="./images/assign pics/logo.png">
<ul>
<li><a class="active" href="index.html">Home</a></li>
<li><a href="login.html">Login</a></li>
<li><a href="SignUp.html">Sign Up</a></li>
<li><a href="districts.html">Districts</a></li>
<li><a href="#">Contact Us</a></li>
<li><a href="#">Search</a></li>
<input type="text" placeholder="Search">
</ul>
</nav>
<section>
<div>
<h1>Welcome to Kerala</h1>
<h4>God's Own Country</h4>
</div>
<div>
<h1>Welcome to Kerala</h1>
<h4>God's Own Country</h4>
</div>
</section>
<div class="container-main">
<div id="content-wrap">
<div class="myMargin">
<div id="carouselExampleIndicators" class="carousel slide" data-bs-ride="carousel">
<ol class="carousel-indicators">
<li data-bs-target="#carouselExampleIndicators" data-bs-slide-to="0" class="active"></li>
<li data-bs-target="#carouselExampleIndicators" data-bs-slide-to="1"></li>
<li data-bs-target="#carouselExampleIndicators" data-bs-slide-to="2"></li>
<li data-bs-target="#carouselExampleIndicators" data-bs-slide-to="3"></li>
<li data-bs-target="#carouselExampleIndicators" data-bs-slide-to="4"></li>
<li data-bs-target="#carouselExampleIndicators" data-bs-slide-to="5"></li>
</ol>
<div class="carousel-inner">
<div class="carousel-item active">
<img src="./images/assign pics/img2.jpg" class="d-block w-100" width="300" height="400"alt="img1">
</div>
<div class="carousel-item">
<img src="./images/assign pics/img3.jpg" class="d-block w-100" width="300" height="400" alt="img2">
</div>
<div class="carousel-item">
<img src="./images/assign pics/img4.jpg" class="d-block w-100" width="300" height="400" alt="img3">
</div>
<div class="carousel-item">
<img src="./images/assign pics/img5.jpg" class="d-block w-100" width="300" height="400" alt="img4">
</div>
<div class="carousel-item">
<img src="./images/assign pics/img6.jpg" class="d-block w-100" width="300" height="400" alt="img5">
</div>
<div class="carousel-item">
<img src="./images/assign pics/img7.jpg" class="d-block w-100"width="300" height="400" alt="img6">
</div>
</div>
<a class="carousel-control-prev" href="#carouselExampleIndicators" role="button" data-bs-slide="prev">
<span class="carousel-control-prev-icon" aria-hidden="true"></span>
<span class="visually-hidden">Previous</span>
</a>
<a class="carousel-control-next" href="#carouselExampleIndicators" role="button" data-bs-slide="next">
<span class="carousel-control-next-icon" aria-hidden="true"></span>
<span class="visually-hidden">Next</span>
</a>
</div>
</div>
</div>
<div class="cont-1">
<div class="circle1"></div>
<h3>KERALA:Districts,Culture & Tourism</h3>
<p><span class="head-para"><b>Kerala</span> is a state in the Republic of India. It is in the south-west of the country.The capital city of the state is <span class="head-para">Thiruvananthapuram</span>.There are 14 districts in Kerala. They are: Alappuzha, Ernakulam, Idukki, Kannur, Kasaragod, Kollam, Kottayam, Kozhikode, Malappuram, Palakkad, Pathanamthitta, Thiruvananthapuram, Thrissur and Wayanad. People in Kerala speak Malayalam. Kerala is known for traditional arts and people enjoy traditional, percussion-filled music. They also enjoy tribal ceremonies, martial arts, and sports such as soccer, cricket, and badminton. Kerala is popular for Kalaripayattu, a martial arts form and Kathakali Kathakali, a Hindu performance art. Kerala is one of the biggest tourist attractions in India, both for Indians as well as foreigners. It has beautiful beaches, hill stations and extensive backwaters. Kerala is also known for its diverse nature. </p>
</div>
<div class="Pl-ker">
<div class="card">
<img src="./images/assign pics/mainbg1.jpg" width="200" height="200" class="card-img-top" alt="">
<div class="card-body">
<h5 class="card-title"><span class="pl-head">Places to Visit in Kerala</span></h5>
<p class="card-text">Alleppey, Munnar, Waynand, Cochin, Kovalam, Varkala, Thekkady, Kumarakom, Idukki, Bekal, Thrissur, Kasargod, Vaganom, Kollam, Kuttanad and many more.
There are so many places to visit in Kerala that even a week’s trip is not enough. It is such a beautiful and charismatic land that it is known as God’s own country. Kerala is one such state in India that is a blend of natural beauty and magic. Known as the “Spice Capital of India”, Kerala has been the central destination for honeymoons and tourism.
Be prepared to be enchanted by the backwaters in Alleppey, wildlife and museums in Wayanad, eco-village of Idukki, Valara waterfalls, and the classic Kochi. The magic of Kerala never vanishes, though it takes your breath away. The enthralling glimpse of the wildlife in Kerala and the beautiful tea plantations make you want to stay here forever. Apart from villages and cities with natural beauty and plantations, there are a plethora of tourist places to visit in Kerala.
Once you are here, there is no going back. Be sure to visit the famous Thrissur Zoo and witness the fauna of Kerala, along with visiting the rich Wayanad Heritage Museum, Blossom Park, shop at the Triveni Floating Market and spend some quality time at the Lockhart Gap ViewPoint.</p>
<!-- <p class="card-text"><small class="text-muted">Last updated 3 mins ago</small></p> -->
</div>
</div>
<div class="card">
<img src="./images/assign pics/img3.jpg" width="200" height="200" class="card-img-top" alt="">
<div class="card-body">
<h5 class="card-title">Alappuzha</h5>
<p class="card-text">Alappuzha (Alleppey) is known as ‘the Venice of the East’. Offering the best tourist places in Alleppey, this charming place is the hub of Kerala’s backwaters and is home to a huge network of backwaters and more than thousand houseboats. The houseboats you find in the backwaters of Alappuzha are in fact a reworked version of the Kettuvallams of olden times. Kettuvallam is a Malayalam word, ‘Kettu’, means living structures and ‘Vallom’ means boat. In the olden days, kettuvallam or boat with a thatched roof that covers over wooden hulls was used to carry tons of rice and spices.
The modern houseboats are equipped with all the comforts of a good hotel room like air conditioners, modern toilets, cozy living rooms, a kitchen and even a balcony for angling. An uninterrupted view of life in the backwaters fringed with coconut trees can be enjoyed from a houseboat. A perfect place to unwind with its laidback canals and lush greenery. The Alappuzha beach with a 137-year-old pier extending into the sea and an old 17th-century lighthouse built by the Portuguese add to the magic of the place. It is famous for its boat races, marine products, and coir industry.
So get ready to wander around this small but chaotic city center and bus-stand area, with its modest grid of canals. Head west to the beach or in practically any other direction towards the backwaters and Alleppey becomes elegant and greenery-fringed, waning into a watery world of hamlets, punted canoes, toddy shops and, of course, houseboats. Float along and gaze over paddy fields of succulent green, curvaceous rice barges and village life along the banks. Indeed, it is here that nature has spent upon the land her richest bounties. Come and explore the best tourist places in Alleppey.</p>
<!-- <p class="card-text"><small class="text-muted">Last updated 3 mins ago</small></p> -->
</div>
</div>
<div class="card mb-3">
<img src="./images/assign pics/pl-wayanad.jpg" width="200" height="200" class="card-img-top" alt="">
<div class="card-body">
<h5 class="card-title">Wayanad</h5>
<p class="card-text">Known for its soothing temperature all around the year, Wayanad is the ultimate destination to escape the scorching sun of North India. This place doesn’t only offer its cool temperature but along with that, it has untouched woods, misty mountains, and long spread plantations. Often called the spice garden of India, Wayanad is laid with many tourist places and vast orchards of various spices like cardamom, Black pepper, Star Anise, Fenugreek, Black cumin and more. These green orchards of distinct spices give this place a different fragrance which is further enriched with the aroma of coffee plantation, nutrition of organic fruits and vegetables and waving paddy fields.
Having said this much about the greenery in the city, it is understood that it will have numerous wildlife sanctuaries, reserves and national parks protecting the rural life of the forests. Adding to all the natural beauty you have India’s largest earth dam and Asia’s second-largest earth dam, Banasura Sagar Dam in Wayanad. Having mighty mountains, dense woods, and gushing rivers, Wayanad is a hub for trekking, hiking, mountaineering, rock climbing, rappelling, camping, and many more adventurous activities.
You can trek to India’s largest Earth Dam, Banasura Dam or trek to witness the sun setting behind the mountains from Wayanad’s highest peak, Chembra Peak. The list doesn’t end here, you have the chance to explore potentially Neolithic age at Edakkal Caves. You will find some petroglyphs- prehistoric rock carvings used as a communication tool.
Moreover, you will find some amazing examples of ancient architecture and religious beliefs through the monuments like Thirunelli Temple, Karinthandan Temple, Ancient Jain Temple and much more. All these threads weave into an amazing tapestry that will glorify your photo album as well as the album of your life.</p>
<!-- <p class="card-text"><small class="text-muted">Last updated 3 mins ago</small></p> -->
</div>
</div>
<div class="card">
<img src="./images/assign pics/pl-cochi.jpg" width="200" height="200" class="card-img-bottom" alt="">
<div class="card-body">
<h5 class="card-title">Cochin</h5>
<p class="card-text">Kochi (also colonially known as Cochin) is a city located on the southwest coast of India in the state of Kerala. It has been a major port since the 19th century and was built by the British Raj in India for its strategic and industrial importance in foreign trade. Kochi is arguably the ideal starting point for exploring the diversity and beauty of Kerala. With the best places to visit in Cochin, it has been rated among the top three tourist destinations by the World Travel & Tourism Council. Cochin and its surrounding hill station Munnar are a must visit for those touring Kerala. The backwaters of Kerala can especially be experienced through the waterways flowing across Cochin in streams and lagoons. Such places have now been converted into picnic and fishing spots, where visitors can boat through the water and stop midway on an isolated camp for some private time with nature. Fort Kochi, one of the most famous beaches has a colonial era feel and cool winds from the Arabian sea that soothes the nerves of backpackers sailing or cycling across the city. Cochin is also particularly known for its religious houses especially British era churches, and forts that have been converted into temples.
This lovely seaside city is flanked by the Western Ghats on the east and the Arabian Sea on the west. Its proximity to the equator, the vast water body and the hills provide a moderate equatorial climate for visitors. With the best places to visit in Cochin, informally it is also referred to as the Gateway to Kerala</p>
<a href="districts.html" class="dis">See More</a>
</div>
</div>
</div>
</div>
</body>
</html>
<file_sep>/Idukki.html
<!DOCTYPE html>
<html lang="en">
<head>
<title>Kerala Tourism</title>
<link rel="stylesheet" href="./css/css/style.css">
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.0.0-beta1/dist/css/bootstrap.min.css" rel="stylesheet" integrity="<KEY>" crossorigin="anonymous">
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.0.0-beta1/dist/js/bootstrap.bundle.min.js" integrity="<KEY>" crossorigin="anonymous"></script>
</head>
<body background="./assign pics/img1.jpg">
<nav class="navbar">
<img class="logo" src="./images/assign pics/logo.png">
<ul>
<li><a class="active" href="index.html">Home</a></li>
<li><a href="login.html">Login</a></li>
<li><a href="SignUp.html">Sign Up</a></li>
<li><a href="districts.html">Districts</a></li>
<li><a href="#">Contact Us</a></li>
<li><a href="#">Search</a></li>
<input type="text" placeholder="Search">
</ul>
</nav>
<div class="head">
<h1>Idukki</h1>
<div class="container1-idk">
<div class="circle-idk"></div>
<h3>Idukki</h3>
<p>Idukki is a district in the Indian state of Kerala. It was constituted on 26 January 1972 by splitting the district of Kottayam into two parts. Its administration was earlier headquartered at Kottayam city, but moved to Painavu in June 1976. Idukki district lies amid the Western Ghats of Kerala.
Though it is regarded as the second-largest district in the region, it has the lowest population density among the districts of Kerala, with the urban population being higher than the rural. Idukki is rich in forests and is also known as the "Spice Garden of Kerala". For those lost in the concrete jungle, Idukki's tourist places offer an acquaintance with the splendor of nature and God’s magnificent creation. Idukki, one of the quaintest districts of Kerala, is home to a number of prominent tourist places.
The district, which lies in the Western Ghats of Kerala, is topographically known for its rocky terrain and varied verdant vegetation. Places in Idukki are full of cliffs, lush greenery, lagoons, tranquil rivers and cascading waterfalls that hypnotize tourists and leave them with everlasting memories. With places such as Painavu, the arch dam and Poonchira, Idukki shows a variety in its attractions.
The three rivers Thodupuzhayar, Periyar and Thalaya, along with their tributaries, keep Idukki evergreen. They are not only a source of irrigation and power generation but also home to the diverse flora and fauna that add charm to Idukki. The places to visit in Idukki range from popular hill stations and wildlife sanctuaries to majestic waterfalls, dams and, not to forget, historical places.</p>
</div>
</div>
<div class="dist-img">
<div id="carouselExampleDark" class="carousel slide" data-bs-ride="carousel">
<ol class="carousel-indicators">
<li data-bs-target="#carouselExampleDark" data-bs-slide-to="0" class="active"></li>
<li data-bs-target="#carouselExampleDark" data-bs-slide-to="1"></li>
<li data-bs-target="#carouselExampleDark" data-bs-slide-to="2"></li>
</ol>
<div class="carousel-inner">
<div class="carousel-item active" data-bs-interval="10000">
<img src="./images/assign pics/idk1.jpg" class="d-block w-100" width="350" height="370" alt="kasar1">
<div class="carousel-caption d-none d-md-block">
<h5>Idukki Wildlife Sanctuary</h5>
</div>
</div>
<div class="carousel-item" data-bs-interval="2000">
<img src="./images/assign pics/idk2.jpg" class="d-block w-100" width="350" height="370" alt="kasar">
<div class="carousel-caption d-none d-md-block">
<h5>Keezharkuthu Falls</h5>
</div>
</div>
<div class="carousel-item">
<img src="./images/assign pics/idk3.jpg" class="d-block w-100" width="350" height="370" alt="kasar2">
<div class="carousel-caption d-none d-md-block">
<h5>Hill View Park</h5>
</div>
</div>
</div>
<a class="carousel-control-prev" href="#carouselExampleDark" role="button" data-bs-slide="prev">
<span class="carousel-control-prev-icon" aria-hidden="true"></span>
<span class="visually-hidden">Previous</span>
</a>
<a class="carousel-control-next" href="#carouselExampleDark" role="button" data-bs-slide="next">
<span class="carousel-control-next-icon" aria-hidden="true"></span>
<span class="visually-hidden">Next</span>
</a>
</div>
</div>
</body>
</html><file_sep>/css/css/form.js
function validation(){
var user = document.getElementById('user').value;
var pass = document.getElementById('pass').value;
var confirmpass = document.getElementById('conpass').value;
var mobileNumber = document.getElementById('mobileNumber').value;
var emails = document.getElementById('emails').value;
if(user == ""){
document.getElementById('username').innerHTML =" ** Please fill the username field";
return false;
}
if((user.length <= 2) || (user.length > 20)) {
document.getElementById('username').innerHTML =" ** Username lenght must be between 2 and 20";
return false;
}
if(!isNaN(user)){
document.getElementById('username').innerHTML =" ** only characters are allowed";
return false;
}
if(pass == ""){
document.getElementById('passwords').innerHTML =" ** Please fill the password field";
return false;
}
if((pass.length <= 5) || (pass.length > 20)) {
document.getElementById('passwords').innerHTML =" ** Passwords lenght must contain atleast 8 characters,atleast one uppercase and one lowercase,must contain atleast one number";
return false;
}
if(confirmpass == ""){
document.getElementById('confrmpass').innerHTML =" ** Please fill the confirm password field";
return false;
}
if(pass != confirmpass){
document.getElementById('confrmpass').innerHTML =" ** Password does not match the confirm password";
return false;
}
if(mobileNumber == ""){
document.getElementById('mobileno').innerHTML =" ** Please fill the mobile Number field";
return false;
}
if(isNaN(mobileNumber)){
document.getElementById('mobileno').innerHTML =" ** Mobile number must contain digits only";
return false;
}
if(mobileNumber.length!=10){
document.getElementById('mobileno').innerHTML =" ** Mobile Number must be 10 digits";
return false;
}
if(emails == ""){
document.getElementById('emailids').innerHTML =" ** Please fill the email id field";
return false;
}
if(emails.indexOf('@') <= 0 ){
document.getElementById('emailids').innerHTML =" ** @ Invalid Position";
return false;
}
if((emails.charAt(emails.length-4)!='.') && (emails.charAt(emails.length-3)!='.')){
document.getElementById('emailids').innerHTML =" ** . Invalid Position";
return false;
}
}
// login form validation
function validate(){
var emails = document.getElementById("emails").value;
var pwd = document.getElementById("pwd").value;
if(emails == ""){
document.getElementById("username").innerHTML =" ** Please fill the username field";
return false;
}
if((emails.length <= 2) || (emails.length > 20)) {
document.getElementById("username").innerHTML =" ** Username lenght must be between 2 and 20";
return false;
}
if(!isNaN(emails)){
document.getElementById("username").innerHTML =" ** only characters are allowed";
return false;
}
if(pwd == ""){
document.getElementById("password").innerHTML =" ** Please fill the password field";
return false;
}
if((pwd.length <= 5) || (pwd.length > 20)) {
document.getElementById("password").innerHTML =" ** Passwords lenght must be between 5 and 20";
return false;
}
}
// password strength bar
function CheckPasswordStrength(password) {
var password_strength = document.getElementById("password_strength");
//TextBox left blank.
if (password.length == 0) {
password_strength.innerHTML = "";
return;
}
//Regular Expressions.
var regex = new Array();
regex.push("[A-Z]"); //Uppercase Alphabet.
regex.push("[a-z]"); //Lowercase Alphabet.
regex.push("[0-9]"); //Digit.
regex.push("[$@$!%*#?&]"); //Special Character.
var passed = 0;
//Validate for each Regular Expression.
for (var i = 0; i < regex.length; i++) {
if (new RegExp(regex[i]).test(password)) {
passed++;
}
}
//Validate for length of Password.
if (passed > 2 && password.length > 8) {
passed++;
}
//Display status.
var color = "";
var strength = "";
switch (passed) {
case 0:
case 1:
strength = "Weak";
color = "red";
break;
case 2:
strength = "Good";
color = "darkorange";
break;
case 3:
case 4:
strength = "Strong";
color = "green";
break;
case 5:
strength = "Very Strong";
color = "darkgreen";
break;
}
password_strength.innerHTML = strength;
password_strength.style.color = color;
}
<file_sep>/kozhikode.html
<!DOCTYPE html>
<html lang="en">
<head>
<title>Kerala Tourism</title>
</head>
<link rel="stylesheet" href="./css/css/style.css">
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.0.0-beta1/dist/css/bootstrap.min.css" rel="stylesheet" integrity="<KEY>" crossorigin="anonymous">
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.0.0-beta1/dist/js/bootstrap.bundle.min.js" integrity="<KEY>" crossorigin="anonymous"></script>
<body background="./assign pics/img1.jpg">
<nav class="navbar">
<img class="logo" src="./images/assign pics/logo.png">
<ul>
<li><a class="active" href="index.html">Home</a></li>
<li><a href="login.html">Login</a></li>
<li><a href="SignUp.html">Sign Up</a></li>
<li><a href="districts.html">Districts</a></li>
<li><a href="#">Contact Us</a></li>
<li><a href="#">Search</a></li>
<input type="text" placeholder="Search">
</ul>
</nav>
<div class="head">
<h1>Kozhikode</h1>
<div class="container-kzd">
<div class="circle-kzd"></div>
<h3>Kozhikode:</h3>
<p>Calicut, also known as Kozhikode, is a northern district located on India's south-west coastal belt. The headquarters of this district is also known by the same name, i.e. the city of Calicut. Bounded by the Arabian Sea in the west, this region was a glorious centre of trade and commerce in the bygone era. Calicut is famed for its culture and cuisine, which stand out and demand the attention of travellers and historians alike. This region is the birthplace of the world-famous folk songs known by the name <NAME>. Calicut thrives with rich Islamic art forms that contribute to the uniqueness of the cultural history of the region. Oppana (a Muslim dance form), Mappilapattukal (Muslim songs) and Gazals are the most popular Islamic art forms of this place. Beypore Beach, Kappad Beach and Peruvannamuzhi Dam are perfect spots for family picnics and one-day trips. Other popular attractions of Calicut include Thikkoti Lighthouse, Manachira Square, Pazhassiraja Museum, Kalipoyika, Lion's Park, Tali Temple, Kakkayam, Krishna Menon Museum and the planetarium.</p>
</div>
</div>
<div class="dist-img">
<div id="carouselExampleDark" class="carousel slide" data-bs-ride="carousel">
<ol class="carousel-indicators">
<li data-bs-target="#carouselExampleDark" data-bs-slide-to="0" class="active"></li>
<li data-bs-target="#carouselExampleDark" data-bs-slide-to="1"></li>
<li data-bs-target="#carouselExampleDark" data-bs-slide-to="2"></li>
</ol>
<div class="carousel-inner">
<div class="carousel-item active" data-bs-interval="10000">
<img src="./images/assign pics/kzd1.jpg" class="d-block w-100" width="350" height="370" alt="kasar1">
<div class="carousel-caption d-none d-md-block">
<h5>Kappad Beach</h5>
</div>
</div>
<div class="carousel-item" data-bs-interval="2000">
<img src="./images/assign pics/kzd2.jpg" class="d-block w-100" width="350" height="370" alt="kasar">
<div class="carousel-caption d-none d-md-block">
<h5>Beypore Beach</h5>
</div>
</div>
<div class="carousel-item">
<img src="./images/assign pics/kzd3.jpg" class="d-block w-100" width="350" height="370" alt="kasar2">
<div class="carousel-caption d-none d-md-block">
<h5>Kakkayam Dam</h5>
</div>
</div>
</div>
<a class="carousel-control-prev" href="#carouselExampleDark" role="button" data-bs-slide="prev">
<span class="carousel-control-prev-icon" aria-hidden="true"></span>
<span class="visually-hidden">Previous</span>
</a>
<a class="carousel-control-next" href="#carouselExampleDark" role="button" data-bs-slide="next">
<span class="carousel-control-next-icon" aria-hidden="true"></span>
<span class="visually-hidden">Next</span>
</a>
</div>
</div>
</body>
</html><file_sep>/README.md
# shabna_kerala-tourism_ictkerala
|
0e90eb5f2823e13c26d7641e37ad34bc2a2761d5
|
[
"JavaScript",
"HTML",
"Markdown"
] | 5
|
HTML
|
shabna21/shabna_keralatourism_ictkerala
|
a85b7b999a97f826121ab658e354f177a902c686
|
11ce80bd10a9976499104e61fe5bb4dade41b96c
|
refs/heads/master
|
<file_sep>const mongoose = require("mongoose");
const { Schema } = mongoose;
const userSchema = new Schema({
name: {
first: String,
last: String
},
boards: {
type: Array,
default: [],
required: false
}
});
const User = mongoose.model("users", userSchema);
module.exports = User;
<file_sep>import React, { Component } from "react";
import Boards from "./Navbar/BoardsDropdown/Boards";
import SearchBar from "./Navbar/SearchBar";
import HomeButton from "./Navbar/HomeButton";
import AddNew from "./Navbar/AddNew";
import InfoButton from "./Navbar/InfoButton";
import NotifyButton from "./Navbar/NotifyButton";
import Profile from "./Navbar/Profile";
import "./Navbar/Navbar.css";
class Navbar extends Component {
constructor(props) {
super(props);
this.state = {};
}
userArr = [
{
name: {
first: "testingFirstName",
last: "testingLastName"
},
boards: [
{
name: "testing board 1",
image: "testing img link",
description: "testing description here"
}
]
},
{
name: {
first: "testing firstName 2",
last: "testing lastName 2"
},
boards: [
{
name: "testing board 2",
image: "testing img link 2",
description: "testing description 2 here"
}
]
}
];
renderUser() {
return this.userArr.map(({ name, boards }) => {
return <Profile user={name} boards={boards} />;
});
}
render() {
return (
<nav className="nav-wrapper">
<div className="nav__boards">
<Boards />
</div>
<div className="nav__search">
<SearchBar />
</div>
<div className="nav__home">
<HomeButton />
</div>
<div className="nav__add">
<AddNew />
</div>
<div className="nav__info">
<InfoButton />
</div>
<div className="nav__notify">
<NotifyButton />
</div>
<div className="nav__profile">
<Profile currentUser={this.renderUser} />
</div>
</nav>
);
}
}
export default Navbar;
<file_sep>import React, { Component } from 'react'
const HomeButton = () => {
return (
<a className="nav__home--button">
Home
</a>
)
}
export default HomeButton;<file_sep>const mongoose = require("mongoose");
const db = require("./models");
mongoose.Promise = global.Promise;
mongoose.connect("mongodb://localhost/trello-clone");
const userSeed = [
{
name: {
first: "Trevor",
last: "Meadows"
},
boards: []
},
{
name: {
first: "Jack",
last: "Peanut"
},
boards: []
},
{
name: {
first: "Mug",
last: "O'Coffee"
},
boards: []
},
{
name: {
first: "React",
last: "Redux"
},
boards: []
},
{
name: {
first: "Sunny",
last: "Brightside"
},
boards: []
},
{
name: {
first: "Lauren",
last: "McAndrew"
},
boards: []
},
{
name: {
first: "Johnny",
last: "Boi"
},
boards: []
}
];
db.User.remove({}).then(() => db.User.create(userSeed));
<file_sep>const express = require("express");
const path = require("path");
const mongoose = require("mongoose");
const bodyParser = require("body-parser");
const app = express();
const mongoConnection = function() {
mongoose.connect("mongodb://localhost/trello-clone");
};
mongoose.Promise = global.Promise;
// Serve static files from the React app
app.use(express.static(path.join(__dirname, "client/build")));
// Put all API endpoints under '/api'
app.get("/api/test", (req, res) => {
const fakeData = {
status: "ok",
code: 200
};
res.json(fakeData);
});
// The "catchall" handler: for any request that doesn't
// match one above, send back React's index.html file.
app.get("*", (req, res) => {
res.sendFile(path.join(__dirname + "/client/build/index.html"));
});
const handleUnexpectedError = (err, req, res, next) => {
console.log("Unexpected error: " + JSON.stringify(err));
res.json({ status: 500 });
};
app.use(handleUnexpectedError);
module.exports = app;
<file_sep>import React, { Component } from "react";
import BoardsList from "./BoardsList";
import "./Boards.css";
let boards = [
{
name: "testing 1",
image: "link to img",
description: "description of board/group here"
},
{
name: "testing 2",
image: "link to img",
description: "description of board/group here"
},
{
name: "testing 3",
image: "link to img",
description: "description of board/group here"
},
{
name: "testing 4",
image: "link to img",
description: "description of board/group here"
},
{
name: "testing 5",
image: "link to img",
description: "description of board/group here"
}
];
class Boards extends Component {
constructor(props) {
super(props);
this.state = {
showComponent: false
};
this.showDropdownMenu = this.showDropdownMenu.bind(this);
this.hideDropdownMenu = this.hideDropdownMenu.bind(this);
}
showDropdownMenu(event) {
event.preventDefault();
this.setState({ showComponent: true }, () => {
document.addEventListener("click", this.hideDropdownMenu);
});
}
hideDropdownMenu() {
this.setState({ showComponent: false }, () => {
document.removeEventListener("click", this.hideDropdownMenu);
});
}
render() {
return (
<div className="boardsMenu">
<a className="button" onClick={this.showDropdownMenu}>
Boards
</a>
{this.state.showComponent ? (
<div>
<BoardsList board={boards} />
</div>
) : null}
</div>
);
}
}
export default Boards;
<file_sep>import React, { Component } from "react";
import BoardsListItem from "./BoardsListItem";
class BoardsList extends Component {
render() {
return (
<ul>
{this.props.board.map(item => {
return (
<BoardsListItem
key={item.name}
name={item.name}
image={item.image}
description={item.description}
/>
);
})}
</ul>
);
}
}
export default BoardsList;
<file_sep>import React, { Component } from 'react'
class NotifyButton extends Component {
render() {
return (
<a>
Notifications
</a>
)
}
}
export default NotifyButton;<file_sep>import React, { Component } from "react";
class SearchBar extends Component {
constructor(props) {
super(props);
this.state = {
term: "no results found",
showComponent: false
};
this.termSearch = this.termSearch.bind(this);
this.showDropdownMenu = this.showDropdownMenu.bind(this);
}
showDropdownMenu(event) {
event.preventDefault();
this.setState({ showComponent: true });
}
termSearch(event, term) {
event.preventDefault();
let inputTerm = event.target.value;
if (inputTerm === "testing") {
return this.setState({ term: inputTerm });
} else {
return this.setState({ term: "no results found" });
}
}
render() {
return (
<div className="nav__boards--searchBar">
<input placeholder="Search for a board" onChange={this.termSearch} />
<a
className="nav__boards--searchButton"
onClick={this.showDropdownMenu}
>
Search
</a>
{this.state.showComponent ? (
<div>
<h1>{this.state.term}</h1>
</div>
) : null}
</div>
);
}
}
export default SearchBar;
|
e2bcab78dbd3e164cc2cfc26f94e03db328c9b6b
|
[
"JavaScript"
] | 9
|
JavaScript
|
tlm04070/trello-clone
|
d8e675396288e0ec3d8504377d04962120ea6812
|
e16b901a3c6c406b79d764b18766416a9c26add4
|
refs/heads/master
|
<repo_name>jefersondeoliveira/JavaWebCodigoAulaDudu<file_sep>/README.md
JavaWebCodigoAulaDudu
=====================
Project being developed in class.
<file_sep>/br.com.javaweb.aula-ejb/src/java/br/com/javaweb/aula/service/IModeloService.java
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package br.com.javaweb.aula.service;
import br.com.javaweb.aula.entidade.Modelo;
import java.util.List;
/**
*
* @author Jeferson
*/
public interface IModeloService {
String salvar(Modelo modelo);
public List<Modelo> listar();
public String remover(Short id);
}
<file_sep>/br.com.javaweb.aula-war/src/java/br/com/javaweb/aula/controller/CorManagedBean.java
package br.com.javaweb.aula.controller;
import br.com.javaweb.aula.entidade.Cor;
import br.com.javaweb.aula.service.ICorService;
import java.util.List;
import javax.ejb.EJB;
import javax.enterprise.context.RequestScoped;
import javax.inject.Named;
@Named("corManagedBean")
@RequestScoped
public class CorManagedBean {
private Cor cor;
@EJB
private ICorService corService;
public List<Cor> listar() {
return corService.listar();
}
public CorManagedBean(){
cor = new Cor();
}
public Cor getCor() {
return cor;
}
public void setCor(Cor cor) {
this.cor = cor;
}
}
<file_sep>/br.com.javaweb.aula-ejb/src/java/br/com/javaweb/aula/serviceimpl/AutomovelService.java
package br.com.javaweb.aula.serviceimpl;
import br.com.javaweb.aula.entidade.Automovel;
import br.com.javaweb.aula.service.IAutomovelService;
import java.util.List;
import javax.ejb.Stateless;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.TypedQuery;
@Stateless
public class AutomovelService implements IAutomovelService{
@PersistenceContext
private EntityManager em;
@Override
public List<Automovel> listar() {
TypedQuery<Automovel> automovelQuery =
em.createQuery("select a from Automovel as a",
Automovel.class);
return automovelQuery.getResultList();
}
@Override
public String salvar(Automovel automovel) {
try {
em.merge(automovel);
} catch (Exception ex) {
ex.printStackTrace();
return ex.getMessage();
}
return null;
}
@Override
public String remover(Integer idAutomovel) {
try {
em.remove(em.find(Automovel.class, idAutomovel));
} catch (Exception ex) {
ex.printStackTrace();
return ex.getMessage();
}
return null;
}
}
<file_sep>/br.com.javaweb.aula-ejb/src/java/br/com/javaweb/aula/service/IMarcaService.java
package br.com.javaweb.aula.service;
import br.com.javaweb.aula.entidade.Marca;
import br.com.javaweb.aula.model.GraficoModel;
import java.util.List;
public interface IMarcaService {
public List<Marca> listar();
public String salvar(Marca marca);
public String remover(Short id);
public List<GraficoModel> gerarGrafico();
}
<file_sep>/br.com.javaweb.aula-ejb/src/java/br/com/javaweb/aula/entidade/Marca.java
package br.com.javaweb.aula.entidade;
import java.util.List;
import java.util.Objects;
import javax.persistence.*;
import javax.validation.constraints.NotNull;
@Entity
@Table(name = "marca")
public class Marca {
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
@Column(name="idMarca",nullable = false)
private Short idMarca;
@Column(name = "descricao",nullable=false,length = 50)
@NotNull(message = "Informe a descrição")
private String descricao;
@OneToMany(mappedBy = "marca",
fetch = FetchType.LAZY)
private List<Modelo> modelos;
public Short getIdMarca() {
return idMarca;
}
public void setIdMarca(Short idMarca) {
this.idMarca = idMarca;
}
public String getDescricao() {
return descricao;
}
public void setDescricao(String descricao) {
this.descricao = descricao;
}
public List<Modelo> getModelos() {
return modelos;
}
public void setModelos(List<Modelo> modelos) {
this.modelos = modelos;
}
@Override
public int hashCode() {
int hash = 7;
hash = 19 * hash + Objects.hashCode(this.idMarca);
return hash;
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final Marca other = (Marca) obj;
if (!Objects.equals(this.idMarca, other.idMarca)) {
return false;
}
return true;
}
}
<file_sep>/br.com.javaweb.aula-war/src/java/br/com/javaweb/aula/controller/CombustivelManagedBean.java
package br.com.javaweb.aula.controller;
import br.com.javaweb.aula.entidade.Combustivel;
import br.com.javaweb.aula.service.ICombustivelService;
import java.util.List;
import javax.ejb.EJB;
import javax.enterprise.context.RequestScoped;
import javax.inject.Named;
@Named("combustivelManagedBean")
@RequestScoped
public class CombustivelManagedBean {
private Combustivel combustivel;
@EJB
private ICombustivelService combustivelService;
public List<Combustivel> listar() {
return combustivelService.listar();
}
public CombustivelManagedBean(){
combustivel = new Combustivel();
}
public Combustivel getCombustivel() {
return combustivel;
}
public void setCombustivel(Combustivel combustivel) {
this.combustivel = combustivel;
}
}
<file_sep>/br.com.javaweb.aula-ejb/src/java/br/com/javaweb/aula/service/IAutomovelService.java
package br.com.javaweb.aula.service;
import br.com.javaweb.aula.entidade.Automovel;
import java.util.List;
public interface IAutomovelService {
public List<Automovel> listar();
public String salvar(Automovel automovel);
public String remover(Integer id);
}
<file_sep>/br.com.javaweb.aula-ejb/src/java/br/com/javaweb/aula/serviceimpl/OpcionalService.java
package br.com.javaweb.aula.serviceimpl;
import br.com.javaweb.aula.entidade.Opcional;
import br.com.javaweb.aula.service.IOpcionalService;
import java.util.List;
import javax.ejb.Stateless;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.TypedQuery;
@Stateless
public class OpcionalService implements IOpcionalService {
@PersistenceContext
private EntityManager em;
@Override
public List<Opcional> listar() {
TypedQuery<Opcional> opcionalQuery
= em.createQuery("select o from Opcional as o",
Opcional.class);
return opcionalQuery.getResultList();
}
@Override
public String salvar(Opcional opcional) {
try {
em.merge(opcional);
} catch (Exception ex) {
ex.printStackTrace();
return ex.getMessage();
}
return null;
}
@Override
public String remover(Short idOpcional) {
try {
em.remove(em.find(Opcional.class, idOpcional));
} catch (Exception ex) {
ex.printStackTrace();
return ex.getMessage();
}
return null;
}
}
<file_sep>/br.com.javaweb.aula-ejb/src/java/br/com/javaweb/aula/serviceimpl/ModeloService.java
package br.com.javaweb.aula.serviceimpl;
import br.com.javaweb.aula.service.IModeloService;
import br.com.javaweb.aula.entidade.Modelo;
import java.util.List;
import javax.ejb.Stateless;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.TypedQuery;
/**
*
* @author Jeferson
*/
@Stateless
public class ModeloService implements IModeloService {
@PersistenceContext
private EntityManager em;
@Override
public String salvar(Modelo modelo){
try {
em.merge(modelo);
} catch (Exception e) {
e.printStackTrace();
return e.getMessage();
}
return null;
}
@Override
public List<Modelo> listar() {
TypedQuery<Modelo> modeloQuery
= em.createQuery("select m from Modelo as m",
Modelo.class);
return modeloQuery.getResultList();
}
@Override
public String remover(Short idModelo) {
try {
em.remove(em.find(Modelo.class, idModelo));
} catch (Exception ex) {
ex.printStackTrace();
return ex.getMessage();
}
return null;
}
}
<file_sep>/br.com.javaweb.aula-ejb/src/java/br/com/javaweb/aula/service/ICorService.java
package br.com.javaweb.aula.service;
import br.com.javaweb.aula.entidade.Cor;
import java.util.List;
public interface ICorService {
public List<Cor> listar();
}
<file_sep>/br.com.javaweb.aula-ejb/src/java/br/com/javaweb/aula/entidade/Cor.java
package br.com.javaweb.aula.entidade;
import java.util.List;
import java.util.Objects;
import javax.persistence.*;
@Entity
@Table(name = "Cor")
public class Cor
{
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
@Column(name = "idCor", nullable = false)
private Short idCor;
@Column(name = "descricao", nullable = false, length = 50)
private String descricao;
@OneToMany(mappedBy = "cor", fetch = FetchType.LAZY)
private List<Automovel> automovel;
public Short getIdCor() {
return idCor;
}
public void setIdCor(Short idCor) {
this.idCor = idCor;
}
public String getDescricao() {
return descricao;
}
public void setDescricao(String descricao) {
this.descricao = descricao;
}
public List<Automovel> getAutomovel() {
return automovel;
}
public void setAutomovel(List<Automovel> automovel) {
this.automovel = automovel;
}
@Override
public int hashCode() {
int hash = 7;
hash = 31 * hash + Objects.hashCode(this.idCor);
return hash;
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final Cor other = (Cor) obj;
if (!Objects.equals(this.idCor, other.idCor)) {
return false;
}
return true;
}
}
|
22c7678b1a3d85f856bf70e280c87e9065e2b9c3
|
[
"Markdown",
"Java"
] | 12
|
Markdown
|
jefersondeoliveira/JavaWebCodigoAulaDudu
|
7310992051c09890abc905d8a5996e34d9b42f9d
|
086b2371f8fe5f9007310d4218725b25953210bd
|
refs/heads/master
|
<file_sep>DROP TABLE IF EXISTS user_roles;
DROP TABLE IF EXISTS menu_dishes;
DROP TABLE IF EXISTS users;
DROP TABLE IF EXISTS menus;
DROP TABLE IF EXISTS dishes;
DROP TABLE IF EXISTS restaurants;
DROP SEQUENCE IF EXISTS global_seq;
CREATE SEQUENCE global_seq START WITH 100000;
CREATE TABLE users
(
id INTEGER DEFAULT nextval('global_seq') PRIMARY KEY,
name VARCHAR NOT NULL,
email VARCHAR NOT NULL,
password VARCHAR NOT NULL,
enabled BOOL DEFAULT TRUE NOT NULL,
registered TIMESTAMP DEFAULT now() NOT NULL
);
CREATE TABLE restaurants
(
id INTEGER DEFAULT nextval('global_seq') PRIMARY KEY,
name VARCHAR NOT NULL,
active BOOL DEFAULT TRUE NOT NULL
);
CREATE UNIQUE INDEX restaurants_unique_name_idx ON restaurants (name);
CREATE TABLE user_roles
(
user_id INTEGER NOT NULL,
role VARCHAR,
CONSTRAINT user_roles_idx UNIQUE (user_id, role),
FOREIGN KEY (user_id) REFERENCES users (id) ON DELETE CASCADE
);
CREATE TABLE dishes
(
id INTEGER DEFAULT nextval('global_seq') PRIMARY KEY,
name VARCHAR NOT NULL,
price INTEGER NOT NULL,
active BOOL DEFAULT TRUE NOT NULL,
restaurant_id INTEGER NOT NULL,
FOREIGN KEY (restaurant_id) REFERENCES restaurants (id) ON DELETE CASCADE
);
CREATE UNIQUE INDEX dishes_unique_name_restaurant_idx ON dishes (name, restaurant_id);
CREATE TABLE menus
(
id INTEGER DEFAULT nextval('global_seq') PRIMARY KEY,
date TIMESTAMP DEFAULT now() NOT NULL,
restaurant_id INTEGER NOT NULL,
FOREIGN KEY (restaurant_id) REFERENCES restaurants (id) ON DELETE CASCADE
);
CREATE TABLE menu_dishes
(
menu_id INTEGER NOT NULL,
dish_id INTEGER NOT NULL,
FOREIGN KEY (menu_id) REFERENCES menus (id) ON DELETE CASCADE,
FOREIGN KEY (dish_id) REFERENCES dishes (id),
CONSTRAINT menu_dishes_idx UNIQUE (menu_id, dish_id)
)<file_sep>package javvernaut.votingsystem.repository;
import javvernaut.votingsystem.model.Dish;
import javvernaut.votingsystem.repository.jpa.DishJpaRepository;
import javvernaut.votingsystem.repository.jpa.RestaurantJpaRepository;
import org.springframework.stereotype.Repository;
import org.springframework.transaction.annotation.Transactional;
import java.util.List;
@Repository
public class DishRepository {
private final DishJpaRepository dishJpaRepository;
private final RestaurantJpaRepository restaurantJpaRepository;
public DishRepository(DishJpaRepository dishJpaRepository, RestaurantJpaRepository restaurantJpaRepository) {
this.dishJpaRepository = dishJpaRepository;
this.restaurantJpaRepository = restaurantJpaRepository;
}
public Dish get(int id, int restaurantId) {
return dishJpaRepository.findByIdAndRestaurantId(id, restaurantId).orElseThrow();
}
public List<Dish> getAll(int restaurantId) {
return dishJpaRepository.findAllByRestaurantId(restaurantId);
}
@Transactional
public Dish save(Dish dish, int restaurantId) {
if (!dish.isNew() && dishJpaRepository.findByIdAndRestaurantId(dish.getId(), restaurantId).isEmpty()) {
return null;
}
dish.setRestaurant(restaurantJpaRepository.getOne(restaurantId));
return dishJpaRepository.save(dish);
}
/* @Transactional
public void delete(int id, int restaurantId) {
Dish dish = dishJpaRepository.findOneByIdAndRestaurantId(id, restaurantId).orElseThrow();
dish.setActive(false);
}*/
}
<file_sep>package javvernaut.votingsystem.web.user;
import javvernaut.votingsystem.model.User;
import javvernaut.votingsystem.to.UserTo;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.servlet.support.ServletUriComponentsBuilder;
import javax.validation.Valid;
import java.net.URI;
import static javvernaut.votingsystem.util.security.SecurityUtil.authUserId;
@RestController
@RequestMapping(value = UserController.PROFILE_URL, produces = MediaType.APPLICATION_JSON_VALUE)
public class UserController extends AbstractUserController {
public static final String PROFILE_URL = "/profile";
@GetMapping
public User get() {
return super.get(authUserId());
}
@DeleteMapping
@ResponseStatus(HttpStatus.NO_CONTENT)
public void delete() {
super.delete(authUserId());
}
@PostMapping(value = "/register", consumes = MediaType.APPLICATION_JSON_VALUE)
@ResponseStatus(HttpStatus.CREATED)
public ResponseEntity<User> register(@Valid @RequestBody UserTo userTo) {
User created = super.create(userTo);
URI uriOfNewResource = ServletUriComponentsBuilder.fromCurrentContextPath()
.path(PROFILE_URL).build().toUri();
return ResponseEntity.created(uriOfNewResource).body(created);
}
@PutMapping(consumes = MediaType.APPLICATION_JSON_VALUE)
@ResponseStatus(HttpStatus.NO_CONTENT)
public void update(@Valid @RequestBody UserTo userTo) {
super.update(userTo, authUserId());
}
}
<file_sep>package javvernaut.votingsystem.web.user;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.*;
class UserControllerTest {
@Test
void get() {
}
@Test
void delete() {
}
@Test
void register() {
}
@Test
void update() {
}
}<file_sep>package javvernaut.votingsystem;
import javvernaut.votingsystem.model.Role;
import javvernaut.votingsystem.model.User;
import static javvernaut.votingsystem.model.AbstractBaseEntity.START_SEQ;
public class UserTestData {
public static final javvernaut.votingsystem.TestMatcher<User> USER_MATCHER = javvernaut.votingsystem.TestMatcher.usingIgnoringFieldsComparator(User.class, "registered", "meals", "password");
public static final int USER_ID = START_SEQ;
public static final int ADMIN_ID = START_SEQ + 1;
public static final int NOT_FOUND = 10;
public static final User mockUser = new User(USER_ID, "User", "<EMAIL>", "<PASSWORD>", Role.USER);
public static final User mockAdmin = new User(ADMIN_ID, "admin", "<EMAIL>", "admin", Role.USER, Role.ADMIN);
}
<file_sep>package javvernaut.votingsystem.util;
import javvernaut.votingsystem.HasId;
import javvernaut.votingsystem.util.exception.IllegalRequestDataException;
import lombok.experimental.UtilityClass;
@UtilityClass
public class ValidationUtil {
public static void checkNew(HasId bean) {
if (!bean.isNew()) {
throw new IllegalRequestDataException(bean + " must be new (id=null)");
}
}
public static void assureIdConsistent(HasId bean, int id) {
// conservative when you reply, but accept liberally (http://stackoverflow.com/a/32728226/548473)
if (bean.isNew()) {
bean.setId(id);
} else if (bean.getId() != id) {
throw new IllegalRequestDataException(bean + " must be with id=" + id);
}
}
}
<file_sep>package javvernaut.votingsystem.repository.jpa;
import javvernaut.votingsystem.model.Dish;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.transaction.annotation.Transactional;
import java.util.List;
import java.util.Optional;
@Transactional(readOnly = true)
public interface DishJpaRepository extends JpaRepository<Dish, Integer> {
Optional<Dish> findByIdAndRestaurantId(int id, int restaurantId);
List<Dish> findAllByRestaurantId(int restaurantId);
}
<file_sep>DELETE
FROM user_roles;
DELETE
FROM menu_dishes;
DELETE
FROM users;
DELETE
FROM menus;
DELETE
FROM dishes;
DELETE
FROM restaurants;
ALTER SEQUENCE global_seq RESTART WITH 100000;
INSERT INTO users (name, email, password)
VALUES ('User', '<EMAIL>', '{noop}password'),
('Admin', '<EMAIL>', '{noop}admin');
INSERT INTO user_roles (role, user_id)
VALUES ('USER', 100000),
('ADMIN', 100001),
('USER', 100001);
INSERT INTO RESTAURANTS(NAME)
VALUES ('McDonalds'),
('Burger King');
INSERT INTO dishes(name, price, restaurant_id)
VALUES ('Chicken', 253, 100002),
('Soup', 400, 100002),
('Turkey in pita', 350, 100002),
('Borscht', 333, 100002),
('Juice', 333, 100002),
('Pizza', 333, 100003),
('Beer', 333, 100003),
('BlackJack', 333, 100003),
('Courtesans', 333, 100003);
INSERT INTO menus(date, restaurant_id)
VALUES ('2020-12-10 00:00:00', 100002),
('2020-12-11 00:00:00', 100002),
('2020-12-12 00:00:00', 100002),
('2020-12-10 00:00:00', 100003),
('2020-12-11 00:00:00', 100003),
('2020-12-12 00:00:00', 100003);
INSERT INTO menu_dishes(menu_id, dish_id)
VALUES (100013, 100008),
(100013, 100006),
(100013, 100005),
(100014, 100004),
(100014, 100005),
(100014, 100007),
(100015, 100004),
(100015, 100005),
(100015, 100006),
(100015, 100007),
(100016, 100009),
(100016, 100010),
(100017, 100012),
(100018, 100010),
(100018, 100011),
(100018, 100009);
|
b08d7e48115efe9a8442cd19636b2dd4d3bea7d2
|
[
"Java",
"SQL"
] | 8
|
SQL
|
codacy-badger/votingsystem-1
|
81ed7230196f993c845a9287bb47c8b0e063d876
|
c666a5263966c72b278b2a31451ec9b35906fd93
|
refs/heads/main
|
<repo_name>bhavya-netizen/Project-28-Plucking-Mangoes<file_sep>/sketch.js
const Engine = Matter.Engine;
const World = Matter.World;
const Bodies = Matter.Bodies;
const Body = Matter.Body;
const Constraint = Matter.Constraint;
var tree, stone, ground, launcher;
var mango1, mango2, mango3, mango4, mango5, mango6, mango7, mango8, mango9, mango10, mango11, mango12, mango13, mango14, mango15, mango16, mango17, mango18, mango19, mango20;
var world, boy;
function preload(){
boy = loadImage("boy.png");
}
function setup() {
createCanvas(1300, 600);
engine = Engine.create();
world = engine.world;
stone = new Stone(235,420,30);
mango1 = new Mango(1100,100,30);
mango2 = new Mango(1170,130,30);
mango3 = new Mango(1010,140,30);
mango4 = new Mango(1000,70,30);
mango5 = new Mango(1100,70,30);
mango6 = new Mango(1000,230,30);
mango7 = new Mango(900,230,40);
mango8 = new Mango(1100,160,35);
mango9 = new Mango(1100,230,40);
mango10 = new Mango(1200,200,40);
mango11 = new Mango(1080,40,40);
mango12 = new Mango(900,160,40);
mango13 = new Mango(820,200,40);
mango14 = new Mango(750,280,40);
mango15 = new Mango(650,300,40);
mango16 = new Mango(700,230,40);
mango17 = new Mango(500,180,30);
mango18 = new Mango(600,150,40);
mango19 = new Mango(800,50,40);
mango20 = new Mango(700,90,40);
tree = new Tree(1000,350);
ground = new Ground(width/2, 870 ,width, 20);
launcher = new Launcher(stone.body,{x:235,y:420})
Engine.run(engine);
}
function draw() {
background("skyblue");
Engine.update(engine);
textSize(20);
fill("purple");
text("Press Space to get a second Chance to Play!!",50 ,50);
image(boy ,200, 340, 200, 300);
launcher.display();
tree.display();
stone.display();
ground.display();
mango1.display();
mango2.display();
mango3.display();
mango4.display();
mango5.display();
mango6.display();
mango7.display();
mango8.display();
mango9.display();
mango10.display();
mango11.display();
mango12.display();
mango13.display();
mango14.display();
mango15.display();
mango16.display();
mango17.display();
mango18.display();
mango19.display();
mango20.display();
detectollision(stone, mango1);
detectollision(stone, mango2);
detectollision(stone, mango3);
detectollision(stone, mango4);
detectollision(stone, mango5);
detectollision(stone, mango6);
detectollision(stone, mango7);
detectollision(stone, mango8);
detectollision(stone, mango9);
detectollision(stone, mango10);
detectollision(stone, mango11);
detectollision(stone, mango12);
detectollision(stone, mango13);
detectollision(stone, mango14);
detectollision(stone, mango15);
detectollision(stone, mango16);
detectollision(stone, mango17);
detectollision(stone, mango18);
detectollision(stone, mango19);
detectollision(stone, mango20);
}
function mouseDragged(){
Matter.Body.setPosition(stone.body, {x:mouseX, y:mouseY})
}
function mouseReleased(){
launcher.fly();
}
function keyPressed() {
if (keyCode === 32) {
Matter.Body.setPosition(stone.body, {x:235, y:420})
launcher.attach(stone.body);
}
}
function detectollision(lstone,lmango){
mangoBodyPosition = lmango.body.position
stoneBodyPosition = lstone.body.position
var distance = dist(stoneBodyPosition.x, stoneBodyPosition.y, mangoBodyPosition.x, mangoBodyPosition.y)
if (distance<=lmango.r+lstone.r){
Matter.Body.setStatic(lmango.body, false);
}
}
|
ec9d96aed328b5d5454fff36037fbcd680224c37
|
[
"JavaScript"
] | 1
|
JavaScript
|
bhavya-netizen/Project-28-Plucking-Mangoes
|
ad9b9cf5546e3371fca9c66427a0084ef632318c
|
885806fa1c4d73d02bb1759c439be8d5f92696be
|
refs/heads/master
|
<repo_name>Allitems/nurseaid<file_sep>/src/main/java/com/nurseaid/security/Security.java
package com.nurseaid.security;
public class Security {
public boolean validate()
{
return true;
}
}
<file_sep>/src/main/java/com/nurseaid/controller/AuthController.java
package com.nurseaid.controller;
import java.util.Locale;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import com.nurseaid.protocol.RequestUtility;
/**
* Handles requests for login, registrations, validation and forgot password
*/
@Controller
public class AuthController {
/**
* @param locale
* @param model - form data model
* @param sess
* @param req
* @return
*/
@RequestMapping(value = "/employee/register", method = {RequestMethod.GET, RequestMethod.POST})
public String employeeRegister(Locale locale, Model model, HttpSession sess, HttpServletRequest req) {
if(RequestUtility.isGet(req))
{
model.addAttribute("fx", "HomeController : home() ");
return "auth/employee_register";
}
else
if(RequestUtility.isPost(req))
{
//User v = (User)sess.getAttribute("session_data");
//model.addAttribute("fx", "HomeController : home() " + v.getUserName());
return "auth/employee_register";
}
return "error";
}
/**
* @param locale
* @param model - form data model
* @param sess
* @param req
* @return
*/
@RequestMapping(value = "/forgot_password", method = {RequestMethod.GET, RequestMethod.POST})
public String forgotPassword(Locale locale, Model model, HttpSession sess, HttpServletRequest req) {
if(RequestUtility.isGet(req))
{
model.addAttribute("fx", "HomeController : home() ");
return "auth/forgot_password";
}
else
if(RequestUtility.isPost(req))
{
//User v = (User)sess.getAttribute("session_data");
//model.addAttribute("fx", "HomeController : home() " + v.getUserName());
return "auth/forgot_password";
}
return "error";
}
/**
* Simply selects the home view to render by returning its name.
*/
@RequestMapping(value = "/login", method = {RequestMethod.GET, RequestMethod.POST})
public String login(Locale locale, Model model, HttpSession sess, HttpServletRequest req) {
if(RequestUtility.isGet(req))
{
model.addAttribute("fx", "HomeController : home() ");
return "auth/login";
}
else
if(RequestUtility.isPost(req))
{
//User v = (User)sess.getAttribute("session_data");
//model.addAttribute("fx", "HomeController : home() " + v.getUserName());
return "auth/login";
}
return "error";
}
}
<file_sep>/src/main/java/com/nurseaid/controller/CustomerController.java
package com.nurseaid.controller;
import java.util.ArrayList;
import java.util.Locale;
import javax.servlet.http.HttpSession;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.SessionAttributes;
import com.nurseaid.model.User;
import com.nurseaid.service.CustomerService;
/**
* Handles requests for the application home page.
*/
@Controller
@SessionAttributes("session_data")
public class CustomerController {
private CustomerService c_service;
/**
* Simply selects the home view to render by returning its name.
*/
@RequestMapping(value = "/customer/list", method = RequestMethod.GET)
public String listAction(Locale locale, Model model, HttpSession sess) {
c_service = new CustomerService();
User u = c_service.createCustomer("Nimesh");
c_service.addCustomerToList(u);
u = c_service.createCustomer("Ram");
c_service.addCustomerToList(u);
u = c_service.createCustomer("Krishna");
c_service.addCustomerToList(u);
model.addAttribute("fx", "CustomerController : listAction()");
User v = (User)sess.getAttribute("session_data");
String uname = "";
if(v != null){
uname = v.getUserName();
}
model.addAttribute("fx", "List Customers " + uname);
model.addAttribute("data_list", c_service.getAllCustomers() );
return "customer/list";
}
}
<file_sep>/src/main/java/com/nurseaid/message/MessageCode.java
package com.nurseaid.message;
import java.util.HashMap;
public class MessageCode {
HashMap<String, String[]> bundle = new HashMap<>();
public MessageCode()
{
bundle.put("ERROR_LOGIN", new String[]{"error", "SOmething Went Wfong"});
bundle.put("INFO_LOGIN", new String[]{"success", "Queue Created"});
}
}
<file_sep>/src/main/java/com/nurseaid/model/User.java
package com.nurseaid.model;
import java.io.Serializable;
import javax.validation.constraints.Size;
import org.hibernate.validator.constraints.Email;
import org.hibernate.validator.constraints.NotEmpty;
public class User implements Serializable
{
@NotEmpty(message = "Username Cannot be empty.")
private String userName;
@NotEmpty(message = "Email Cannot be empty.")
@Email(message = "Not a valid email.")
private String email;
@NotEmpty(message = "Please enter your password.")
@Size(min = 6, max = 15, message = "Your password must between 6 and 15 characters")
private String password;
public String getUserName() {
return userName;
}
public void setUserName(String userName) {
this.userName = userName;
}
public void setEmail(String email) {
this.email = email;
}
public void setPassword(String password) {
this.password = <PASSWORD>;
}
public String getEmail() {
return email;
}
public String getPassword() {
return <PASSWORD>;
}
}
|
07b6cd722e7153d6bd264e2b4f515a22e75718f1
|
[
"Java"
] | 5
|
Java
|
Allitems/nurseaid
|
f1ce1427fce5eea335dc86a8474290b981a531f8
|
022bbb69381088b6ae728c31904ef233eeae5284
|
refs/heads/master
|
<repo_name>wangsheng332526/myrpc<file_sep>/README.md
After reading the dubbox source code for a while, I wrote a few features to complete my own RPC framework.
It integrates with Spring, uses Netty for communication, and uses ZooKeeper as the registry.
The service provider publishes its service over Netty, registers the service address in ZooKeeper, and waits for calls.
The service consumer looks up the provider's address in the registry and, through a dynamic proxy, sends the called method and its arguments to the Netty server; the provider then executes the method via reflection and returns the result to the caller.
Run TimeServer.java first, then run Test.java to see the result.
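The consumer-side flow described above (local interface call, dynamic proxy, request sent to the provider) can be sketched in a few lines. This is only a minimal illustration, not this project's actual classes: the RpcTransport interface and RpcProxyFactory class below are hypothetical stand-ins for the real Netty client and ZooKeeper address lookup.

```java
// Minimal sketch (assumption: RpcTransport and RpcProxyFactory are hypothetical
// stand-ins for the real Netty client and ZooKeeper lookup used by this project).
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Proxy;

interface RpcTransport {
    // Sends the call description to a provider and returns the deserialized result.
    Object send(String service, String method, Class<?>[] types, Object[] args) throws Exception;
}

public class RpcProxyFactory {
    @SuppressWarnings("unchecked")
    public static <T> T create(Class<T> serviceInterface, RpcTransport transport) {
        // The proxy turns every local interface call into a serializable request.
        InvocationHandler handler = (proxy, method, args) ->
                transport.send(serviceInterface.getName(), method.getName(),
                        method.getParameterTypes(), args);
        return (T) Proxy.newProxyInstance(
                serviceInterface.getClassLoader(),
                new Class<?>[]{serviceInterface},
                handler);
    }
    // Usage (hypothetical): UserService svc = RpcProxyFactory.create(UserService.class, nettyTransport);
}
```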
<file_sep>/fly-demo-myrpc/src/main/java/test/ReferenceBeanDefinitionParser.java
package test;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.support.BeanDefinitionRegistry;
import org.springframework.beans.factory.support.RootBeanDefinition;
import org.springframework.beans.factory.xml.BeanDefinitionParser;
import org.springframework.beans.factory.xml.ParserContext;
import org.w3c.dom.Element;
/**
* TODO Please add comments here
*
* @author <a href="mailto:"wangsheng"<EMAIL>”>"wangsheng"</a>
* @version 2018年11月30日 下午2:25:14
* @since 2.0
*/
public class ReferenceBeanDefinitionParser implements BeanDefinitionParser {
private final Class<?> beanClass;
public ReferenceBeanDefinitionParser(Class<?> beanClass) {
this.beanClass = beanClass;
}
@Override
public BeanDefinition parse(Element element, ParserContext parserContext) {
RootBeanDefinition beanDefinition = new RootBeanDefinition();
beanDefinition.setBeanClass(beanClass);
beanDefinition.setLazyInit(false);
String id = element.getAttribute("id");
beanDefinition.getPropertyValues().add("sid", element.getAttribute("sid"));
beanDefinition.getPropertyValues().add("interfaceName", element.getAttribute("interfaceName"));
// Register the bean with the BeanDefinitionRegistry
parserContext.getRegistry().registerBeanDefinition(id, beanDefinition);
beanDefinition.getPropertyValues().addPropertyValue("id", id);
return beanDefinition;
}
}
<file_sep>/fly-demo-myrpc/src/main/java/io/netty/handler/codec/msgpack/MsgpackEncoder.java
package io.netty.handler.codec.msgpack;
import org.msgpack.MessagePack;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.MessageToByteEncoder;
/**
* msgpack encoder
* @author <a href="mailto:"wangsheng"<EMAIL>”>"wangsheng"</a>
* @version 2018年10月24日 下午2:55:17
* @since 2.0
*/
public class MsgpackEncoder extends MessageToByteEncoder<Object>{
@Override
protected void encode(ChannelHandlerContext arg0, Object arg1, ByteBuf arg2) {
MessagePack msgpack = new MessagePack();
byte[] raw = null;
try {
raw = msgpack.write(arg1);
// raw = msgpack.write(arg1,ObjectTemplate.getInstance());
} catch (Exception e) {
e.printStackTrace();
}
arg2.writeBytes(raw);
}
}
<file_sep>/fly-demo-myrpc/src/main/java/test/ReferenceBean.java
package test;
import java.lang.reflect.Proxy;
import org.springframework.beans.factory.FactoryBean;
/**
* @author <a href="mailto:"wangsheng"<EMAIL>”>"wangsheng"</a>
* @version 2018年11月30日 下午2:13:38
* @since 2.0
*/
public class ReferenceBean implements FactoryBean{
private String id;
private String sid;
private String interfaceName;
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getSid() {
return sid;
}
public void setSid(String sid) {
this.sid = sid;
}
public String getInterfaceName() {
return interfaceName;
}
public void setInterfaceName(String interfaceName) {
this.interfaceName = interfaceName;
}
@Override
public Object getObject() throws Exception {
Class<?> interfaceClass = Class.forName(interfaceName, true, Thread.currentThread()
.getContextClassLoader());
Class<?>[] interfaces = new Class<?>[] {interfaceClass};
return Proxy.newProxyInstance(Thread.currentThread().getContextClassLoader(),
interfaces,new InvokerInvocationHandler(interfaceClass,id));
}
@Override
public Class getObjectType() {
Class<?> interfaceClass = null;
try {
interfaceClass = Class.forName(interfaceName, true, Thread.currentThread()
.getContextClassLoader());
} catch (ClassNotFoundException e) {
e.printStackTrace();
}
return interfaceClass;
}
@Override
public boolean isSingleton() {
return true;
}
}
<file_sep>/fly-demo-myrpc/src/main/java/test/User.java
package test;
import org.msgpack.annotation.Message;
/**
* TODO Please add comments here
* @author <a href="mailto:"wangsheng"<EMAIL>”>"wangsheng"</a>
* @version 2018年11月29日 下午2:05:04
* @since 2.0
*/
@Message
public class User {
private Integer id;
private String name;
private Integer age;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Integer getAge() {
return age;
}
public void setAge(Integer age) {
this.age = age;
}
}
<file_sep>/fly-demo-myrpc/src/main/java/test/UserService.java
package test;
/**
* TODO Please add comments here
* @author <a href="mailto:"<EMAIL>"@<EMAIL>”>"wangsheng"</a>
* @version 2018年11月29日 下午2:03:30
* @since 2.0
*/
public interface UserService {
public String sayHi(String str,Integer sId);
}
<file_sep>/fly-demo-myrpc/src/main/test/Test.java
/**************************************************************************
* Copyright (c) 2016-2020 ZheJiang International E-Commerce Services Co.,Ltd.
* All rights reserved.
*
* Name: kafka
* Copyright notice: This software is the property of ZheJiang International E-Commerce Services Co., Ltd. Without formal
* authorization from ZheJiang International E-Commerce Services Co., Ltd., no company or individual may obtain, read,
* install, or distribute any content of this software that is protected by intellectual property rights.
***************************************************************************/
package test;
/**
* TODO Please add comments here
* @author <a href="mailto:"wangsheng"<EMAIL>”>"wangsheng"</a>
* @version 2018年12月31日 下午3:55:32
* @since 2.0
*/
public class Test {
}
<file_sep>/fly-demo-provider/src/main/java/com/data/fly/demo/provider/DemoServiceImpl.java
package com.data.fly.demo.provider;
import com.alibaba.dubbo.rpc.RpcContext;
import com.data.fly.demo.DemoService;
import com.data.fly.demo.Person;
/**
* TODO Please add comments here
*
* @author <a href="mailto:"wangsheng"<EMAIL>”>"wangsheng"</a>
* @version 2018年7月11日 下午5:30:45
* @since 2.0
*/
public class DemoServiceImpl implements DemoService {
@Override
public String sayHello(String name) {
// // Whether this side is the provider side; this returns true here
// boolean isProviderSide = RpcContext.getContext().isProviderSide();
// // Get the caller's IP address
// String clientIP = RpcContext.getContext().getRemoteHost();
// // Get the current service configuration; all configuration is converted into URL parameters
// String application = RpcContext.getContext().getUrl().getParameter("application");
// System.out.println(application);
System.out.println("被调用");
return "Hello123:" + name;
}
@Override
public String doSomething(Integer id, String addr) {
return id+":"+addr;
}
/**
* @param id
* @return
* @see com.data.fly.demo.DemoService#findById(java.lang.Integer)
*/
@Override
public Person findById(Integer id) {
Person p = new Person();
p.setName("康熙");
p.setAge(25);
p.setSex(1);
return p;
}
}
<file_sep>/fly-demo-myrpc/src/main/java/test/InvokerInvocationHandler.java
package test;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import io.netty.handler.codec.msgpack.TimeClient;
import io.netty.handler.codec.msgpack.TimeClientHandler;
import test.zk.ZkUtil;
/**
* TODO Please add comments here
* @author <a href="mailto:"wangsheng"<EMAIL>”>"wangsheng"</a>
* @version 2018年11月29日 下午4:39:53
* @since 2.0
*/
public class InvokerInvocationHandler implements InvocationHandler {
private final Object invoker;
private String serviceName;
public InvokerInvocationHandler(Object handler,String serviceName){
this.invoker = handler;
this.serviceName = serviceName;
}
public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
String methodName = method.getName();
System.out.println(methodName);
System.out.println(args[0]);
Class<?>[] parameterTypes = method.getParameterTypes();
if (method.getDeclaringClass() == Object.class) {
return method.invoke(invoker, args);
}
if ("toString".equals(methodName) && parameterTypes.length == 0) {
return invoker.toString();
}
if ("hashCode".equals(methodName) && parameterTypes.length == 0) {
return invoker.hashCode();
}
if ("equals".equals(methodName) && parameterTypes.length == 1) {
return invoker.equals(args[0]);
}
//TODO look up the provider in the registry and invoke it
RequestMessage msg = new RequestMessage();
msg.setServiceName(serviceName);
msg.setMethodName(methodName);
msg.setArgs(JSON.toJSONString(args));
List<String> types = new ArrayList<>();
for(Class c:parameterTypes){
types.add(c.getName());
}
msg.setTypes(types);
String addr = ZkUtil.readProviderAddr();
if(StringUtils.isBlank(addr)){
return "zookeeper has no netty server info";
}
TimeClientHandler handler = new TimeClientHandler(msg);
TimeClient t = new TimeClient(addr.split(":")[0],Integer.valueOf(addr.split(":")[1]) , msg,handler);
t.run();
return handler.getData();
}
public static void main(String[] args) {
Object[] o = new Object[]{"你所说的是",1001,8.99};
String s = JSON.toJSONString(o);
System.out.println(s);
JSONArray a = JSONObject.parseArray(s);
int size = a.size();
for(int i=0;i<size;i++){
System.out.println(a.get(i));
}
}
}<file_sep>/fly-demo-myrpc/src/main/java/test/zk/ZkUtil.java
package test.zk;
import java.net.InetAddress;
import java.net.UnknownHostException;
import org.I0Itec.zkclient.ZkClient;
/**
* TODO Please add comments here
* @author <a href="mailto:"wangsheng"<EMAIL>”>"wangsheng"</a>
* @version 2018年12月31日 下午3:28:08
* @since 2.0
*/
public class ZkUtil {
private static String path = "/myrpc/provider";
private static String addr = "127.0.0.1:2181";
public static void main(String[] args) {
ZkUtil z = new ZkUtil();
System.out.println(z.readProviderAddr()+".......");
}
public static void registerProvider(String address,Integer port){
ZkClient zkClient = new ZkClient(addr,5000);
if(!zkClient.exists(path)){
zkClient.createPersistent(path, true);
}
zkClient.writeData(path, address+":"+port);
}
public static String readProviderAddr(){
ZkClient zkClient = new ZkClient(addr,5000);
return zkClient.readData(path);
}
}
<file_sep>/fly-demo-myrpc/src/main/java/io/netty/handler/codec/msgpack/TimeClientHandler.java
package io.netty.handler.codec.msgpack;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.msgpack.template.Templates;
import org.msgpack.type.ArrayValue;
import org.msgpack.type.FloatValue;
import org.msgpack.type.IntegerValue;
import org.msgpack.type.MapValue;
import org.msgpack.type.RawValue;
import org.msgpack.type.Value;
import org.msgpack.unpacker.Converter;
import io.netty.channel.ChannelHandlerAdapter;
import io.netty.channel.ChannelHandlerContext;
import test.RequestMessage;
public class TimeClientHandler extends ChannelHandlerAdapter {
// private final int sendNumber;
private int counter;
private RequestMessage requestMessage;
private Object data;
// public TimeClientHandler(int sendNumber) {
// this.sendNumber = sendNumber;
// }
public TimeClientHandler(RequestMessage requestMessage) {
this.requestMessage = requestMessage;
}
@Override
public void channelActive(ChannelHandlerContext ctx) {
// User[] users = getUserArray(1);
// for (User user : users) {
// ctx.writeAndFlush(user);
// }
ctx.writeAndFlush(requestMessage);
}
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
Value val =(Value)msg;
setData(toObject(val));
System.out.println("This is " + ++counter + " times receive server : [" + msg + "]");
}
@Override
public void channelReadComplete(ChannelHandlerContext ctx) throws Exception {
ctx.flush();
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
cause.printStackTrace();
ctx.close();
}
private Object toObject(Value value) throws IOException{
Converter conv = new Converter(value);
if (value.isNilValue()) { // null
return null;
} else if (value.isRawValue()) { // byte[] or String or maybe Date?
// deserialize value to String object
RawValue v = value.asRawValue();
return conv.read(Templates.TString);
} else if (value.isBooleanValue()) { // boolean
return conv.read(Templates.TBoolean);
} else if (value.isIntegerValue()) { // int or long or BigInteger
// deserialize value to int
IntegerValue v = value.asIntegerValue();
return conv.read(Templates.TInteger);
} else if (value.isFloatValue()) { // float or double
// deserialize value to double
FloatValue v = value.asFloatValue();
return conv.read(Templates.TDouble);
} else if (value.isArrayValue()) { // List or Set
// deserialize value to List object
ArrayValue v = value.asArrayValue();
List<Object> ret = new ArrayList<Object>(v.size());
for (Value elementValue : v) {
ret.add(toObject(elementValue));
}
return ret;
} else if (value.isMapValue()) { // Map
MapValue v = value.asMapValue();
Map map = new HashMap<>(v.size());
for (Map.Entry<Value, Value> entry : v.entrySet()) {
Value key = entry.getKey();
Value val = entry.getValue();
map.put(toObject(key), toObject(val));
}
return map;
} else {
throw new RuntimeException("fatal error");
}
}
public Object getData() {
return data;
}
public void setData(Object data) {
this.data = data;
}
}
|
25e3fa43886fb7577e8cb6ada1563c7c06d50fb5
|
[
"Markdown",
"Java"
] | 11
|
Markdown
|
wangsheng332526/myrpc
|
9060b8766499fdfc1e26a9faa3eedbd7858868f3
|
2b08cca4cb9e24d43319552450ecda9df42cbb63
|
refs/heads/master
|
<repo_name>alexistbell/QuestionMarks<file_sep>/script.js
let QuestionMarks = str => {
let numberIndexes = [];
let numberValues = [];
for(c in str)
{
if(isNumber(str[c]))
{
numberIndexes.push(parseInt(c));
numberValues.push(parseInt(str[c]));
}
}
if(numberIndexes.length < 1)
{
return false;
}
var equalsTen = false;
for(var i = 0; i < numberIndexes.length -1; i++)
{
if(findValue(numberValues[i], numberValues[i+1])){
equalsTen = true;
var hasThree = str.slice((numberIndexes[i] + 1), numberIndexes[i+1]);
if(!threeQuestionsMarks(hasThree))
{
return false;
}
}
}
return equalsTen;
};
let isNumber = c => {
return(c*0 == 0);
};
let findValue = (n1, n2) =>{
return((n1+n2) === 10)
};
let threeQuestionsMarks = substr =>{
var counter = 0;
for(c in substr)
{
if(substr[c] === '?')
{
counter++;
}
}
return(counter === 3);
};
console.time('one');
console.log(QuestionMarks('bcc?7??ccc?3tt1??????5'));
console.timeEnd('one');
console.time('two');
console.log(QuestionMarks('bb6?9'));
console.timeEnd('two');
console.time('three');
console.log(QuestionMarks('arrb6???4xxbl5???eee5'));
console.timeEnd('three');
|
bfc50390587b37d5dfb1695423872e23f9de2244
|
[
"JavaScript"
] | 1
|
JavaScript
|
alexistbell/QuestionMarks
|
9081ea4a60911216dcf73b8bb7ec9a8cfaf8d4cf
|
38d0cf5c337c98c881f078ddc8032bd759ae30ac
|
refs/heads/master
|
<file_sep>public enum Status {
CONTINUE(100),
STOP(101);
private int code;
Status(int code) {
this.code = code;
}
}
<file_sep>public class Test2 {
public static void main(String[] args) {
System.out.println();
Test1 test = new Test1();
System.out.println(test.toString());
fn1("aa", 1, 1);
}
private static void fn1(String name, int age, int sex) {
System.out.println(name);
}
}
|
9ce75b731d8c2c7cf7550dcc1755678e6e08cd80
|
[
"Java"
] | 2
|
Java
|
ff200037/imooc
|
1924eea12c8fb2f8b4bdd8d8547d05ad5386e10c
|
5c536f39ddda079c8c036f7dd744d8740ce11ad4
|
refs/heads/master
|
<file_sep>var express = require('express');
var app = express();
var fs = require('fs');
var bodyParser = require('body-parser')
var compression = require('compression');
var topicRouter = require('./routes/topic');
var indexRouter =require('./routes/index');
// Serve the topicRouter middleware for all paths starting with /topic.
app.use(express.static('public'));
// Look for static files inside the public folder.
app.use(bodyParser.urlencoded({
extended: false
}))
app.use(compression());
app.get('*',function (request, response, next) {
// * : all requests / if plain app.use were used instead, it would also run for POST requests, which is inefficient.
fs.readdir('./data', function (error, filelist) {
request.list = filelist;
next();
});
})
app.use('/topic', topicRouter);
app.use('/', indexRouter);
app.use(function(req,res,next){
res.status(404).send('Sorry cant find that!');
});
app.use(function(err, req,res,next){
// A function with 4 arguments is, by convention, the error-handling function in Express.
res.status(500).send('Something broke!');
});
app.listen(3000, function () {
console.log('Example app listening on port 3000');
})
|
5bb09bbc8fc0a29b0ca466a2f5f83e5d5a8d5b7b
|
[
"JavaScript"
] | 1
|
JavaScript
|
dydrkfl/nodejs-master
|
99303571d41f0aac4be754a0f28d902041a755d8
|
af5ce39cd56f83bd667d4defa727303630143378
|
refs/heads/main
|
<file_sep>package com.logback;
import ch.qos.logback.classic.PatternLayout;
import ch.qos.logback.classic.spi.ILoggingEvent;
import ch.qos.logback.core.Layout;
import org.apache.commons.lang3.StringUtils;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class PatternMaskLayout extends PatternLayout {
    private final static String MASK_REPLACE = "*疑似敏感信息*"; // literal text: "suspected sensitive information"
public void addMaskKey(String maskKey) {
System.out.println("maskKey: " + maskKey);
MaskUtil.maskPatterns.add(maskKey.concat("=(.*?)[,}]"));
MaskUtil.maskPatterns.add("\"".concat(maskKey).concat("\"").concat(":(.*?)[,}]"));
}
@Override
public String doLayout(ILoggingEvent event) {
return maskMessage(super.doLayout(event));
}
private String maskMessage(String msg){
Pattern pattern = MaskUtil.getMultilinePattern();
Matcher matcher = pattern.matcher(msg);
Set<String> sensitiveMessage = new HashSet<>();
while (matcher.find()){
sensitiveMessage.add(matcher.group(0));
}
if(sensitiveMessage.size() > 0){
for (String s : sensitiveMessage) {
if(StringUtils.isNoneBlank(s)){
String[] split = s.split(":|=");
String replacement = MASK_REPLACE;
if(split.length >= 2){
replacement = split[0].concat("=").concat(MASK_REPLACE).concat(",");
}
msg = msg.replace(s,replacement);
}
}
}
return msg;
}
}
|
cf41f7db5061e6eabd8ff25674134c60ebcb7cce
|
[
"Java"
] | 1
|
Java
|
micah-java/com-spring-logback
|
1fd8bcd007e66f0aaecaa72bf3e1d50fa08f989a
|
1e8576fb033c70a09ccdeeadae453c20e2784f56
|
refs/heads/master
|
<file_sep>const express = require('express');
const mongoose = require('mongoose');
const morgan = require('morgan')
const session = require('express-session');
const MongoStore = require('connect-mongo')(session);
const passport = require('passport');
const flash = require('connect-flash')
require('dotenv').config()
require('./config/passport')(passport);
// app requirements
const app = express();
app.use(morgan('dev'));
app.use(express.urlencoded({extended: true}));
app.use(express.static("public"));
app.set('view engine', 'ejs');
app.use(flash());
app.use(session({
secret: process.env.SEEK,
resave: false,
saveUninitialized: true,
store: new MongoStore({ mongooseConnection: mongoose.connection })
}));
// flash vars
app.use((req, res, next) => {
res.locals.success_msg = req.flash('success_msg');
res.locals.error_msg = req.flash('error_msg');
res.locals.error = req.flash('error');
res.locals.success = req.flash('success');
next();
})
app.use(passport.initialize());
app.use(passport.session());
const { dbConnection, db, options } = require('./utils/database-utils');
mongoose.connect(db, options, (err) => dbConnection(err));
const PORT = process.env.PORT || 5000;
// routes
const elizaRoute = require('./routes/elizaRoute');
const authRoute = require('./routes/authRoute');
app.use('/', authRoute);
app.use('/dashboard', elizaRoute);
app.listen(PORT, () => console.log(`listening on port ${PORT}`));<file_sep>const createList = document.querySelector('.create-list')
const createForm = document.querySelector('.list-name-creator')
const formAppear = (e) => {
e.preventDefault();
createForm.classList = 'list-name-creator-active';
};
const makeNewList = (e) => {
e.preventDefault()
const nameText = (document.querySelector('[id=name-of-list]').value);
console.log(nameText)
const theUrl = `/dashboard/${nameText}`
document.location.href = theUrl
};
createList.addEventListener('click', formAppear)
createForm.addEventListener('submit', makeNewList)<file_sep># Eliza
## Your essential life organizer
<!--  -->
## Created By
| [The4star](https://github.com/The4Star) |
|-----------|
| <NAME> |
Deployed Site: http://eliza-organiser.herokuapp.com/
## Purpose
Eliza is a simple organisation app that allows users to create their own custom lists of things they need to do. It features secure authentication and login.
## Technologies used
* Node.js
* Express
* Javascript
* Mongoose
* MongoDB
* Bcrypt
* Passport.js
* Morgan
## Functionality and Features
The app features its own secure authentication using hashing and salting, with cookie storage handled by Passport.js. Once logged in, the user can organise their life with multiple lists that are stored in a MongoDB database.
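A rough sketch of the hashing-and-salt flow described above (illustrative helper names, not the app's actual code):
```javascript
const bcrypt = require('bcrypt');
// On registration: hash the password with a fresh salt before saving the user
const hashPassword = async (plainPassword) => {
  const salt = await bcrypt.genSalt(10);
  return bcrypt.hash(plainPassword, salt); // store this hash, never the plain password
};
// On login: compare the submitted password against the stored hash
const checkPassword = (plainPassword, storedHash) =>
  bcrypt.compare(plainPassword, storedHash); // resolves to true on a match
```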
<file_sep>const express = require('express');
const router = express.Router();
const {Item, List, itemArray} = require('../models/Item');
const User = require('../models/User')
const {ensureAuthenticated} = require('../config/auth')
router.use(express.static("public"));
router.get('/', ensureAuthenticated, async (req, res) => {
const user = await User.findById(req.user._id).populate('lists');
const userName = user.firstName;
const lists = user.lists;
res.render('dashboard', {userName, lists})
})
router.get('/:customList', ensureAuthenticated, async (req, res) => {
try {
const customList = req.params.customList.charAt(0).toUpperCase() + req.params.customList.slice(1).toLowerCase();
const user = req.user;
await List.findOne({name: customList, userId: user._id}, async (err, foundList) => {
if (err) {
throw err
};
if (!foundList) {
const list = await List.create({
name: customList,
items: itemArray,
userId: user._id
});
user.lists.push(list);
user.save();
res.redirect(`/dashboard/${customList}`);
} else {
res.render("list", {listTitle: customList, items: foundList.items});
};
});
} catch (error) {
console.log(error);
}
});
router.post('/listupdate', async (req, res) => {
try {
const itemName = req.body.newItem;
const listTitle = req.body.listTitle;
const user = req.user;
const newItem = new Item ({
name: itemName
});
await List.findOne({name:listTitle, userId: user._id},(err, foundList) => {
foundList.items.push(newItem);
foundList.save();
});
res.redirect(`/dashboard/${listTitle}`);
} catch (error) {
console.log(error);
};
});
router.post('/delete', async (req, res) => {
try {
const itemId = req.body.checkbox;
const listTitle = req.body.listTitle;
const user = req.user;
await List.findOneAndUpdate({name: listTitle, userId: user._id}, {$pull: {items: {_id: itemId}}}, (err, foundList) =>{
if (!err) {
res.redirect(`/dashboard/${listTitle}`)
};
});
} catch (error) {
console.log(error);
};
});
router.post('/delete-list', async (req, res) => {
try {
const listName = req.body.listName;
const user = await User.findById(req.user._id)
    const list = await List.findOne({name: listName})
await User.findOneAndUpdate({firstName:user.firstName}, {$pull: {lists: list._id}});
await List.deleteOne({name: list.name, userId: user._id})
res.redirect('/dashboard')
} catch (error) {
console.log(error);
};
});
module.exports = router<file_sep>const mongoose = require('mongoose')
const Schema = mongoose.Schema;
const itemSchema = new Schema({
name: {type: String, required: true}
});
const listSchema = new Schema({
name: {type: String, required: true},
items: [itemSchema],
userId: [{type: Schema.Types.ObjectId, ref: 'User'}]
});
const List = mongoose.model('List', listSchema);
const Item = mongoose.model('Item', itemSchema);
const item1 = new Item ({
name: 'Welcome to your new Eliza list'
});
const item2 = new Item ({
name: 'Click the plus to add an item'
});
const item3 = new Item ({
name: '<<< Click this to delete an item'
});
const itemArray = [item1, item2, item3];
module.exports = {
Item,
List,
itemArray,
itemSchema,
listSchema
};
|
8455c0190e76696ddf53cd53bf8f0c56860edbe1
|
[
"JavaScript",
"Markdown"
] | 5
|
JavaScript
|
The4star/eliza
|
713bc2223454beb4b75bf9db9087120eacd4d345
|
60c1820062da5cadc795a875f7afc9531f3e185d
|
refs/heads/master
|
<repo_name>rohegde7/javascript-Basics<file_sep>/functions.js
function fun_innerHtml(){
document.getElementById('text1').innerHTML="Function call Success!"
}
function fun_documentWrite(){
document.write("Habibi clear all")
}
function fun_windowAlert(){
window.alert("You just created an alert!")
}
function fun_consoleLog(){
var e = document.getElementById('text1').textContent;
console.log(e)
}
function fun_sumOfVariables(){
var sum = 10+20;
document.getElementById('variable_declaration').innerHTML="Sum = " +sum +".";
}
|
af1f2de851e5f20221c978b91a3c592455b68969
|
[
"JavaScript"
] | 1
|
JavaScript
|
rohegde7/javascript-Basics
|
e8397927fd12970a6d3f2f12537ff5ada9354a52
|
7d1dfac9c499ba77ec45066d6ad46bf604fdb0bf
|
refs/heads/master
|
<repo_name>lund0n/test-doubles<file_sep>/src/mock-fns.test.js
import React from 'react'
import { mount } from 'enzyme'
import TransformList from './transform-list'
// wraps the function in a call counter
const createMock = fn => {
const handler = (...args) => {
handler.count++
return fn.apply(null, args)
}
handler.count = 0
return handler
}
test('transform strings', () => {
const transformer = createMock(value => {
// The mock behavior
switch (value) {
case 0:
return 'none'
case 1:
return 'one'
default:
return 'some'
}
})
const component = mount(
<TransformList transform={transformer} items={[3, 1, 0, 4]} />
)
expect(transformer.count).toEqual(4)
const expected = ['some', 'one', 'none', 'some']
const results = component.find('li').map(node => node.text())
expect(results).toEqual(expected)
})
<file_sep>/src/App.js
import React, { Component } from 'react'
import RandomUser from './random-user'
import { getRandomUser } from './api'
class App extends Component {
render() {
return <RandomUser getUser={getRandomUser} />
}
}
export default App
<file_sep>/src/api.js
import axios from 'axios'
import { getRandomIntegerBetween } from './util'
const USER_LIST_SIZE = 10
const getRandomUserUrl = () =>
`https://jsonplaceholder.typicode.com/users/${getRandomIntegerBetween(
1,
USER_LIST_SIZE
)}`
export const getRandomUser = () =>
axios.get(getRandomUserUrl()).then(({ data }) => data)
<file_sep>/src/custom-mocks.test.js
import { getRandomLetter } from './letters'
// import words from 'words'
// jest.mock('words', () => require('identity-obj-proxy'), { virtual: true })
test.skip('letters', () => {
expect(getRandomLetter()).toEqual('J')
})
const words = {}
test.skip('where are the words?', () => {
expect(words.foo).toEqual('foo')
expect(words.bar).toEqual('bar')
expect(words.wat).toEqual('wat')
})
<file_sep>/src/transform-list.js
import React from 'react'
import { array, func } from 'prop-types'
const TransformList = ({ transform, items }) => (
<ul>{items.map((item, i) => <li key={i}>{transform(item)}</li>)}</ul>
)
TransformList.propTypes = {
items: array,
transform: func,
}
export default TransformList
<file_sep>/src/letters.js
import { getRandomIntegerBetween } from './util'
const A = 65
const Z = 91
console.log("I'm here to clutter your logs, dawg!")
export const getRandomLetter = () =>
String.fromCharCode(getRandomIntegerBetween(A, Z))
<file_sep>/src/random-user.test.js
import React from 'react'
import { mount } from 'enzyme'
import renderer from 'react-test-renderer'
import { ARTHUR_DENT, FORD_PREFECT } from './mock-users'
import RandomUser from './random-user'
test('Renders correct user', () => {
const mockedUserPromise = Promise.resolve(ARTHUR_DENT)
const getUser = jest.fn().mockReturnValue(mockedUserPromise)
const component = renderer.create(<RandomUser getUser={getUser} />)
return mockedUserPromise.then(() => {
expect(component.toJSON()).toMatchSnapshot()
})
})
test('Renders new user on button click', () => {
const user1 = Promise.resolve(ARTHUR_DENT)
const user2 = Promise.resolve(FORD_PREFECT)
const getUser = jest
.fn()
.mockReturnValueOnce(user1)
.mockReturnValue(user2)
const component = mount(<RandomUser getUser={getUser} />)
return user1
.then(() => {
      component.update() // enzyme needs an explicit update here to pick up the state change from the resolved promise
expect(component.find('User').props()).toEqual(ARTHUR_DENT)
component
.update()
.find('button')
.simulate('click')
return user2
})
.then(() => {
component.update()
expect(component.find('User').props()).toEqual(FORD_PREFECT)
})
})
<file_sep>/README.md
# Test Doubles
An example of using Jest's mocking features in tests.
## Quick Start
```bash
$ git clone https://github.com/lund0n/test-doubles
$ cd test-doubles
$ yarn # or npm install if you're using that instead
$ yarn test # or npm test
```
## Presentation Agenda
* Types of test doubles
* Jest built-in test double support
* When to use the various types of test doubles
### Why Use a Test Double?
* They can make test setup easier, particularly when dealing with complex collaborators (collaborators that have a lot of configuration or make use of remote resources)
* They introduce flexibility into how you test. You can test something that relies on an API without implementing the API first.
* They help with code isolation by ensuring that you only have to set up and test the current component/module.
* They can aid testing non-deterministic values in a deterministic way. Temporal values (`new Date()`, `Date.now()`) and random values pose a particular challenge. Replacing a random call with a die that always rolls "6" is easier to work with (see the sketch after this list).
* They can improve testing performance. If the code under test is calling a collaborator that contains time-intensive code, replacing it with a friendlier, lightweight version will make the code faster to test. Slow tests don't get executed frequently.
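A minimal sketch of the non-deterministic-value point (the `formatTimestamp` function is made up for illustration and is not part of this repo): pinning `Date.now` makes a time-dependent test repeatable.
```javascript
const formatTimestamp = () => new Date(Date.now()).toISOString() // hypothetical code under test
test('formats the pinned current time', () => {
  // stub Date.now so the "current" time is always the Unix epoch
  const nowSpy = jest.spyOn(Date, 'now').mockReturnValue(0)
  expect(formatTimestamp()).toEqual('1970-01-01T00:00:00.000Z')
  nowSpy.mockRestore() // put the real Date.now back
})
```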
### Types of Test Doubles
1. *Stub*: a placeholder. Either does nothing, or returns the same thing every time. Can just be a regular function, or NOOP.
2. *Spy*: focused on tracking how many times a function’s been called, and what it’s called with.
3. *Mock*: it’s a spy with behavior. It tracks how it’s called, and either returns simple values (like a stub), or implements more complex behavior. Minimal Jest-based versions of all three are sketched below.
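A quick sketch of all three using Jest's built-in helpers (the `math` object is illustrative only, and the assertions are assumed to run inside a test):
```javascript
const math = { add: (a, b) => a + b }
// Stub: always returns the same canned value
const stubbedAdd = jest.fn().mockReturnValue(42)
// Spy: wraps the real implementation and records how it was called
const addSpy = jest.spyOn(math, 'add')
math.add(1, 2)
expect(addSpy).toHaveBeenCalledWith(1, 2)
// Mock: records calls and also supplies replacement behavior
const mockedAdd = jest.fn((a, b) => `${a} plus ${b}`)
expect(mockedAdd(1, 2)).toEqual('1 plus 2')
expect(mockedAdd).toHaveBeenCalledTimes(1)
```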
## List of Examples
* Using hand-rolled code:
* Stubs [link](https://github.com/lund0n/test-doubles/blob/master/src/stubs-fns.test.js)
* Spies [link](https://github.com/lund0n/test-doubles/blob/master/src/spy-fns.test.js)
* Mocks [link](https://github.com/lund0n/test-doubles/blob/master/src/mock-fns.test.js)
* Using Jest mocks:
* Stubs [link](https://github.com/lund0n/test-doubles/blob/master/src/stubs-jest.test.js)
* Spies [link](https://github.com/lund0n/test-doubles/blob/master/src/spy-jest.test.js)
* Mocks [link](https://github.com/lund0n/test-doubles/blob/master/src/mock-jest.test.js)
* Complex Jest mock (using Promises, multiple return values) [link](https://github.com/lund0n/test-doubles/blob/master/src/random-user.test.js)
* Jest auto-mock feature [link](https://github.com/lund0n/test-doubles/blob/master/src/api.test.js)
* Jest virtual auto-mocking [link](https://github.com/lund0n/test-doubles/blob/master/src/custom-mocks.test.js)
*NOTE*: some of the examples may not be completed, as they are demonstrated as part of the training presentation.
<file_sep>/src/stubs-fns.test.js
import React from 'react'
import { mount } from 'enzyme'
import OnMount from './on-mount'
import ListOfItems from './list-of-items'
test('renders children correctly', () => {
const noop = () => {}
const expected = <div>Hello</div>
// if onMount is not passed, this will fail.
const component = mount(<OnMount onMount={noop}>Hello</OnMount>)
expect(component.contains(expected)).toBe(true)
})
test('renders the list using stubbed getItems', () => {
const getItems = () => ['Larry', 'Mo', 'Curly']
const component = mount(<ListOfItems getItems={getItems} />)
expect(component.find('li')).toHaveLength(3)
})
<file_sep>/src/random-user.js
import React, { Component } from 'react'
import { func } from 'prop-types'
import User from './user'
export default class RandomUser extends Component {
static propTypes = {
getUser: func.isRequired,
}
state = {
id: 0,
name: '',
username: '',
email: '',
}
updateUser = () => {
this.props.getUser().then(({ id, name, username, email }) => {
this.setState({ id, name, username, email })
})
}
componentDidMount() {
this.updateUser()
}
render() {
const { id, name, username, email } = this.state
return (
<div>
<button onClick={this.updateUser}>Get User</button>
<User id={id} name={name} username={username} email={email} />
</div>
)
}
}
<file_sep>/src/on-mount.js
import React, { Component } from 'react'
import { func, node } from 'prop-types'
export default class OnMount extends Component {
static propTypes = {
children: node.isRequired,
onMount: func.isRequired,
}
componentDidMount() {
this.props.onMount()
}
render() {
return <div>{this.props.children}</div>
}
}
<file_sep>/src/spy-fns.test.js
import React from 'react'
import { mount } from 'enzyme'
const createClickHandler = () => {
const clickHandler = () => {
clickHandler.count++
}
clickHandler.count = 0
return clickHandler
}
test('tracks clicks', () => {
const clickHandler = createClickHandler()
const component = mount(<button onClick={clickHandler}>click me</button>)
component.simulate('click')
component.simulate('click')
component.simulate('click')
expect(clickHandler.count).toEqual(3)
})
|
7f9f41283c28e8367a02ad87e4dbf64a46820214
|
[
"JavaScript",
"Markdown"
] | 12
|
JavaScript
|
lund0n/test-doubles
|
7819411d5d0139e0dfe6c6d6f397c5ce544947a7
|
23ce12d7f47afbde416089286c1224786ce916a0
|
refs/heads/master
|
<file_sep>describe("dummy", () => {
it("test 1", () => {
expect(1 + 1).toEqual(2);
});
});
<file_sep>import React from "react";
import Slider from "@material-ui/core/Slider";
import { withStyles } from "@material-ui/core";
const EnhancedSlider = withStyles({
root: {
height: 4
}
})(Slider);
export default EnhancedSlider;
<file_sep>"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.default = void 0;
var _react = _interopRequireDefault(require("react"));
var _Slider = _interopRequireDefault(require("@material-ui/core/Slider"));
var _core = require("@material-ui/core");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
var EnhancedSlider = (0, _core.withStyles)({
root: {
height: 4
}
})(_Slider.default);
var _default = EnhancedSlider;
exports.default = _default;<file_sep>import React, { Component } from "react";
import PropTypes from "prop-types";
import { withStyles } from "@material-ui/core";
import Button from "@material-ui/core/Button";
import Checkbox from "@material-ui/core/Checkbox";
import Chip from "@material-ui/core/Chip";
import Dialog from "@material-ui/core/Dialog";
import DialogActions from "@material-ui/core/DialogActions";
import DialogContent from "@material-ui/core/DialogContent";
import DialogTitle from "@material-ui/core/DialogTitle";
import FormControl from "@material-ui/core/FormControl";
import FormGroup from "@material-ui/core/FormGroup";
import FormControlLabel from "@material-ui/core/FormControlLabel";
import Typography from "@material-ui/core/Typography";
import ChipInput from "material-ui-chip-input";
import { FieldArray } from "react-final-form-arrays";
import AddCircleRoundedIcon from "@material-ui/icons/AddCircleRounded";
import styles from "./styles";
import SearchBar from "./SearchBar";
class ListPickerComponent extends Component {
constructor(props) {
super(props);
this.state = {
selected: [],
isOpen: false,
currentPage: 0,
searchValue: ""
};
}
/**
* Change popup state (open or close)
* @param isOpen : boolean
*/
setPopup = isOpen => {
console.log(this.props.fields.value, " props");
console.log(this.state.selected, " state");
if (!this.state.isOpen && isOpen) {
this.setState({
selected: this.props.fields.value || []
});
}
this.setState({
isOpen
});
};
/**
* Checks if key is already selected
* @param key : string
*/
isSelected = key =>
this.state.selected.filter(data => data === key).length !== 0;
/**
* Handles checkbox event and adds / removes key from selected state
* @param key : string
*/
handleCheckboxChange = key => event => {
if (this.props.isMulty) {
this.setState({
selected: event.target.checked
? [...this.state.selected, key]
: this.state.selected.filter(data => data !== key)
});
} else {
this.setState({
selected: this.isSelected(key) ? [] : [key]
});
}
};
/**
* Handle chip delete event
* @param chip : string
*/
handleChipDelete = chip =>
this.setState({
selected: this.state.selected.filter(data => data !== chip)
});
/**
* Add all selected items to the form fields value
* then sends all the selected values to the Final Form parent container
*/
handleSubmit = () => {
if (this.props.fields.value) {
for (let index = 0; index < this.props.fields.value.length; index++) {
this.props.fields.remove(0);
}
}
this.state.selected.forEach(key => {
this.props.fields.push(key);
});
this.setState({
isOpen: false
});
// this.state.selected.foreach(key => console.log(key));
};
/**
* Clear all selected values
*/
handleReset = () => {
if (this.props.fields.value) {
for (let i = 0; i < this.props.fields.value.length; i++) {
this.props.fields.remove(0);
}
}
this.setState({
selected: [],
isOpen: true
});
};
/**
* Update search state
* @param searchValue : string | user search input
*/
onSearchChange = searchValue =>
this.setState({
searchValue
});
/**
* Filters values displayed to the user (checkboxes)
* First filters by search, and the applies pagnation filtering
* @param data : string | all data
* @param pageBreak : number | items allowed per page
*/
filterValues = (data, pageBreak) => {
return data
.filter(
key =>
key.toLowerCase().includes(this.state.searchValue.toLowerCase()) ||
this.state.searchValue === ""
)
.filter((x, i) => {
if (!pageBreak) {
return true;
}
if (
i >= pageBreak * this.state.currentPage &&
i <= pageBreak * this.state.currentPage + pageBreak - 1
) {
return true;
}
return false;
});
};
/**
* Returns the length of data after search filtering
* @param data : string | all data
*/
currentDataLength = data =>
data.filter(
key =>
key.toLowerCase().includes(this.state.searchValue.toLowerCase()) ||
this.state.searchValue === ""
).length;
render() {
const {
classes,
data,
title = "Select fields",
buttonText = "Select"
} = this.props;
let { pageBreak = 0 } = this.props;
if (data.length >= 50) {
pageBreak = 10;
}
const totalPages = pageBreak
? Math.ceil(this.currentDataLength(data) / pageBreak)
: 1;
return (
<React.Fragment>
{/* Popup Initiator */}
<div>
<Button
variant="contained"
color="primary"
className={classes.button}
endIcon={<AddCircleRoundedIcon />}
onClick={() => this.setPopup(true)}
style={{ marginRight: "10px" }}
>
{buttonText}
</Button>
{this.props.fields.value &&
this.props.fields.value.map(val => (
<Chip
label={val}
color="secondary"
clickable
style={{ margin: "0 5px" }}
/>
))}
</div>
{/* Content */}
<Dialog
open={this.state.isOpen}
aria-labelledby="form-dialog-title"
          fullWidth
          maxWidth="md"
>
<DialogTitle id="form-dialog-title">{title}</DialogTitle>
<SearchBar onSearchChange={this.onSearchChange} />
<DialogContent>
{/* Checkbox list */}
<FormGroup className={classes.itemsWrapper}>
<FormControl component="fieldset" className={classes.formControl}>
{this.filterValues(data, pageBreak).map(key => (
<FormControlLabel
control={
<Checkbox
color="primary"
value={key}
onChange={this.handleCheckboxChange(key)}
checked={this.isSelected(key)}
/>
}
label={key}
/>
))}
</FormControl>
</FormGroup>
<FormGroup>
{/* Chips */}
<ChipInput
value={this.state.selected}
onDelete={chip => this.handleChipDelete(chip)}
fullWidth
/>
</FormGroup>
</DialogContent>
<DialogActions>
<Button
disabled={!pageBreak || this.state.currentPage === 0}
onClick={() =>
this.setState({
currentPage: this.state.currentPage - 1
})
}
>
{"<"}
</Button>
<Typography>
{this.state.currentPage + 1} of {totalPages}
</Typography>
<Button
disabled={!pageBreak || this.state.currentPage >= totalPages - 1}
onClick={() =>
this.setState({
currentPage: this.state.currentPage + 1
})
}
>
{">"}
</Button>
</DialogActions>
<DialogActions>
<Button
onClick={() => this.setState({ isOpen: false })}
color="default"
>
Close
</Button>
<Button onClick={this.handleReset} color="default">
Clear
</Button>
<Button
onClick={this.handleSubmit}
variant="contained"
color="primary"
>
Save
</Button>
</DialogActions>
</Dialog>
</React.Fragment>
);
}
}
ListPickerComponent.propTypes = {
name: PropTypes.string.isRequired,
data: PropTypes.arrayOf(PropTypes.string).isRequired,
onSubmit: PropTypes.func,
isMulty: PropTypes.bool,
fields: PropTypes.any,
title: PropTypes.string,
buttonText: PropTypes.string,
pageBreak: PropTypes.number
};
function ListPicker({ ...restProps }) {
return (
<FieldArray
component={withStyles(styles)(ListPickerComponent)}
{...restProps}
/>
);
}
ListPicker.propTypes = {
name: PropTypes.string.isRequired,
data: PropTypes.arrayOf(PropTypes.string).isRequired,
isMulty: PropTypes.bool,
title: PropTypes.string,
buttonText: PropTypes.string,
pageBreak: PropTypes.number
};
export default ListPicker;
<file_sep># react-list-picker
##### This project provides a List Picker form field for [Material-UI][mui] and [React-Final-Form][rff]. The purpose is to have a compelling, dynamic way for a user to select input in a form.

## Installation
### step 1:
```shell
npm i --save react-list-picker
```
### step 2:
## Installation (peer dependencies) - a must for it to work!
`npm i --save final-form react-final-form final-form-arrays react-final-form-arrays @material-ui/core`
**Note:** This is the version for Material-UI 1.0.0 or later.
## Usage
This is a simple example showing how `react-list-picker` integrates with `react-final-form` and with `material-ui`.
```javascript
import React from "react";
import ListPicker from "react-list-picker";
import { Form } from "react-final-form";
import { createMuiTheme, MuiThemeProvider } from "@material-ui/core/styles";
import arrayMutators from "final-form-arrays";
const myOptions = ["Meat Lover", "Veggie Heaven", "Hawaii-5-0", "Inferno"];
let output = [];
const onSubmit = values => {
output = values.crazyList;
};
const theme = createMuiTheme({
palette: {
primary: { main: "#333" },
secondary: { main: "#000" }
},
status: {
danger: "orange"
}
});
function App() {
return (
<MuiThemeProvider theme={theme}>
<Form
onSubmit={onSubmit}
mutators={{
...arrayMutators // super important to include!!!
}}
render={({ handleSubmit, ...rest }) => (
<form onSubmit={handleSubmit} style={{ padding: "50px" }}>
<ListPicker
name="crazyList"
data={myOptions}
isMulty
title="My crazy list"
buttonText="React List Picker"
/>
<hr />
<button type="submit">Submit</button>
{output.map(val => (
<h6>{val}</h6>
))}
</form>
)}
/>
</MuiThemeProvider>
);
}
export default App;
```
## Properties
| Name | Required | Type | Default | Description |
| ------------ | ------------------------------- | ---------- | ----------------- | --------------------------------------- |
| `name` | yes | `string` | | Field name for final form |
| `data` | yes | `string[]` | | The input to select from |
| `isMulty` | no | `boolean` | `false` | Allow selecting multiple values |
| `title` | no | `string` | `"Select fields"` | Title shown on the ListPicker element |
| `buttonText` | no | `string` | `"Select"` | Text value of the submit changes button |
| `pageBreak` | no | `number` | `0` | Items shown per page of the checkbox list; forced to `10` when `data` has 50 or more entries (see example below) |
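A short example of the optional `pageBreak` prop (the values here are illustrative):
```javascript
// Show the checkbox list five items at a time
<ListPicker name="crazyList" data={myOptions} isMulty pageBreak={5} />
```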
## License
The files included in this repository are licensed under the MIT license.
[rff]: https://github.com/final-form/react-final-form
[mui]: http://www.material-ui.com/#/
<file_sep>import * as React from "react";
export interface ListPickerProps {
name: string;
data: string[];
isMulty?: boolean;
title?: string;
buttonText?: string;
pageBreak?: number;
[x: string | number]: any;
}
declare const ListPicker: React.ComponentType<ListPickerProps>;
export default ListPicker;
|
2066cfa33b6807be183e8c6d5a1d195e785a5dc0
|
[
"JavaScript",
"TypeScript",
"Markdown"
] | 6
|
JavaScript
|
Eylon-sargon/react-list-picker
|
cc1145493353f01ffb0a3e99e6b2e27e92189908
|
11daaa13c082036fb0e344b30074ca1b3888cffd
|
refs/heads/master
|
<file_sep>package com.demo2;
import java.util.*;
public class demo_hashmap {
public static void main(String[] args)
{
HashMap hm = new HashMap();
Emp emp1 = new Emp("s001","name3",20, 6.2f);
Emp emp2 = new Emp("s002","name2",21, 7.2f);
Emp emp3 = new Emp("s003","name1",22, 4.2f);
        hm.put("s001", emp1);// add an element to the HashMap: hm.put(key, value);
        // the key is chosen by the caller, the value is the object to store
        hm.put("s002", emp2);
        hm.put("s003", emp3);// a HashMap cannot hold duplicate keys: putting an existing key overwrites its value
        System.out.println(hm.size());// three distinct keys were added, so this prints 3; re-putting "s002" would overwrite its value instead
        // look up the object stored under key "s002"
        hm.containsKey("s002");// returns a boolean: true if the key exists, false otherwise
if(hm.containsKey("s002")){
Emp temp = (Emp)hm.get("s002");
System.out.println(temp.getAge());
}
        // iterate over the HashMap using an Iterator
Iterator it = hm.keySet().iterator();
//it.hasNext();
while(it.hasNext()){
String key = it.next().toString();
Emp temp = (Emp)hm.get(key);
System.out.println(temp.getName());
System.out.println(temp.getSal());
}
}
}
class Emp
{ private String empNum;
private String name;
private int age;
private float sal;
public Emp(String empNum, String name,int age ,float sal)
{
this.empNum = empNum;
this.name = name;
this.age = age;
this.sal = sal;
}
public String getEmpNum() {
return empNum;
}
public void setEmpNum(String empNum) {
this.empNum = empNum;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public int getAge() {
return age;
}
public void setAge(int age) {
this.age = age;
}
public float getSal() {
return sal;
}
public void setSal(float sal) {
this.sal = sal;
}
}
<file_sep>/**
 * Using several layout managers together
*/
package com.demo3;
import java.awt.*;
import javax.swing.*;
public class demo_gui_4 extends JFrame {
JPanel jp1,jp2;
JButton jb1,jb2,jb3,jb4,jb5,jb6;
public static void main(String[] args)
{
demo_gui_4 demo_gui_4 = new demo_gui_4();
}
public demo_gui_4()
{
        //create the components
        //JPanel uses FlowLayout by default
jp1 = new JPanel();
jp2 = new JPanel();
jb1 = new JButton("西瓜");
jb2 = new JButton("苹果");
jb3 = new JButton("荔枝");
jb4 = new JButton("葡萄");
jb5 = new JButton("桔子");
jb6 = new JButton("香蕉");
jp1.add(jb1);
jp1.add(jb2);
jp2.add(jb3);
jp2.add(jb4);
jp2.add(jb5);
        //add the panels to the JFrame
        this.add (jp1,BorderLayout.NORTH);
        //put the "banana" button in the center
        this.add(jb6,BorderLayout.CENTER);
        //put the second panel in the south region
        this.add(jp2, BorderLayout.SOUTH);
this.setTitle("复杂布局");
this.setSize(300, 200);
this.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
this.setVisible(true);
}
}
<file_sep>package com.demo3;
import java.awt.*;
import javax.swing.*;
public class demo_gui_2 extends JFrame{
JButton jb1,jb2,jb3,jb4,jb5,jb6;
public static void main(String[] args)
{
demo_gui_2 demo_gui_2 = new demo_gui_2();
}
public demo_gui_2()
{
jb1 = new JButton("jb1");
jb2 = new JButton("jb2");
jb3 = new JButton("jb3");
jb4 = new JButton("jb4");
jb5 = new JButton("jb5");
jb6 = new JButton("jb6");
this.add(jb1);
this.add(jb2);
this.add(jb3);
this.add(jb4);
this.add(jb5);
this.add(jb6);
        //FlowLayout is center-aligned by default
        //this.setLayout(new FlowLayout(the alignment can be set here));
        this.setLayout(new FlowLayout(FlowLayout.LEFT));//left-aligned
this.setTitle("FlowLayout");
this.setSize(300, 200);
this.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
this.setVisible(true);
}
}
<file_sep>package com.demo1;
public class demo {
public static void main(String []args)
{
}
}
interface Fish
{
public void swimming();
}
interface Bird
{
public void flying();
}
class Monkey
{
int age;
public void jump()
{
System.out.println("tiaotiaotiao");
}
}
class LittleMonkey extends Monkey implements Fish,Bird
{
@Override
public void swimming() {
// TODO Auto-generated method stub
}
@Override
public void flying() {
// TODO Auto-generated method stub
}
}
<file_sep>package com.demo2;
import java.lang.reflect.Method;
import java.io.*;
public class demo_fanxing {
public static void main(String[] args)
{
FileReader fr = null;
try{
            //statements that may throw;
fr = new FileReader("D:\\test.txt");
}
catch(Exception e){
e.printStackTrace();
System.out.println(e.getMessage());
}
finally{
            // this block runs whether or not an exception occurred;
            // resources that need to be released, such as files, open
            // connections, or allocated memory, are usually closed here
if(fr !=null){
try{
fr.close();
}catch(Exception e){
System.out.println(e.getMessage());
}
}
}
        //basic data types
        Gen<String> gen1 = new Gen<String>("aa");
        // gen1.showTypeName();
        Gen<Integer> gen2 = new Gen<Integer>(1);
        //gen2.showTypeName();
        //a class type
        Gen<Bird> gen3 = new Gen<Bird>(new Bird());
        gen3.showTypeName();
    }
}
//define a Bird class
class Bird
{
public void show()
{
System.out.println("aa");
}
public void count(int a,int b)
{
System.out.println(a+b);
}
}
//a generic class
class Gen<T>
{
private T o;
    //constructor
public Gen(T a)
{
o = a;
}
    //member method
public void showTypeName()
{
System.out.println("类型是"+ o.getClass().getName());
        //via reflection we can obtain a lot of information about type T, e.g. method names, return types, fields, and member methods
        //for example, get the names of its member methods
Method []m = o.getClass().getDeclaredMethods();
//System.out.println(m[0].getClass());
for(int i=0;i<m.length;i++)
{
System.out.println(m[i].getName());
}
}
}
<file_sep>package com.demo3;
import java.awt.*;
import javax.swing.*;
public class demo_gui_5 extends JFrame {
JPanel jp1,jp2,jp3;
JButton jb1,jb2;
JLabel jl1,jl2;
JTextField jtf;
JPasswordField jpf;
public static void main(String[] args)
{
demo_gui_5 demo_gui_5 = new demo_gui_5();
}
public demo_gui_5()
{
jp1 = new JPanel();
jp2 = new JPanel();
jp3 = new JPanel();
jb1 = new JButton("确定");
jb2 = new JButton("取消");
jl1 = new JLabel("用户名");
jl2 = new JLabel("密 码");
jtf = new JTextField(10);
jpf = new JPasswordField(10);
jp1.add(jl1);
jp1.add(jtf);
jp2.add(jl2);
jp2.add(jpf);
jp3.add(jb1);
jp3.add(jb2);
this.setLayout(new GridLayout(3,1));
this.add(jp1);
this.add(jp2);
this.add(jp3);
this.setTitle("复杂布局");
this.setSize(300, 200);
this.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
this.setVisible(true);
}
}
<file_sep>package com.demo3;
import java.awt.*;
import javax.swing.*;
public class demo_gui_8 extends JFrame {
MyPanel mp= null;
public static void main(String []args)
{
demo_gui_8 demo_gui_8 = new demo_gui_8();
}
public demo_gui_8()
{
mp = new MyPanel();
this.add(mp);
this.setSize(400,300);
this.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
this.setVisible(true);
}
    //define a panel used as the drawing surface
class MyPanel extends JPanel
{
        //override JPanel's paint method
        //Graphics is the central drawing class; think of it as a paintbrush
public void paint(Graphics g)
{
            //call the parent implementation to complete initialization
            super.paint(g);
            //draw
g.drawOval(10, 10, 100, 100);
}
}
}
|
989d342143b36437e2bbb7010f7d997d46c031be
|
[
"Java"
] | 7
|
Java
|
ilikecandice/TankGame-java
|
a1027740601263edf2219661af96720ea6f1abf5
|
484e4333b8cf81ee3a5726f6d04c29602d17307c
|
refs/heads/master
|
<file_sep>export class Role {
  // TypeScript shorthand: declares and assigns the properties in the constructor
constructor(public id: number, public name: string) {}
}
export const ROLES: Role[] = [
new Role(1, 'admin'),
new Role(2, 'moderate'),
new Role(3, 'User'),
new Role(4, 'Support'),
];
<file_sep>import { AppRoutingModule } from './app.routes';
import { BrowserModule } from '@angular/platform-browser';
import { NgModule } from '@angular/core';
import { FormsModule } from '@angular/forms';
//
import { AppComponent } from './app.component';
import { BindingComponent } from './binding/binding.component';
import { Binding2Component } from './binding2/binding2.component';
import { NgForComponent } from './ng-for/ng-for.component';
import { HtmlAttributeComponent } from './html-attribute/html-attribute.component';
import { NgClassComponent } from './ng-class/ng-class.component';
import { NgStyleComponent } from './ng-style/ng-style.component';
import { NgIfComponent } from './ng-if/ng-if.component';
import { NgSwitchComponent } from './ng-switch/ng-switch.component';
import { Binding2RadioComponent } from './binding2-radio/binding2-radio.component';
import { InputKeyupComponent } from './input-keyup/input-keyup.component';
import { Binding2SelectionComponent } from './binding2-selection/binding2-selection.component';
import { RouterModule } from '@angular/router';
import { TextEditorComponent } from './text-editor/text-editor.component';
import { DiaglogComponent } from './diaglog/diaglog.component';
@NgModule({
declarations: [
AppComponent,
BindingComponent,
Binding2Component,
NgForComponent,
HtmlAttributeComponent,
NgClassComponent,
NgStyleComponent,
NgIfComponent,
NgSwitchComponent,
Binding2RadioComponent,
InputKeyupComponent,
Binding2SelectionComponent,
TextEditorComponent,
DiaglogComponent,
],
imports: [BrowserModule, FormsModule, RouterModule, AppRoutingModule],
providers: [],
bootstrap: [AppComponent],
})
export class AppModule {}
<file_sep>import { Component, OnInit } from '@angular/core';
@Component({
selector: 'app-binding2',
templateUrl: './binding2.component.html',
styleUrls: ['./binding2.component.scss']
})
export class Binding2Component {
name: string = '';
email: string ='';
password: string = '';
address:string='';
setName() {
this.name = 'Nancy';
}
setEmail(){
this.email = "<EMAIL>";
}
setPassword(){
this.password = "<PASSWORD>";
}
setUppercaseName(address:string){
this.address = address.toUpperCase();
}
}
<file_sep>export class Hero {
id: number;
name: string;
emotion: string;
constructor(id: number, name: string, emotion: string) {
this.id = id;
this.name = name;
this.emotion = emotion;
}
}
export const heroes: Hero[] = [
new Hero(1, 'hungbeo', 'happy'),
new Hero(2, 'Hunggay', 'sad'),
new Hero(3, 'Long', 'confused'),
new Hero(4, 'Thao', 'Magneta')
];
<file_sep>import { Component, OnInit } from '@angular/core';
@Component({
selector: 'app-ng-class',
templateUrl: './ng-class.component.html',
styleUrls: ['./ng-class.component.scss']
})
export class NgClassComponent implements OnInit {
/**
* ngClass
*/
isSaveable = true;
isSpecial = true;
isModified = true;
currentClasses: {};
ngOnInit(): void {
this.setCurrentClasses();
}
setCurrentClasses() {
// JSON
this.currentClasses = {
'saveable': this.isSaveable,
'modified': this.isModified,
'special': this.isSpecial
};
}
}
<file_sep>import { Component, OnInit } from '@angular/core';
@Component({
selector: 'app-html-attribute',
templateUrl: './html-attribute.component.html',
styleUrls: ['./html-attribute.component.scss'],
})
export class HtmlAttributeComponent implements OnInit {
url =
'https://thepowerofsmiling.com/wp-content/uploads/2014/11/Smile-Icon.png';
  isDisable: boolean; // initializing it here has no effect
isSpecial: boolean;
constructor() {}
ngOnInit(): void {
    // must be initialized here, after the HTML DOM has been created
this.isDisable = true;
this.isSpecial = true;
}
}
<file_sep>import { Component, OnInit } from '@angular/core';
import { Hello } from './Hello';
@Component({
selector: 'app-binding',
templateUrl: './binding.component.html',
styleUrls: ['./binding.component.scss']
})
export class BindingComponent implements OnInit {
  // this field is referenced from the HTML template
  title: string = 'binding.Component here:';
  numberOfClick: number= 0;
  user:Hello = new Hello(1,'Hungbeo', 40);
  // this function is called from the template
public clickButton(){
this.numberOfClick++;
}
constructor() { }
ngOnInit(): void {
}
}
<file_sep>import { Component, OnInit } from '@angular/core';
import { Customer } from './Customer';
@Component({
selector: 'app-ng-for',
templateUrl: './ng-for.component.html',
styleUrls: ['./ng-for.component.scss'],
})
export class NgForComponent implements OnInit {
items: Customer[] = [
new Customer(1, 'hung', 14),
new Customer(2, 'khien', 14),
new Customer(3, 'kien', 14),
];
newItems: Customer[] = [
new Customer(1, 'hungChanged', 15),
new Customer(2, 'khien Ko changeId', 16),
new Customer(5, 'kienChanged', 14),
];
customers: Customer[];
custome1: Customer = new Customer(1, '<NAME>', 8);
constructor() {}
ngOnInit(): void {
this.customers = this.items;
}
clickButton(name: string) {
window.alert(name);
}
resetList() {
this.customers = this.items;
}
changeNewArray() {
this.customers = this.newItems;
}
changeItemOfArray() {
    // even mutating an element of the array is still reflected in the HTML (surprisingly)
this.customers[0].name = 'aaaaa';
this.customers[2].name = 'bbbbb';
}
/**
*
   * @param index : the item's position within the *ngFor array
   * @param customer: the array element at position index
*/
trackById(index: number, customer: Customer): number {
return customer.id;
}
/**
*
   * @param index : the item's position within the *ngFor array
   * @param customer: the array element at position index
*/
trackByName(index: number, customer: Customer): string {
return customer.name;
}
}
<file_sep><div class="binding2-component-layout">
<h1>binding2.component: binding 2 way</h1>
  <p style="color: red">option 1: combine the @Input [] and the @Output () into one binding</p>
<p>
<span>Value: {{ name }}</span>
<br />
<label for="example-ngModel">[(ngModel)]:</label>
<!-- two way binding = @input [] and @output () -->
    <!-- <input> is a special case where () and [] can be combined into this short form -->
    <!-- FormsModule must be imported into the @NgModule for this to work -->
    <!-- id: used for access from the DOM or jQuery -->
<input type="text" [(ngModel)]="name" id="example-ngModel" />
<button (click)="setName()" class="button">Set value</button>
</p>
<p style="color: red">
    splitting the @Input [] and the @Output () has the advantage that we can capture $event from the @Output
</p>
<p>
<span>Value: {{ email }}</span>
<br />
<label for="without">without NgModel:</label>
<!-- @input = [], @output = () -->
    <!-- $event = the @output data of the input tag -->
    <!-- $event is a local variable, so it only works inside the double quotes "" -->
    <!-- (input): is the @output () -->
    <!-- value: is the @input [] -->
<input [value]="email" (input)="email = $event.target.value" id="without" />
<button (click)="setEmail()" class="button">Set value</button>
</p>
  <p style="color: red">the two approaches below are the best: [ngModel] and (ngModelChange)</p>
<p>
<span>Value: {{ password }}</span>
<br />
<label for="example-change">(ngModelChange)="...name=$event":</label>
<!-- @input = [], @output = () -->
    <!-- $event = the @output data of the input tag -->
    <!-- () = "javaExpression" is triggered on an @output event; it also works without using $event -->
<input [ngModel]="password" (ngModelChange)="password = $event" id="example-change" />
<button (click)="setPassword()" class="button">Set value</button>
</p>
<p>
<span>Value: {{ address }}</span>
<br />
<label for="example-uppercase">(ngModelChange)="setUppercaseName($event)"></label>
<!-- @input = [], @output = () -->
    <!-- $event = the @output data of the input tag -->
    <!-- () = "javaExpression" is triggered on an @output event; it also works without using $event -->
    <!-- (ngModelChange) is the @output of the ngModel directive -->
    <!-- ngModel or [ngModel] must be present for (ngModelChange) to work -->
<input [ngModel]="address" (ngModelChange)="setUppercaseName($event)" id="example-uppercase" />
<button (click)="address = 'Hanoi'" class="button">Set value</button>
</p>
<p style="color: red">
    the @Output () event only fires when the change happens inside the input component
    <br />
    changing the component from the outside will not trigger the @output () event
    <br />
    e.g. clicking the button is an outside change, so setUppercaseName($event) is not called
</p>
</div>
<file_sep>export class User{
name:string;
email:string;
constructor(name:string, email:string){
this.name = name;
this.email = email;
}
}
<file_sep>import { Component, OnInit } from '@angular/core';
@Component({
selector: 'app-ng-style',
templateUrl: './ng-style.component.html',
styleUrls: ['./ng-style.component.scss']
})
export class NgStyleComponent implements OnInit {
  islargeFont:boolean; // initializing here will not be loaded into the DOM
isHidden:boolean;
constructor() { }
/**
   * this hook is called after the HTML DOM has been created
*/
ngOnInit(): void {
    // initializing here raises a change event so the DOM bound to these fields gets updated
this.islargeFont = true;
this.isHidden = false;
}
}
<file_sep>import { Component, OnInit } from '@angular/core';
@Component({
selector: 'app-ng-if',
templateUrl: './ng-if.component.html',
styleUrls: ['./ng-if.component.scss']
})
export class NgIfComponent implements OnInit {
// *ngIf
  condition:boolean; //initializing here would not propagate the value into the HTML
condition2:boolean;
condition3:boolean;
// show/hidden
isHidden:boolean;
constructor() { }
ngOnInit(): void {
    // initializing here, after the DOM has loaded, is what gets the values into the HTML
this.condition = true;
this.condition2 = true;
this.condition3 = true;
this.isHidden = true;
}
}
<file_sep>import { Component, OnInit } from '@angular/core';
@Component({
selector: 'app-input-keyup',
templateUrl: './input-keyup.component.html',
styleUrls: ['./input-keyup.component.scss']
})
export class InputKeyupComponent implements OnInit {
enterMessage: string = '';
blurMessage:string = '';
constructor() { }
ngOnInit(): void {
}
onEnter(message: string) { // with type info
this.enterMessage = message;
}
/**
   * called when the edited input goes from focused to unfocused by clicking elsewhere
* @param message
*/
onBlur(message: string) { // with type info
this.blurMessage = message;
}
}
<file_sep>import { Component, OnInit } from '@angular/core';
@Component({
selector: 'app-diaglog',
templateUrl: './diaglog.component.html',
styleUrls: ['./diaglog.component.scss'],
})
export class DiaglogComponent implements OnInit {
constructor() {}
private modal: HTMLElement;
ngOnInit(): void {
//========================== Hidden Dialog ==================
    // modal: the full-screen (100%) background that surrounds the Dialog
this.modal = document.getElementById('dialog-id');
/**
     * events are handled by the child HTML tag first:
     * so clicks inside the Dialog never reach the background
*/
window.onclick = (event) => {
      // when clicking the background Modal outside the Dialog
if (event.target == this.modal) {
        // 'none' hides the element without it taking up any space
this.modal.style.display = 'none';
}
};
}
}
<file_sep>import { Role, ROLES } from './roles';
import { Component, OnInit } from '@angular/core';
@Component({
selector: 'app-binding2-selection',
templateUrl: './binding2-selection.component.html',
styleUrls: ['./binding2-selection.component.scss'],
})
export class Binding2SelectionComponent implements OnInit {
public roles: Role[] = ROLES;
/**
   * real-world use case, e.g. a "Search all" option instead of leaving it null
*/
public roleAll: Role = new Role(-1, 'all');
public roleSelect: Role;
// test ngValue và [selected]
public roleSelect2: Role = new Role(-1, 'invalid');
constructor() {}
ngOnInit(): void {
this.roleSelect = ROLES[1];
}
}
<file_sep>import { DiaglogComponent } from './diaglog/diaglog.component';
import { TextEditorComponent } from './text-editor/text-editor.component';
import { Binding2SelectionComponent } from './binding2-selection/binding2-selection.component';
import { NgSwitchComponent } from './ng-switch/ng-switch.component';
import { NgStyleComponent } from './ng-style/ng-style.component';
import { NgForComponent } from './ng-for/ng-for.component';
import { NgClassComponent } from './ng-class/ng-class.component';
import { InputKeyupComponent } from './input-keyup/input-keyup.component';
import { HtmlAttributeComponent } from './html-attribute/html-attribute.component';
import { Binding2RadioComponent } from './binding2-radio/binding2-radio.component';
import { Binding2Component } from './binding2/binding2.component';
import { BindingComponent } from './binding/binding.component';
import { NgModule } from '@angular/core';
import { RouterModule, Routes } from '@angular/router';
/**
 * A TypeScript object array => JSON-like syntax (explicit {"key": "value"})
 * the syntax resembles a JSON array; each item is one Route
 * these are the top-level routes of the app
*/
const appRoutes: Routes = [
{ path: '', pathMatch: 'full', redirectTo: 'binding' },
{ path: 'binding', component: BindingComponent },
{ path: 'binding-2way', component: Binding2Component },
{ path: 'binding-radio', component: Binding2RadioComponent },
{ path: 'binding-selection', component: Binding2SelectionComponent },
{ path: 'html-attribute', component: HtmlAttributeComponent },
{ path: 'input-keyup', component: InputKeyupComponent },
{ path: 'ng-class', component: NgClassComponent },
{ path: 'ng-for', component: NgForComponent },
{ path: 'ng-style', component: NgStyleComponent },
{ path: 'ng-switch', component: NgSwitchComponent },
{ path: 'text-editor', component: TextEditorComponent },
{ path: 'dialog', component: DiaglogComponent },
];
@NgModule({
/**
   * routing is itself a module, so it has to be imported
   * RouterModule.forRoot() may only be called once in the whole app; other modules use RouterModule.forChild()
*/
imports: [RouterModule.forRoot(appRoutes)],
exports: [RouterModule],
})
export class AppRoutingModule {}
<file_sep>import { Student } from './Student';
import { Component, OnInit } from '@angular/core';
@Component({
selector: 'app-binding2-radio',
templateUrl: './binding2-radio.component.html',
styleUrls: ['./binding2-radio.component.scss'],
})
export class Binding2RadioComponent implements OnInit {
selectedStudent: Student;
students: Student[] = [
new Student(1, 'hungbeo', 'happy'),
new Student(2, 'Hunggay', 'sad'),
new Student(3, 'Long', 'confused'),
new Student(4, 'Thao', 'Magneta'),
];
constructor() {
// init radio
this.selectedStudent = this.students[1];
    //this code would not work because objects are compared by reference
// this.selectedStudent = new Student(1, 'hungbeo', 'happy');
}
ngOnInit(): void {}
}
|
f1f79ceba909a25a8a4007836a2ddcbebe7bc5f9
|
[
"TypeScript",
"HTML"
] | 17
|
TypeScript
|
hungnguyenmanh82/angular-helloworld
|
10cc446f40c9a0616bcb6f2641618bd3fce39c40
|
0cee2cdd2ef853ff6174f36600cf4739ee15e3fe
|
refs/heads/master
|
<repo_name>hannaliebl/sandramathern.com<file_sep>/portfolio/js/jQuerySlider.js
/* This work is licensed under the Creative Commons Attribution-ShareAlike
* 3.0 Unported License. To view a copy of this license, visit
* http://creativecommons.org/licenses/by-sa/3.0/.
*
* jQuerySlider by <NAME>
* (http://coding-contemplation.blogspot.com)
* (https://github.com/smithcyr)
*
* Unlike other image sliders this jQuery plugin uses the transition between
* two divs to create a slideshow of background images.
* Created for use on http://grinnellultimate.com
*
* There are three parameters that the user can edit.
* @param int interval - the interval in milliseconds between slide transitions
* @param int duration - the duration of the fade between slides
* @param color load_color - the initial color of the first slide as the first image is loaded
* @param object css - a javascript object with each key as the css parameter
* and value for the respective parameter
*
* */
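 /* Example usage (the selector and image paths below are illustrative; header.php
  * in this theme builds an equivalent call from WordPress attachments):
  *
  *   $('#header-container').jQuerySlider({
  *     images: ['img/slide-1.jpg', 'img/slide-2.jpg'],
  *     interval: 6000,
  *     duration: 1200
  *   });
  */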
;(function( $ ) {
var defaults = {interval:10000,
duration:500,
load_color:'black',
css: {"position":"absolute",
"background-repeat":"no-repeat",
"background-position": "top center",
"background-size": "cover",
"width":"100%",
"min-height":"100%"}},
pluginName = 'jQuerySlider';
// create the utility container divs used to hold the background images,
// list the current options in a comment, and start the slideshow
function Plugin (el, options) {
base = this;
base.o = {};
$.extend(base.o,defaults,options);
base.el = el;
base.$el = $(el);
base.imgs = base.o.images;
base.num = base.o.images.length;
base.current = 0;
base.topContainer = 0;
base.containers = Array();
base.containers.push($(document.createElement('div'))
.addClass('jqueryslider-slide')
.css(base.o.css)
.css({"z-index":"-1",
"background-color":base.o.load_color}));
base.containers.push($(document.createElement('div'))
.addClass('jqueryslider-slide')
.css(base.o.css)
.css("z-index","-2"));
base.displayOptions = "<!-- ";
for (var key in base.o)
base.displayOptions += key + " - " + base.o[key] + ":";
base.displayOptions += " -->";
base.$el.append(base.containers[0],base.containers[1],base.displayOptions);
base.next_slide = function () {
base.current = (base.current + 1) % base.num;
base.containers[base.topContainer].fadeOut(base.o.duration,function () {
$(this).css({'background-image':'url("' + base.imgs[base.current] + '")',
"z-index":"-2"});
base.containers[(base.topContainer + 1) % 2].css("z-index","-1");
$(this).show();
base.topContainer = (base.topContainer + 1) % 2;
});
};
this.init(base);
};
// fadeOut the foreground div, iterate its background image to the next one
// in queue, switch the two div's z-index, and fadeIn the now-background div
// repeat ad infinitum
Plugin.prototype.init = function (base) {
$('<img/>').attr('src', base.imgs[base.current]).load(function() {
base.containers[(base.topContainer + 1) % 2].css('background-image', 'url(' + base.imgs[base.current] + ')');
if (base.num < 2){
base.containers[base.topContainer].fadeOut(base.o.duration,function () {
$(this).css("z-index","-2");
base.containers[(base.topContainer + 1) % 2].css("z-index","-1");
});
return;
}
base.next_slide();
setInterval(base.next_slide ,base.o.interval);
});
};
$.fn[pluginName] = function (options) {
return this.each( function () {
if(!$.data(this,'plugin_'+pluginName)) {
$.data(this,'plugin_'+pluginName,
new Plugin(this, options));
}
});
};
})( jQuery );<file_sep>/portfolio/about.php
<?php
/*
Template Name: About
*/
?>
<?php
/**
* @package WordPress
* @subpackage portfolio
*/
get_header(); ?>
<div class="row">
<div class="fourcol secondary-nav">
<ul>
<li><a href="#artists-statement">Artist's Statement</a></li>
<li><a href="#bio">Biography</a></li>
<li><a href="#events">Events and Awards</a></li>
</ul>
</div>
</div>
</header>
</div>
<div class="main-container">
<div class="container">
<div class="row">
<div class="twelvecol">
<h2 id="artists-statement">Artist's Statement</h2>
</div>
</div>
<div class="row">
<div class="sixcol-extramargin">
<?php if (have_posts()) : while (have_posts()) : the_post(); ?>
<?php the_content(); ?>
</div>
<p class="return-to-top"><a href="#">Back to the top.</a></p>
</div>
</div>
</div>
<div id="bio-image-container">
</div>
<div class="main-container">
<div class="container">
<div class="row">
<div class="twelvecol">
<h2 id="bio">Biography</h2>
<p><a href="http://sandramathern.com/wp-content/uploads/2013/01/Resume_Mathern_2012.pdf">Download CV</a><br/>
<a href="mailto:<EMAIL>">Email Sandra</a>
</p>
</div>
</div>
<div class="row">
<div class="sixcol-extramargin">
<?php the_field('biography'); ?>
</div>
<p class="return-to-top"><a href="#">Back to the top.</a></p>
</div>
</div>
</div>
<div id="events-image-container">
</div>
<div class="main-container">
<div class="container">
<div class="row">
<div class="twelvecol">
<h2 id="events">Events and Awards</h2>
<?php endwhile; endif; ?>
<p class="return-to-top"><a href="#">Back to the top.</a></p>
</div>
</div>
</div>
</div>
<?php get_footer(); ?><file_sep>/portfolio/header.php
<?php
/**
* @package WordPress
* @subpackage portfolio
*/
?>
<!DOCTYPE html>
<!--[if lt IE 7]> <html class="no-js lt-ie9 lt-ie8 lt-ie7"> <![endif]-->
<!--[if IE 7]> <html class="no-js lt-ie9 lt-ie8"> <![endif]-->
<!--[if IE 8]> <html class="no-js lt-ie9"> <![endif]-->
<!--[if gt IE 8]><!--> <html class="no-js"> <!--<![endif]-->
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1">
<title><?php wp_title('«', true, 'right'); ?> <?php bloginfo('name'); ?></title>
<meta name="description" content="">
<meta name="viewport" content="width=device-width">
<!-- Google Webfonts -->
<link href='http://fonts.googleapis.com/css?family=Playfair+Display:400' rel='stylesheet' type='text/css'>
<link href='http://fonts.googleapis.com/css?family=Open+Sans:400italic,400,300,600' rel='stylesheet' type='text/css'>
<link href='http://fonts.googleapis.com/css?family=Source+Sans+Pro:200,300,400' rel='stylesheet' type='text/css'>
<?php if (is_page( 'About' ) || (get_post_type() == 'page' && has_post_thumbnail())) {
wp_enqueue_script('jQuerySlider');
} ?>
<!-- Stylesheets -->
<?php versioned_stylesheet($GLOBALS["TEMPLATE_RELATIVE_URL"]."css/normalize.css") ?>
<?php versioned_stylesheet($GLOBALS["TEMPLATE_RELATIVE_URL"]."css/ie.css") ?>
<?php versioned_stylesheet($GLOBALS["TEMPLATE_RELATIVE_URL"]."css/style.css") ?>
<!-- Wordpress Templates require a style.css in theme root directory -->
<?php versioned_stylesheet($GLOBALS["TEMPLATE_RELATIVE_URL"]."style.css") ?>
<!-- All JavaScript at the bottom, except for Modernizr which enables HTML5 elements & feature detects -->
<?php versioned_javascript($GLOBALS["TEMPLATE_RELATIVE_URL"]."html5-boilerplate/js/vendor/modernizr-2.6.1.min.js") ?>
<?php wp_head(); ?>
</head>
<body>
<!--[if lt IE 7]>
<p class="chromeframe">You are using an <strong>outdated</strong> browser. Please <a href="http://browsehappy.com/">upgrade your browser</a> or <a href="http://www.google.com/chromeframe/?redirect=true">activate Google Chrome Frame</a> to improve your experience.</p>
<![endif]-->
<div id="header-container">
<?php
if (is_page( 'About' )) {
$term = \get_term_by('name', 'aboutslideshow', 'category');
$attachments = get_posts(array(
'category' => $term->term_id,
'post_type' => 'attachment',
));
$bgimages = array();
foreach ($attachments as $bgimage) {
$img_url = wp_get_attachment_image_src($bgimage->ID,'full');
array_push($bgimages, $img_url[0]);
}
echo "<script>(function ($) {
$('#header-container').jQuerySlider({images:Array('" . implode("','",$bgimages) . "'),".
"interval:6000,".
"duration:1200});".
"})(jQuery);".
"</script>";
}
else if (get_post_type() == 'page' && has_post_thumbnail()) {
$img_url = wp_get_attachment_image_src( get_post_thumbnail_id(), 'full');
echo "<script>(function ($) {".
"$('#header-container').jQuerySlider({images:Array('" . $img_url[0] . "'),".
"interval:6000,".
"duration:1200});".
"})(jQuery);".
"</script>";
} else {
echo "<style> #header-container {background: black}</style>";
}
?>
<header class="container">
<nav class="row">
<div class="twelvecol">
<?php
wp_nav_menu(
array(
'theme_location' => 'nav-main',
'menu_class' => 'menu',
'depth' => '1'
)
);
?>
<h1><NAME></h1>
</div>
</nav>
<div class="row">
<div class="twelvecol">
<h2><?php the_title(); ?></h2>
</div>
</div><file_sep>/portfolio/footer.php
<?php
/**
* @package WordPress
* @subpackage portfolio
*/
?>
<footer id="mainfooter" class="container">
<div class="row">
<div class="sixcol">
<?php
wp_nav_menu(
array(
'theme_location' => 'nav-footer',
'menu_class' => 'menu-footer',
'depth' => '1'
)
);
?>
<p class="clear"><a href="mailto:<EMAIL>">Email Sandra</a></p>
</div>
<div class="sixcol last">
<p class="credit">Photography by <NAME><br />
Development by <a href="http://coding-contemplation.blogspot.com/"><NAME></a>, Design by <a href="http://www.hannaliebl.com"><NAME></a><br />
© <?php echo date("Y") ?></p>
</div>
</div>
</footer>
<script src="//ajax.googleapis.com/ajax/libs/jquery/1.8.3/jquery.min.js"></script>
<script>window.jQuery || document.write('<script src="js/vendor/jquery-1.8.3.min.js"><\/script>')</script>
<script src="js/plugins.js"></script>
<script src="js/main.js"></script>
</body>
</html>
<file_sep>/portfolio/functions/media_integration.php
<?php
/**
* @package WordPress
*/
/* Sets up media integration with the theme.
*
* Adds theme option fields for Flickr ID and Vimeo ID for use in the works
* pages and all slideshows. Includes the Flickr PHP library and
* initializes it when settings are updated.
*/
// The vimeo api base url
$GLOBALS['vimeo_base_url'] = "http://vimeo.com/api/v2/";
// Include the php library for Flickr integration
// /functions/phpFlicker/phpFlickr.php
//include( $GLOBALS["TEMPLATE_DIR_URL"] . "/functions/phpFlickr/phpFlickr.php");
// utility function to call flickr methods without the phpFlickr object
// taken and modified from the flickr example php response function:
// http://www.flickr.com/services/api/response.php.html
function call_flickr_api_method ($arguments) {
// use the arguments to build the parameter array and construct the url
$params = array_merge(array('format'=>'php_serial'),$arguments);
$encoded_params = array();
foreach ($params as $k => $v)
$encoded_params[] = urlencode($k).'='.urlencode($v);
$url = "http://api.flickr.com/services/rest/?".implode('&', $encoded_params);
// return response array
return unserialize( file_get_contents($url) );
}
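// Minimal usage sketch (the key and method below are placeholders, not values from this theme):
// $rsp = call_flickr_api_method(array('api_key' => 'YOUR_API_KEY', 'method' => 'flickr.test.echo'));
// if ($rsp['stat'] == 'ok') { /* the key was accepted by Flickr */ }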
// Curl helper function taken and modified from the vimeo example php file:
// https://github.com/vimeo/vimeo-api-examples/blob/master/simple-api/gallery/php-example.php
function vimeo_call($user_id,$request) {
$curl = curl_init( $GLOBALS['vimeo_base_url'] . $user_id . '/' . $request . '.php');
curl_setopt($curl, CURLOPT_RETURNTRANSFER, 1);
curl_setopt($curl, CURLOPT_TIMEOUT, 30);
curl_setopt($curl, CURLOPT_FOLLOWLOCATION, 1);
$return = curl_exec($curl);
curl_close($curl);
return unserialize($return);
}
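// Minimal usage sketch (the user id is a placeholder): vimeo_call('123456', 'albums')
// requests http://vimeo.com/api/v2/123456/albums.php and returns the unserialized response array.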
// check if the api key is valid before updating
function update_flickr_api ($old_key, $new_key) {
$rsp_obj = call_flickr_api_method(array(
'api_key' => $new_key,
'method' => 'flickr.test.echo'
));
_log($rsp_obj['stat']);
// check if flickr saw the api key as valid and update if so
if ($rsp_obj['stat'] == 'ok') {
$GLOBALS['flickr'] = new phpFlickr($new_key);
return $new_key;
} else {
return $old_key;
}
}
// check if the user id works with the flickr api (i.e. we have permission)
// before updating it
function update_flickr_usr ($old_id, $new_id) {
$api_key = get_option('flickr_api_key');
if (! $api_key)
return $old_id;
$rsp_obj = call_flickr_api_method(array(
'api_key' => $api_key,
'user_id' => $new_id,
'method' => 'flickr.people.getInfo'
));
// check if flickr saw the NSID as valid and update if so
if ($rsp_obj['stat'] == 'ok') {
return $new_id;
} else {
return $old_id;
}
}
// check if vimeo user is real before updating the option
function update_vimeo_usr( $old_id, $new_id) {
return (vimeo_call($new_id, 'info') ?
$new_id :
$old_id );
}
add_action('update_option_vimeo_user_id','update_vimeo_usr');
add_action('update_option_flickr_api_key','update_flickr_api');
add_action('update_option_flickr_user_id','update_flickr_usr');
/* Add flickr api key, flickr user id, and vimeo user id as theme customization fields
* to be saved as options values.
*/
function media_customize_register( $wp_customize )
{
$wp_customize->add_section( 'media_integration' , array(
'title' => __('Media Integration'),
'priority' => 30,
'description'=> __('Necessary details for the accounts that are to be integrated into the Theme.')
) );
$wp_customize->add_setting( 'vimeo_user_id' , array(
'default' => '',
'transport' => 'refresh',
'type' => 'option'
) );
$wp_customize->add_control( new WP_Customize_Control( $wp_customize, 'vimeo_id', array(
'label' => __( 'Vimeo User ID' ),
'section' => 'media_integration',
'settings' => 'vimeo_user_id'
) ) );
$wp_customize->add_setting( 'flickr_api_key' , array(
'default' => '',
'transport' => 'refresh',
'type' => 'option'
) );
$wp_customize->add_control( new WP_Customize_Control( $wp_customize, 'flickr_api_key', array(
'label' => __( 'Flickr API Key' ),
'section' => 'media_integration',
'settings' => 'flickr_api_key'
) ) );
$wp_customize->add_setting( 'flickr_user_id' , array(
'default' => '',
'transport' => 'refresh',
'type' => 'option'
) );
$wp_customize->add_control( new WP_Customize_Control( $wp_customize, 'flickr_id', array(
'label' => __( 'Flickr User ID' ),
'section' => 'media_integration',
'settings' => 'flickr_user_id'
) ) );
}
add_action( 'customize_register', 'media_customize_register' );<file_sep>/portfolio/header-works.php
<?php
/**
* @package WordPress
* @subpackage portfolio
*/
?>
<!DOCTYPE html>
<!--[if lt IE 7]> <html class="no-js lt-ie9 lt-ie8 lt-ie7"> <![endif]-->
<!--[if IE 7]> <html class="no-js lt-ie9 lt-ie8"> <![endif]-->
<!--[if IE 8]> <html class="no-js lt-ie9"> <![endif]-->
<!--[if gt IE 8]><!--> <html class="no-js"> <!--<![endif]-->
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1">
<title><?php wp_title('«', true, 'right'); ?> <?php bloginfo('name'); ?> – <?php the_title(); ?></title>
<meta name="description" content="">
<meta name="viewport" content="width=device-width">
<!-- Google Webfonts -->
<link href='http://fonts.googleapis.com/css?family=Playfair+Display:400' rel='stylesheet' type='text/css'>
<link href='http://fonts.googleapis.com/css?family=Open+Sans:400italic,400,300,600' rel='stylesheet' type='text/css'>
<link href='http://fonts.googleapis.com/css?family=Source+Sans+Pro:200,300,400' rel='stylesheet' type='text/css'>
<!-- Stylesheets -->
<?php versioned_stylesheet($GLOBALS["TEMPLATE_RELATIVE_URL"]."css/normalize.css") ?>
<?php versioned_stylesheet($GLOBALS["TEMPLATE_RELATIVE_URL"]."css/ie.css") ?>
<?php versioned_stylesheet($GLOBALS["TEMPLATE_RELATIVE_URL"]."css/style.css") ?>
<!-- Wordpress Templates require a style.css in theme root directory -->
<?php versioned_stylesheet($GLOBALS["TEMPLATE_RELATIVE_URL"]."style.css") ?>
<!-- All JavaScript at the bottom, except for Modernizr which enables HTML5 elements & feature detects -->
<?php versioned_javascript($GLOBALS["TEMPLATE_RELATIVE_URL"]."html5-boilerplate/js/vendor/modernizr-2.6.1.min.js") ?>
</head>
<body>
<!--[if lt IE 7]>
<p class="chromeframe">You are using an <strong>outdated</strong> browser. Please <a href="http://browsehappy.com/">upgrade your browser</a> or <a href="http://www.google.com/chromeframe/?redirect=true">activate Google Chrome Frame</a> to improve your experience.</p>
<![endif]-->
<div id="works-container">
<header class="container">
<nav class="row">
<div class="twelvecol">
<?php
wp_nav_menu(
array(
'theme_location' => 'nav-main',
'menu_class' => 'menu',
'depth' => '1'
)
);
?>
<h1><NAME></h1>
</div>
</nav>
<div class="row">
<div class="twelvecol">
<h2><?php the_title(); ?></h2>
</div>
</div><file_sep>/portfolio/functions/tagline.php
<?php
add_action('init', 'tagline');
function tagline() {
$args = array(
'label' => __('tagline'),
'public' => true,
'show_ui' => true,
'capability_type' => 'post',
'hierarchical' => true,
'has_archive' => false,
'supports' => array('title', 'editor'), );
//Register type and custom taxonomy for this type.
register_post_type( 'taglines' , $args );
}
?><file_sep>/portfolio/works-page.php
<?php
/*
Template Name: Works
*/
?>
<?php
/**
* @package WordPress
*/
?>
<?php get_header('works'); ?>
<script>
(function ($) {
$('#works-container').delegate('.work','click',function () {
var data = {action:'get_work',work_id:$(this).attr('data-id')};
jQuery.post(MyAjax.ajaxurl, data, function(response) {
$('article').html(response);
});
data['action'] = 'get_flickr';
jQuery.post(MyAjax.ajaxurl, data, function(response) {
$("#header-container").jQuerySlider({
images:[ response ]
});
$("html, body").animate({
scrollTop: $("#work").offset().top
}, 1500);
});
});
})(jQuery);
</script>
<div id="works-container">
<?php display_works($post->ID); ?>
</div>
</header>
</div>
<article>
<?php
if ($_POST['workid'] && get_post($_POST['workid'])) {
echo display_work($_POST['workid']);
echo '<script>'.
'$("#header-container").jQuerySlider({'.
'images:['.
display_flickr_photoset_urls($_POST['workid']) .
']'.
'});'.
'$("html, body").animate({' .
'scrollTop: $("#work").offset().top' .
'}, 1500);'.
'</script>';
}
?>
</article>
<!-- End main content -->
<?php get_footer(); ?><file_sep>/portfolio/functions/works.php
<?php
/**
* @package WordPress
*/
/* By <NAME> (http://coding-contemplation.blogspot.com/)
works.php adds functionality for adding "work" a custom post-type as well as
"show" and "award" all of which are interconnected.
*/
// add the 'work', 'show', and 'award' post-types
function add_custom_post_types() {
$labels = array(
'name' => _x('Works', 'post type general name'),
'singular_name' => _x('Work', 'post type singular name'),
'add_new' => _x('Add New', 'work'),
'add_new_item' => __('Add New Work'),
'edit_item' => __('Edit Work'),
'new_item' => __('New Work'),
'all_items' => __('All Works'),
'view_item' => __('View Work'),
'search_items' => __('Search Works'),
'not_found' => __('No Works found'),
'not_found_in_trash' => __('No Works found in Trash'),
'parent_item_colon' => '',
'menu_name' => __('Works')
);
$supports = array( 'title', 'editor', 'thumbnail' );
$args = array(
'labels' => $labels,
'public' => true,
'publicly_queryable' => true,
'show_ui' => true,
'show_in_nav_menus' => false,
'show_in_menu' => true,
'show_in_admin_bar' => true,
'menu_position' => null, // This defaults to below comments ( 60 > , < 25)
// 'menu_icon' => 'url to icon for this menu'
'hierarchical' => false,
'supports' => $supports,
'query_var' => true,
'register_meta_box_cb' => 'works_meta_boxes'
);
/* The 'work' post-type will have the following fields:
* title - title of the work
* item_date - a date for comparison (inputted as month year, stored as unix timestamp)
* editor - description of the work
* related_awards - the associated award(s) for this work (stored as an array of post ids)
* related_shows - the associated show(s) for this work (stored as an array of post ids)
*/
register_post_type('work',$args);
$labels = array(
'name' => _x('Shows', 'post type general name'),
'singular_name' => _x('Show', 'post type singular name'),
'add_new' => _x('Add New', 'show'),
'add_new_item' => __('Add New Show'),
'edit_item' => __('Edit Show'),
'new_item' => __('New Show'),
'all_items' => __('All Shows'),
'view_item' => __('View Show'),
'search_items' => __('Search Shows'),
'not_found' => __('No Shows found'),
'not_found_in_trash' => __('No Shows found in Trash'),
'parent_item_colon' => '',
'menu_name' => __('Shows')
);
$supports = array('editor');
$args = array(
'labels' => $labels,
'public' => true,
'publicly_queryable' => true,
'show_ui' => true,
'show_in_nav_menus' => false,
'show_in_menu' => true,
'show_in_admin_bar' => true,
'menu_position' => null, // This defaults to below comments ( 60 > , < 25)
// 'menu_icon' => 'url to icon for this menu'
'hierarchical' => false,
'supports' => $supports,
'query_var' => true,
'register_meta_box_cb' => 'shows_meta_boxes'
);
/* The 'show' post-type will have the following fields:
 * item_date - a date range array of two values (start, end), stored as unix timestamps
* editor - html string to display as the location (with link to venue)
* related_works - the associated work(s) for this show (stored as an array of post ids)
*/
register_post_type('show',$args);
$labels = array(
'name' => _x('Awards', 'post type general name'),
'singular_name' => _x('Award', 'post type singular name'),
'add_new' => _x('Add New', 'award'),
'add_new_item' => __('Add New Award'),
'edit_item' => __('Edit Award'),
'new_item' => __('New Award'),
'all_items' => __('All Awards'),
'view_item' => __('View Award'),
'search_items' => __('Search Awards'),
'not_found' => __('No Awards found'),
'not_found_in_trash' => __('No Awards found in Trash'),
'parent_item_colon' => '',
'menu_name' => __('Awards')
);
$supports = array( 'title', 'editor');
$args = array(
'labels' => $labels,
'public' => true,
'publicly_queryable' => true,
'show_ui' => true,
'show_in_nav_menus' => false,
'show_in_menu' => true,
'show_in_admin_bar' => true,
'menu_position' => null, // This defaults to below comments ( 60 > , < 25)
// 'menu_icon' => 'url to icon for this menu'
'hierarchical' => false,
'supports' => $supports,
'query_var' => true,
'register_meta_box_cb' => 'awards_meta_boxes'
);
/* The 'award' post-type will have the following fields:
* title - the name of the award
* item_date - the date the accolade was awarded, stored as unix timestamp
* editor - brief description of the award (with links to associated web presence)
* related_works - the associated work(s) for this award (stored as an array of post ids)
*/
register_post_type('award',$args);
}
add_action('init','add_custom_post_types');
/* Add jQuery UI datepicker support when editing 'work', 'show', and 'award' posts.
*/
function edit_custom_post_scripts() {
global $pagenow, $typenow, $wp_scripts;
if ( $pagenow=='post.php' || $pagenow == 'post-new.php') {
if ( $typenow == 'work' ) {
wp_enqueue_script('jquery');
$ui = $wp_scripts->query('jquery-ui-core');
$url = "https://ajax.googleapis.com/ajax/libs/jqueryui/{$ui->ver}/themes/base/jquery-ui.css";
wp_enqueue_style('jqueryuicss', $url, false, $ui->ver);
wp_enqueue_script('monthpicker');
}
if ($typenow == 'show' || $typenow == 'award') {
wp_enqueue_script('jquery');
$ui = $wp_scripts->query('jquery-ui-core');
$url = "https://ajax.googleapis.com/ajax/libs/jqueryui/{$ui->ver}/themes/base/jquery-ui.css";
wp_enqueue_style('jqueryuicss', $url, false, $ui->ver);
wp_enqueue_script('jquery-ui-datepicker');
}
}
}
add_action('admin_enqueue_scripts','edit_custom_post_scripts');
/* Utility function used to initialize the meta boxes on the
* "work" post-type administrative edit page
*/
function works_meta_boxes() {
add_meta_box(
'item_date',
__('Date of Work'),
'add_date_loc_of_item',
'work',
'side'
);
add_meta_box(
'related_events',
__( 'Related Awards and Shows' ),
'add_related_events',
'work',
'normal'
);
add_meta_box(
'related_media',
__( 'Related Media' ),
'add_related_media',
'work',
'side'
);
}
/* Utility function used to initialize the meta boxes on the
* "show" post-type administrative edit page
*/
function shows_meta_boxes(){
add_meta_box(
'show_works',
__( 'Related Works' ),
'add_related_works',
'show',
'normal'
);
add_meta_box(
'item_date',
__('Show\'s Date(s)'),
'add_date_loc_of_item',
'show',
'side'
);
}
/* Utility function used to initialize the meta boxes on the
* "award" post-type administrative edit page
*/
function awards_meta_boxes(){
add_meta_box(
'award_works',
__( 'Related Works' ),
'add_related_works',
'award',
'normal'
);
add_meta_box(
'item_date',
__('Date of Award'),
'add_date_loc_of_item',
'award',
'side'
);
}
function admin_footer_scripts() {
global $post;
switch (get_post_type($post->ID)) {
case "work":
echo "<script type='text/javascript'>
(function($) {
$('#work-datepicker').monthpicker( {
pattern: 'yyyy-mm'
});
})(jQuery);
</script>";
break;
case "show":
echo "<script type='text/javascript'>
(function($) {
$('#show-datepicker-from').datepicker( {
changeMonth: true,
changeYear: true,
showButtonPanel: true,
dateFormat: 'yy-mm-dd',
onClose: function( selectedDate ) {
$( '#show-datepicker-to' ).datepicker( 'option', 'minDate', selectedDate );
}
});
})(jQuery);
(function($) {
$('#show-datepicker-to').datepicker( {
changeMonth: true,
changeYear: true,
showButtonPanel: true,
dateFormat: 'yy-mm-dd',
onClose: function( selectedDate ) {
$( '#show-datepicker-from' ).datepicker( 'option', 'maxDate', selectedDate );
}
});
})(jQuery);
</script>";
break;
case "award":
echo "<script type='text/javascript'>
(function($) {
$('#award-datepicker').datepicker( {
changeMonth: true,
changeYear: true,
showButtonPanel: true,
dateFormat: 'yy-mm-dd'
});
})(jQuery);
</script>";
break;
}
}
add_action( 'admin_print_footer_scripts', 'admin_footer_scripts' );
/* Adds the date/location metabox to the work edit page
* Used code from http://stackoverflow.com/questions/2208480/jquery-ui-datepicker-to-show-month-year-only
* to limit picking of date to only month year for works.
*/
function add_date_loc_of_item( $post ) {
switch (get_post_type($post->ID)){
case "work":
echo "<label for='item_date'>Date of Work:</label>";
echo "<input type='text' id='work-datepicker' name='item_date' readonly>";
break;
case "show":
echo "<div>Date(s) of Show</div>";
echo "<label for='item_date_from'>From: </label>";
echo "<input type='text' id='show-datepicker-from' name='item_date_from' readonly><br/>";
echo "<label for='item_date_to'> To: </label>";
echo "<input type='text' id='show-datepicker-to' name='item_date_to' readonly>";
break;
case "award":
echo "<label for='item_date'>Date of Work:</label>";
echo "<input type='text' id='award-datepicker' name='item_date' readonly>";
break;
}
}
// Adds the related awards and shows metabox to the work edit page
function add_related_events( $post ) {
// nonce used for verification by update_custom_post_type
wp_nonce_field(get_post_type($post->ID),'custom_nonce');
// query all posts with the 'award' post-type
$args = array(
'post_type' => 'award',
'order' => 'ASC'
);
$awards = get_posts($args);
if ($awards) {
// display the awards in a select table and preselect the terms that have the current work's id
// stored in the related_works post meta field
echo "<span>";
echo "<label for='related_awards[]'>Related Award(s)</label>";
echo "<select name='related_awards[]' multiple='multiple'>";
foreach($awards as $award) {
echo "<option value='" . $award->ID . "'" .
(get_post_custom_values($award->ID,'related_works') &&
in_array($post->ID, get_post_custom_values($award->ID,'related_works'))
? " selected='selected'":"") .
">" . get_the_title($award->ID) . ' ' . get_post_meta($award->ID,'showtime') . "</option>";
}
echo "</select></span>";
} else {
echo "<span>No Awards</span>";
}
// change query for all posts of 'show' post-type
$args['post_type'] = 'show';
$shows = get_posts($args);
echo "<span style='margin-left:2em'>";
if ($shows) {
// display the shows in a select table and preselect those which have the current work's id
// stored in the related_works post meta field
echo "<label for='related_shows[]'>Related Show(s)</label>";
echo "<select name='related_shows[]' multiple='multiple'>";
foreach($shows as $show) {
echo "<option value='" . $show->ID . "'" .
(get_post_custom_values($show->ID,'related_works') &&
in_array($post->ID, get_post_custom_values($show->ID,'related_works'))
? " selected='selected":"") .
">" . get_the_title($show->ID) . ' ' . get_post_meta($show->ID,'showtime') . "</option>";
}
echo "</select>";
} else {
echo "No Shows";
}
echo "</span>";
}
// Adds the related works metabox to the award and show edit page
function add_related_works( $post ) {
// used in update_custom_post_type for verification
wp_nonce_field(get_post_type($post->ID),'custom_nonce');
// query all posts with the 'work' post-type
$args = array(
'post_type' => 'work',
'order' => 'ASC'
);
$works = get_posts($args);
// display a select array of all works and preselects the works which are in the 'related_works'
// post meta field of the current award or show
echo "<label for='related_works'>Related Work(s)</label>";
echo "<select name='related_works' multiple='multiple'>";
foreach($works as $work) {
echo "<option value='" . $work->ID . "'" .
(get_post_custom_values($post->ID,'related_works') &&
in_array($work->ID, get_post_custom_values($post->ID,'related_works'))
? " selected='selected'":"") .
">" . get_the_title($work->ID) . "</option>";
}
echo "</select>";
}
// Adds the flickr photoset selection and vimeo album selection
// to the work edit page with respect to the media settings in the
// theme options
function add_related_media( $post ) {
$flickr_user_id = get_option('flickr_user_id');
if(isset($GLOBALS['flickr']) && $flickr_user_id) {
global $flickr;
$photosets = $flickr->photosets_getList($flickr_user_id);
echo "<label for='flickr_photoset'>Photoset for Header:</label>";
echo "<select name='flickr_photoset'>";
foreach($photosets['photosets']['photoset'] as $photoset) {
echo "<option value='" . $photoset['id'] . "'" .
(get_post_meta($post->ID,'related_photoset') &&
in_array($photoset['id'], get_post_meta($post->ID,'related_photoset'))
? " selected='selected'":"") .
">" . $photoset['title']['_content'] . "</option>";
}
echo "</select>";
} else {
echo "<span>Your Flickr integration settings are incorrect.<br/></span>";
}
$vimeo_user_id = get_option('vimeo_user_id');
if($vimeo_user_id) {
echo "<label for='vimeo_album'>Vimeo Album to Include:</label>";
echo "<select name='vimeo_album'>";
$vimeo_albums = vimeo_call( $vimeo_user_id , 'albums' );
foreach($vimeo_albums as $album) {
echo "<option value='" . $album['id'] . "'" .
(get_post_meta($post->ID,'related_vimeo') &&
in_array($album['id'], get_post_meta($post->ID,'related_vimeo'))
? " selected='selected'":"") .
">" . $album['title'] . "</option>";
}
echo "</select>";
} else {
echo "<span>Your Vimeo integration settings are incorrect.<br/></span>";
}
echo "<div>" . get_option('vimeo_user_id') . " -- " .
get_option('flickr_user_id') . " -- " .
get_option('flickr_api_key') . " -- " .
(isset($GLOBALS['flickr'])?"true":"false") . "</div>";
}
/* Called on the update or addition of posts. Used to update
* or set the post-meta for the post-types: "work", "show", "award"
*/
function update_custom_post_type( $post_id ) {
// check if we're not autosaving, the nonce is correct, and the user has permission
if ((defined( 'DOING_AUTOSAVE' ) && DOING_AUTOSAVE) ||
!wp_verify_nonce( $_POST['custom_nonce'], $_POST['post_type'] ) ||
!current_user_can( 'edit_page', $post_id ))
return;
// save the specified fields given the post-type
switch ($_POST['post_type']) {
case "work":
$diff = array_merge(array_diff($_POST['related_awards'] + $_POST['related_shows'],
get_post_custom_values($post_id,'related_awards') + get_post_custom_values($post_id,'related_shows')));
foreach ($diff as $event) {
$related_works = get_post_custom_values($event,'related_works');
if (!in_array($post_id,$related_works))
array_push($related_works,$post_id);
else
$related_works = array_remove($post_id,$related_works);
// keep the reverse relation on the award/show in sync
update_post_meta($event,'related_works',$related_works);
}
update_post_meta($post_id,'related_awards',$_POST['related_awards']);
update_post_meta($post_id,'related_shows',$_POST['related_shows']);
if ($_POST['item_date'])
update_post_meta($post_id,'item_date',array(strtotime($_POST['item_date'])));
if ($_POST['related_photoset'])
update_post_meta($post_id,'related_photoset',$_POST['related_photoset']);
if ($_POST['related_vimeo'])
update_post_meta($post_id,'related_vimeo',$_POST['related_vimeo']);
break;
case "show":
$diff = array_merge(array_diff((array) $_POST['related_works'],get_post_custom_values($post_id,'related_works')));
foreach ($diff as $work) {
$related_shows = get_post_custom_values($work,'related_shows');
if (!in_array($post_id,$related_shows))
array_push($related_shows,$post_id);
else
$related_shows = array_remove($post_id,$related_shows);
// keep the reverse relation on the work in sync
update_post_meta($work,'related_shows',$related_shows);
}
update_post_meta($post_id,'related_works',$_POST['related_works']);
if ($_POST['item_date_to'] || $_POST['item_date_from']) {
$event_time = array();
if ($_POST['item_date_from'])
array_push($event_time,strtotime($_POST['item_date_from']));
if ($_POST['item_date_to'])
array_push($event_time,strtotime($_POST['item_date_to']));
}
if ($event_time)
update_post_meta($post_id,'item_date',$event_time);
break;
case "award":
$diff = array_merge(array_diff((array) $_POST['related_works'],get_post_custom_values($post_id,'related_works')));
foreach ($diff as $work) {
$related_awards = get_post_custom_values($work,'related_awards');
if (!in_array($post_id,$related_awards))
array_push($related_awards,$post_id);
else
$related_awards = array_remove($post_id,$related_awards);
// keep the reverse relation on the work in sync
update_post_meta($work,'related_awards',$related_awards);
}
update_post_meta($post_id,'related_works',$_POST['related_works']);
if ($_POST['item_date'])
update_post_meta($post_id,'item_date',array(strtotime($_POST['item_date'])));
break;
}
}
add_action('save_post','update_custom_post_type');
/* Used on the "works" page to display every work as its
* corresponding image linking to its respective page.
* @param (int) $work_id - used to highlight the work that is currently being shown
*/
function display_works( $work_id ) {
// Query a list of all 'work' posts and order them by the meta field 'item_date'
// from newest to oldest (highest to lowest, since the dates are stored as UNIX timestamps)
$args = array(
'post_type' => 'work',
'orderby' => 'meta_value_num',
'meta_key' => 'item_date',
'order' => 'DESC'
);
$works = get_posts($args);
// for each 'work' post display it and the title of that work for use in the main work page
foreach ($works as $work) {
echo "<div data-id='" . $work->ID . "' class='work" .
($work_id == $work->ID ? " current" : "") . "'>";
echo "<img url='" . (post_has_thumbnail($work->ID) ?
wp_get_attachment_thumb_url(get_post_thumbnail_id($work->ID)) :
"") .
"' >";
echo "<span>" . get_the_title($work->ID) . "</span>";
echo "</div>";
}
}
/* Used to display a chronological list of shows and/or awards associated with
* the post id passed as the parameter. A null parameter will list all awards and shows.
* If work_id is set, the associated work(s) column is left out.
 * Sortability will be added later.
* @param (int) $work_id - the id of the work whose associated shows and awards will be listed
*/
function list_attributed( $work_id = false ) {
$args = array(
'post_type' => array('award','show'),
'orderby' => 'meta_value_num',
'meta_key' => 'item_date',
'order' => 'DESC'
);
if ($work_id)
$args['meta_query'] = array(
array(
'key' => 'related_works',
'value' => $work_id,
'compare' => 'LIKE'
)
);
// Retrieve a chronologically sorted list of associated (or not) 'show' and 'award' posts
$attributed = get_posts($args);
$num_items = count($attributed);
$item_count = 0;
// If there are no associated shows or awards then don't display the container
if ($num_items > 0) {
// Display the table header
echo "<table>" .
"<thead>" .
"<td>".
"<th>Name or Location</th>".
"<th>Date</th>".
($work_id ? "" : "<th>Attributed Work</th>") .
"</td>".
"</thead>".
"<tbody>";
function title_util_func($n) {
return get_post_field('post_title',$n);
}
// Display each
foreach ($attributed as $item) {
// Get an imploded list of the titles of works associated with this show or award
if (!$work_id) {
$related_works = get_post_custom_values($item->ID,'related_works');
$disp_works = implode( ', ',
array_map(
'title_util_func',
$related_works)
);
}
// Get the date, if a new year has been reached, close the old row group,
// create a new row group, and display the year as a header for that group
$item_dates = array_map('intval',get_post_custom_values($item->ID,'item_date'));
if ( !$disp_year || intval(date('Y',$item_dates[0])) < $disp_year ) {
$disp_year = intval(date('Y',$item_dates[0]));
echo "<tr><td>" . $disp_year . "</td></tr>";
}
$time_format = "%a %m/%d";
// Display the various attributes of the item. If the date is a range, display it as such.
echo "<tr>";
echo "<td>" . get_post_field('post_content',$item->ID) . "</td>".
"<td>" . strftime($time_format,$item_dates[0]) .
($item_type == "show" && isset($item_dates[1]) ?
" - " . strftime($time_format,$item_dates[1]) :
"" ) .
"</td><td>";
if (!$work_id) {
$disp_works = array();
foreach ($related_works as $work)
array_push($disp_works,"<a href='" . get_permalink($work) . "'>"
. get_post_field('post_title',$work) . "</a>");
echo implode(',',$disp_works);
}
echo "</td></tr>";
}
echo "</tbody></table>";
}
}
// used when setting up the slideshow after ajaxing the html content on the works page
add_action('wp_ajax_nopriv_get_flickr','ajax_flickr');
function ajax_flickr() {
echo display_flickr_photoset_urls($_POST['work_id']);
exit();
}
// @param str $work_id - the post id of the work post
// returns a string of a comma separated list of quoted urls of the images for the slideshow
function display_flickr_photoset_urls($work_id) {
if (empty($GLOBALS['flickr'])){
return "";
}
$photoset_id = get_post_meta($work_id, 'related_photoset', true);
$flickr_return = $GLOBALS['flickr']->photosets_getPhotos($photoset_id);
$photos = Array();
foreach ($flickr_return['photoset']['photo'] as $photo)
array_push($photos,$GLOBALS['flickr']->buildPhotoURL($photo));
return '"' . implode('","',$photos) . '"';
}
// @param str $work_id - the post id of the work post
// returns a string of embed html vimeo videos related to the work post
function display_vimeo_album($work_id) {
if( !get_post_meta($work_id, 'related_vimeo', true) )
return "";
$output = "";
$vims = vimeo_call('album/'.get_post_meta($work_id, 'related_vimeo', true),'videos');
foreach ($vims['videos'] as $video) {
$output .= do_shortcode('[vimeo ' . $video['id'] . ']');
}
return $output;
}
// used with the work page to ajax the selected work's html
add_action('wp_ajax_nopriv_get_work','ajax_works_page');
function ajax_works_page() {
echo display_work($_POST['work_id']);
exit();
}
// creates the html for the specific work
function display_work($work_id) {
return '<header id="header-container">'.
'<a id="work"></a>'.
'<div class="row"><h2>'.
get_the_title($work_id) .
'</h2></div>'.
'</header>'.
'<div class="main-container extra-padding">'.
'<div class="container">'.
'<div class="row">'.
'<div class="sixcol">'.
get_post_field('post_content',$work_id) .
'</div>'.
'<div class="sixcol last">'.
list_attributed($work_id) .
display_vimeo_album($work_id) .
'</div>'.
'</div>'.
'</div>'.
'</div>';
}<file_sep>/portfolio/functions.php
<?php
/**
* @package WordPress
* @subpackage HTML5_Boilerplate
*/
// --------------------- START DEBUGGING
if(!function_exists('_log')){
function _log( $message ) {
if( WP_DEBUG === true ){
if( is_array( $message ) || is_object( $message ) ){
error_log( print_r( $message, true ) );
} else {
error_log( $message );
}
}
}
}
// -------------------- END DEBUGGING
// Register the jQuery plugin I made for the front page slideshow
wp_register_script( 'jQuerySlider',
get_template_directory_uri() . "/js/jQuerySlider.js",
array("jquery"));
// Enable categories for attachments so that they can be queried for the
// front-page slideshow
add_action('admin_init', 'reg_tax');
function reg_tax() {
register_taxonomy_for_object_type('category', 'attachment');
add_post_type_support('attachment', 'category');
}
// include the media integration functions and libraries
// include the functions related to custom post types 'work', 'show', 'award'
include(get_template_directory() . "/functions/works.php");
include(get_template_directory() . "/functions/media_integration.php");
include(get_template_directory() . "/functions/phpFlickr/phpFlickr.php");
// Custom HTML5 Comment Markup
function mytheme_comment($comment, $args, $depth) {
$GLOBALS['comment'] = $comment; ?>
<li>
<article <?php comment_class(); ?> id="comment-<?php comment_ID(); ?>">
<header class="comment-author vcard">
<?php echo get_avatar($comment,$size='48',$default='<path_to_url>' ); ?>
<?php printf(__('<cite class="fn">%s</cite> <span class="says">says:</span>'), get_comment_author_link()) ?>
<time><a href="<?php echo htmlspecialchars( get_comment_link( $comment->comment_ID ) ) ?>"><?php printf(__('%1$s at %2$s'), get_comment_date(), get_comment_time()) ?></a></time>
<?php edit_comment_link(__('(Edit)'),' ','') ?>
</header>
<?php if ($comment->comment_approved == '0') : ?>
<em><?php _e('Your comment is awaiting moderation.') ?></em>
<br />
<?php endif; ?>
<?php comment_text() ?>
<nav>
<?php comment_reply_link(array_merge( $args, array('depth' => $depth, 'max_depth' => $args['max_depth']))) ?>
</nav>
</article>
<!-- </li> is added by wordpress automatically -->
<?php
}
automatic_feed_links();
// Widgetized Sidebar HTML5 Markup
if ( function_exists('register_sidebar') ) {
register_sidebar(array(
'before_widget' => '<section>',
'after_widget' => '</section>',
'before_title' => '<h2 class="widgettitle">',
'after_title' => '</h2>',
));
}
// Custom Functions for CSS/Javascript Versioning
$GLOBALS["TEMPLATE_URL"] = get_bloginfo('template_url')."/";
$GLOBALS["TEMPLATE_RELATIVE_URL"] = wp_make_link_relative($GLOBALS["TEMPLATE_URL"]);
// Add ?v=[last modified time] to style sheets
function versioned_stylesheet($relative_url, $add_attributes=""){
echo '<link rel="stylesheet" href="'.versioned_resource($relative_url).'" '.$add_attributes.'>'."\n";
}
// Add ?v=[last modified time] to javascripts
function versioned_javascript($relative_url, $add_attributes=""){
echo '<script src="'.versioned_resource($relative_url).'" '.$add_attributes.'></script>'."\n";
}
// Add ?v=[last modified time] to a file url
function versioned_resource($relative_url){
$file = $_SERVER["DOCUMENT_ROOT"].$relative_url;
$file_version = "";
if(file_exists($file)) {
$file_version = "?v=".filemtime($file);
}
return $relative_url.$file_version;
}
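// Usage sketch: assuming css/style.css exists under the document root, the call below would emit
// something like <link rel="stylesheet" href="/wp-content/themes/portfolio/css/style.css?v=1357924680">
// versioned_stylesheet($GLOBALS["TEMPLATE_RELATIVE_URL"]."css/style.css");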
/* Add footer menu and main menu support as well as a custom nav walker
* for the footer menu
* */
function registerMenus () {
register_nav_menus( array(
'nav-main' => __( 'Top menu' ),
'nav-footer' => __( 'Footer Navigation' )
));
}
add_action('init','registerMenus');
// declare a global javascript variable for the url to the php file that handles ajax requests
wp_localize_script( 'my-ajax-request', 'MyAjax', array( 'ajaxurl' => admin_url( 'admin-ajax.php' ) ) );
// add a second editor box to the about-template edit page so that a separate 'biography' field can be edited alongside the main page content
add_action('edit_page_form','add_second_metabox_about');
function add_second_metabox_about(){
$post_id = $_GET['post'] ? $_GET['post'] : $_POST['post_ID'] ;
$template_file = get_post_meta($post_id,'_wp_page_template',TRUE);
if ($template_file == 'about.php')
wp_editor(get_post_meta($post_id,'biography',TRUE),'biography');
}
// save the about page biography box
add_action('save_post','save_about_data');
function save_about_data( $post_id ) {
$template_file = get_post_meta($post_id,'_wp_page_template',TRUE);
if ( $template_file == 'about.php')
update_post_meta($post_id, 'biography', $_POST['biography']);
}
// Remove the ability to add edit posts from the admin menu
// as we do not use the basic "post" post type
function remove_admin_menu_blog () {
// with WP 3.1 and higher
if ( function_exists( 'remove_menu_page' ) ) {
remove_menu_page( 'edit.php' );
remove_menu_page( 'edit-comments.php' );
}
}
add_action( 'admin_menu', 'remove_admin_menu_blog' );
// Utility function for removing an element from an array.
// Removes the first element of the array whose value is given
// Returns the modified array
// @param int $val - the value of the element to remove
// @param array &$array - an array to remove the element from
// Modified from: http://webit.ca/2011/08/php-array_remove/
function array_remove($val, &$array) {
foreach ($array as $i => $v) {
if ($val == $v){
unset($array[$i]);
return array_merge($array);
}
}
}
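// Usage sketch (illustrative values): removes the first matching element and reindexes the array.
// $ids = array(7, 12, 7);
// $ids = array_remove(7, $ids);   // $ids is now array(12, 7)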
// register scripts/styles
function register_custom_scripts_styles(){
// Use a month picker for the work edit page. Credit to:
// http://lucianocosta.info/jquery.mtz.monthpicker/
wp_register_script(
'monthpicker',
get_template_directory_uri() . '/js/jquery.mtz.monthpicker.js',
array('jquery')
);
}
add_action('init','register_custom_scripts_styles');
| f5bcea4b78e13f7f8b4cf673b1023146c632a30e | ["JavaScript", "PHP"] | 10 | JavaScript | hannaliebl/sandramathern.com | e7eaa64af5f7e1c40c10bcffc4712590bbfd9824 | 07405dd1a43e1c31f7fd45af5e8cecd8ef7ac9c1 | refs/heads/master |
<file_sep>using Microsoft.AspNetCore.Mvc;
namespace LoadGeneratorService.Controllers
{
[Route("probe")]
public class HealthController : Controller
{
// GET probe/health
[HttpGet("health")]
public bool Health()
{
return true;
}
// GET probe/readiness
[HttpGet("readiness")]
public bool Readiness()
{
return true;
}
}
}<file_sep>using System;
using Microsoft.AspNetCore.Http;
namespace LoadGeneratorService.Middleware.BasicLogging
{
public class LoggingMiddleware : Middleware
{
public LoggingMiddleware(RequestDelegate next) : base(next)
{
}
protected override void BeforeRequest(HttpContext context)
{
var host = context.Request.Host;
var path = context.Request.Path;
Console.WriteLine($"Request : {host + path}");
}
protected override void AfterRequest(HttpContext context)
{
Console.WriteLine($"Response : {context.Response.StatusCode}");
}
protected override void OnError(HttpContext context)
{
}
}
}
<file_sep>using System;
using System.Threading.Tasks;
using LoadGeneratorService.LoadGenerator;
using Microsoft.AspNetCore.Mvc;
namespace LoadGeneratorService.Controllers
{
[Route("api/load")]
public class LoadController : Controller
{
private readonly ILoad _load;
public LoadController(ILoad load)
{
_load = load;
}
// GET api/load/5
[HttpGet("{loadValue}")]
public async Task<string> Get(int loadValue)
{
var validationRequest = HttpContext.Request.Query["validate"].ToString();
var primes = await _load.ExecuteLoad(loadValue, !IsNullOrEmptyString(validationRequest));
return string.Join(", ", primes);
}
private bool IsNullOrEmptyString(string val)
{
if (val == null)
{
return true;
}
if (val.Equals(string.Empty))
{
return true;
}
return false;
}
}
}
<file_sep>using LoadGeneratorService;
using LoadGeneratorService.LoadGenerator;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
namespace LoadGenerationService.IntegrationTests.InjectionTests
{
public class TestStartUp : Startup
{
public TestStartUp(IConfiguration configuration) : base(configuration)
{
}
public void ConfigureTestServices(IServiceCollection services)
{
services.AddMvc();
services.AddTransient<ILoad, StubLoad>();
}
}
}
<file_sep>using System.Collections.Generic;
using System.Net.Http;
using System.Threading.Tasks;
using Newtonsoft.Json;
namespace LoadGeneratorService.LoadGenerator
{
public interface ILoad
{
Task<IList<int>> ExecuteLoad(int upTo, bool validate);
}
public class ValidationResponse
{
public ValidationResponse(bool validationResponse)
{
IsValid = validationResponse;
}
public bool IsValid { get; set; }
}
public class PrimeFinder : ILoad
{
public async Task<IList<int>> ExecuteLoad(int upTo, bool validate)
{
var primes = new List<int>();
for (var i = 2; i < upTo; i++)
{
var p = 0;
for (var j = 2; j < i; j++)
{
if (i % j == 0)
p = 1;
}
if (p != 0)
{
continue;
}
if (validate)
{
var validationResult = await Validate(i);
if (!validationResult)
{
continue;
}
}
primes.Add(i);
}
return primes;
}
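// Usage sketch (illustrative): awaiting ExecuteLoad(10, false) yields { 2, 3, 5, 7 } by trial
// division alone; with validate: true each candidate prime is additionally confirmed by the
// validation service (Validate below) before it is added to the result list.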
private async Task<bool> Validate(int value)
{
using (var client = new HttpClient())
using (var response = await client.GetAsync("http://127.0.0.1:5001/api/validation/" + value.ToString()))
{
var data = await response.Content.ReadAsStringAsync();
var validationResponse = JsonConvert.DeserializeObject<ValidationResponse>(data);
return validationResponse.IsValid;
}
}
}
}
<file_sep>namespace LoadGeneratorService.LoadGenerator
{
public interface IExecutable
{
void Start();
void Stop();
}
}
<file_sep>using System.Threading;
using System.Threading.Tasks;
namespace LoadGeneratorService.LoadGenerator
{
public interface IBackgroundLoadGenerator : IExecutable
{
int SleepInterval { get; set; }
}
public class BackgroundLoadGenerator : IBackgroundLoadGenerator
{
private bool _executing = false;
private readonly CancellationTokenSource _cancellationTokenSource;
private IBackgroundLoadExecutor _backgroundLoadExecutor;
public BackgroundLoadGenerator(IBackgroundLoadExecutor backgroundLoadExecutor)
{
_cancellationTokenSource = new CancellationTokenSource();
_backgroundLoadExecutor = backgroundLoadExecutor;
}
public int SleepInterval { get; set; }
public void Start()
{
if (_executing)
{
return;
}
_executing = true;
Task.Run(() =>
{
while (_executing)
{
Thread.Sleep(SleepInterval);
_backgroundLoadExecutor.BlockingCollection.Add(5000);
}
}, _cancellationTokenSource.Token);
}
public void Stop()
{
_executing = false;
_cancellationTokenSource.Cancel();
}
}
}
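// Usage sketch (mirrors the wiring in Startup.ConfigureServices): the generator pushes a work item
// onto the executor's BlockingCollection every SleepInterval milliseconds, and the executor's
// consumer loop picks each item up on a background task and runs ExecuteLoad for it.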
<file_sep>using System;
using Autofac;
using Autofac.Extensions.DependencyInjection;
using LoadGeneratorService.LoadGenerator;
using LoadGeneratorService.Middleware.BasicLogging;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
namespace LoadGeneratorService
{
public class Startup
{
public Startup(IConfiguration configuration)
{
Configuration = configuration;
}
public IConfiguration Configuration { get; }
// This method gets called by the runtime. Use this method to add services to the container.
public IServiceProvider ConfigureServices(IServiceCollection services)
{
services.AddMvc();
var builder = new ContainerBuilder();
builder.Populate(services);
builder.RegisterType<PrimeFinder>().As<ILoad>();
builder.RegisterType<BackgroundLoadExecutor>().As<IBackgroundLoadExecutor>().SingleInstance();
builder.Register(c => new BackgroundLoadGenerator(c.Resolve<IBackgroundLoadExecutor>()))
.As<IBackgroundLoadGenerator>();
var container = builder.Build();
var loadExecutor = container.Resolve<IBackgroundLoadExecutor>();
loadExecutor.Start();
var loadGenerator = container.Resolve<IBackgroundLoadGenerator>();
loadGenerator.SleepInterval = 500;
loadGenerator.Start();
return new AutofacServiceProvider(container);
}
// This method gets called by the runtime. Use this method to configure the HTTP request pipeline.
public void Configure(IApplicationBuilder app, IHostingEnvironment env)
{
if (env.IsDevelopment())
{
app.UseDeveloperExceptionPage();
}
app.UseLogging();
app.UseMvc();
}
}
}
<file_sep>using System;
using System.Collections.Concurrent;
using System.Threading;
using System.Threading.Tasks;
namespace LoadGeneratorService.LoadGenerator
{
public interface IBackgroundLoadExecutor : IExecutable
{
BlockingCollection<int> BlockingCollection { get; }
}
public class BackgroundLoadExecutor : IBackgroundLoadExecutor
{
private readonly ILoad _load;
private bool _executing = false;
private readonly CancellationTokenSource _cancellationTokenSource;
public BlockingCollection<int> BlockingCollection { get; set; }
public BackgroundLoadExecutor(ILoad load)
{
_cancellationTokenSource = new CancellationTokenSource();
BlockingCollection = new BlockingCollection<int>();
_load = load;
}
public void Start()
{
if (_executing)
{
return;
}
_executing = true;
Task.Run(() =>
{
try
{
foreach (var item in BlockingCollection.GetConsumingEnumerable())
{
_load.ExecuteLoad(item, false);
}
}
catch (Exception ex)
{
Console.WriteLine(ex.Message);
}
Console.WriteLine("Exiting the background load executor thread");
}, _cancellationTokenSource.Token);
}
public void Stop()
{
_executing = false;
_cancellationTokenSource.Cancel();
}
}
}
<file_sep>using Microsoft.AspNetCore.Mvc;
namespace ControllerExtension
{
[Route("api/addition")]
public class AdditionController : Controller
{
[HttpGet("add/{first}/{second}")]
public string Index(int first, int second)
{
return (first + second).ToString();
}
}
}<file_sep>using Microsoft.AspNetCore.Builder;
namespace LoadGeneratorService.Middleware.BasicLogging
{
public static class LoggingMiddlewareExtension
{
public static IApplicationBuilder UseLogging(this IApplicationBuilder builder)
{
return builder.UseMiddleware<LoggingMiddleware>();
}
}
}
<file_sep>using System.Threading.Tasks;
using FluentAssertions;
using LoadGeneratorService;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.TestHost;
using NUnit.Framework;
namespace LoadGenerationService.IntegrationTests
{
[TestFixture]
public class DefaultTest
{
[Test]
public async Task TestMethod1()
{
var webHostBuilder = new WebHostBuilder()
.UseEnvironment("Test")
.UseStartup<Startup>();
using (var server = new TestServer(webHostBuilder))
using (var client = server.CreateClient())
{
string result = await client.GetStringAsync("/api/load/8");
result.Should().Be("2, 3, 5, 7");
}
}
}
}
<file_sep>using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using LoadGeneratorService;
using LoadGeneratorService.LoadGenerator;
namespace LoadGenerationService.IntegrationTests.InjectionTests
{
public class StubLoad : ILoad
{
public Task<IList<int>> ExecuteLoad(int upTo, bool validate)
{
IList<int> elem = Enumerable.Range(0, upTo).ToList();
return Task.FromResult(elem);
}
}
}
<file_sep>using System;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;
namespace LoadGeneratorService.Middleware
{
public abstract class Middleware
{
protected readonly RequestDelegate _next;
protected Middleware(RequestDelegate next)
{
_next = next;
}
public async Task InvokeAsync(HttpContext context)
{
BeforeRequest(context);
try
{
await _next(context);
}
catch (Exception e)
{
OnError(context);
}
AfterRequest(context);
}
protected abstract void BeforeRequest(HttpContext context);
protected abstract void AfterRequest(HttpContext context);
protected abstract void OnError(HttpContext context);
}
}
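// Usage sketch: LoggingMiddleware derives from this template and is wired in via app.UseLogging()
// in Startup.Configure, so BeforeRequest/AfterRequest bracket every request in the pipeline.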
| 98f0f2e3c3b3f598143d9e405706234bed1224ba | ["C#"] | 14 | C# | destanoglu/LoadGeneratorService | ccf1caa859b339a339e4c6379d46ea3b489ce69c | e36ec26d404de3f32de6c8865de94fc856b8a64a | refs/heads/master |
<file_sep>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package mandatoryassignment;
import java.io.File;
import javax.swing.JTextArea;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Document;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
/**
*
* @author Nichlas
*/
public class DomParser {
public DomParser(String file, JTextArea textArea) {
try {
textArea.setText("");
File myxmlfile = new File(file);
DocumentBuilderFactory builderFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder builder = builderFactory.newDocumentBuilder();
Document doc = builder.parse(myxmlfile);
textArea.append("Root element :" + doc.getDocumentElement().getNodeName());
if (doc.hasChildNodes()) {
printNode(doc.getChildNodes(), textArea);
}
} catch (Exception e) {
System.out.println(e.getMessage());
textArea.setText(e.getMessage());
}
}
private static void printNode(NodeList nodeList, JTextArea textArea) {
for (int i = 0; i < nodeList.getLength(); i++) {
Node mynode = nodeList.item(i);
if (mynode.getNodeType() == Node.ELEMENT_NODE) {
textArea.append("\n");
textArea.append("Node " + mynode.getNodeName() + " opened");
textArea.append("Node " + mynode.getNodeName() + " value:");
textArea.append(" { " + mynode.getTextContent() + " }");
if (mynode.hasAttributes()) {
NamedNodeMap nodeMap = mynode.getAttributes();
for (int j = 0; j < nodeMap.getLength(); j++) {
Node attnode = nodeMap.item(j);
textArea.append("Node " + mynode.getNodeName() + " attributes: ");
textArea.append(" [attr name = " + attnode.getNodeName());
textArea.append(", attr value = " + attnode.getNodeValue() + "]\n");
}
}
if (mynode.hasChildNodes()) {
printNode(mynode.getChildNodes(), textArea);
}
}
}
}
}
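// Usage sketch (the file name and JTextArea instance are illustrative):
// new DomParser("productOrder.xml", myTextArea);
// recursively walks the DOM and appends each element name, its text content and its attributes to the text area.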
<file_sep>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package validation;
import java.io.File;
import javax.xml.XMLConstants;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;
import javax.xml.validation.Validator;
import org.w3c.dom.Document;
/**
*
* @author Rasmus
*/
public class DOMXMLValidator {
public String validateXmlFile(String xmlPath, String xsdPath) {
File myxmlfile = new File(xmlPath);
File myschemafile = new File(xsdPath);
Schema schema = null;
// load an XML Schema into the Schema instance
try
{
String language = XMLConstants.W3C_XML_SCHEMA_NS_URI;
SchemaFactory factory = SchemaFactory.newInstance(language);
schema = factory.newSchema(myschemafile);
// Parse the XML document to DOM Document, and then validate it
DocumentBuilderFactory builderFactory = DocumentBuilderFactory.newInstance();
builderFactory.setNamespaceAware(true);
DocumentBuilder builder = builderFactory.newDocumentBuilder();
Document doc = builder.parse(myxmlfile);
DOMSource source = new DOMSource(doc);
Validator validator = schema.newValidator();
validator.validate(source);
}
catch (Exception e)
{
e.printStackTrace();
return "Validation failed \n " + e.getMessage();
}
return "Succesfully validated xml file against its schema";
}
}
<file_sep>
package xpath;
import java.io.File;
import java.io.IOException;
import javax.swing.JTextArea;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpression;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.w3c.dom.Document;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
/**
*
* @author Nichlas
*/
public class XpathSearch {
public void getShippingInformationByCountry(JTextArea textArea, String query) throws SAXException, IOException, XPathExpressionException, ParserConfigurationException {
File myxmlfile = new File("productOrder.xml");
textArea.setText("");
// Path expressions
//String str1 = "//Employee[name='Jarl']/role/text()";
//String str2 = "count(//Employee[role='Manager'])";
String str1 = "//ShippingInformation[Country='"+Querry+"']/*/text()";
// String str2 = "";
// standard for reading an XML file
DocumentBuilderFactory builderFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder builder = builderFactory.newDocumentBuilder();
Document doc = builder.parse(myxmlfile);
// create an XPathFactory and an XPath object
XPathFactory xFactory = XPathFactory.newInstance();
XPath xpath = xFactory.newXPath();
XPathExpression expr1, expr2;
// compile the XPath expression to get the role of Jarl
expr1 = (XPathExpression) xpath.compile(str1);
// run the query and get a nodeset as a result
Object result = expr1.evaluate(doc, XPathConstants.NODESET);
// cast the result to a DOM NodeList
NodeList results = (NodeList) result;
for (int i=0; i<results.getLength(); i++)
{
System.out.println(results.item(i).getNodeValue());
textArea.append(results.item(i).getNodeValue() + "\n");
}
if (results.getLength() == 0) {
textArea.setText("Not found Search for another please");
}
// new XPath expression to get the number of Managers
// expr2 = (XPathExpression) xpath.compile(str2);
// run the query and get the number of nodes, returned result is Double
//Double number = (Double) expr2.evaluate(doc, XPathConstants.NUMBER);
//System.out.printf("Number of Managers:%2.0f\n", number);
}
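// Example of the expression built above (the country value is illustrative):
//   //ShippingInformation[Country='Denmark']/*/text()
// selects the text of every child element of the matching ShippingInformation node.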
public String getChildElements(String elementName) throws XPathExpressionException, SAXException, ParserConfigurationException, IOException {
File xmlFile = new File("productOrder.xml");
StringBuilder stringBuilder = new StringBuilder();
String xPathQuery = "//"+ elementName +"/*//text()";
DocumentBuilderFactory builderFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder builder = builderFactory.newDocumentBuilder();
Document doc = builder.parse(xmlFile);
XPathFactory xFactory = XPathFactory.newInstance();
XPath xpath = xFactory.newXPath();
XPathExpression expr1, expr2;
expr1 = (XPathExpression) xpath.compile(xPathQuery);
Object result = expr1.evaluate(doc, XPathConstants.NODESET);
NodeList results = (NodeList) result;
int count = 0;
for (int i=0; i<results.getLength(); i++)
{
stringBuilder.append(results.item(i).getNodeValue() + "\n");
count++;
}
stringBuilder.append("\n Found " + count + " elements");
return stringBuilder.toString();
}
public String getElement(String elementName) throws XPathExpressionException, SAXException, ParserConfigurationException, IOException {
File xmlFile = new File("productOrder.xml");
StringBuilder stringBuilder = new StringBuilder();
String xPathQuery = "//"+ elementName +"/text()";
DocumentBuilderFactory builderFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder builder = builderFactory.newDocumentBuilder();
Document doc = builder.parse(xmlFile);
XPathFactory xFactory = XPathFactory.newInstance();
XPath xpath = xFactory.newXPath();
XPathExpression expr1, expr2;
expr1 = (XPathExpression) xpath.compile(xPathQuery);
Object result = expr1.evaluate(doc, XPathConstants.NODESET);
NodeList results = (NodeList) result;
int count = 0;
for (int i=0; i<results.getLength(); i++)
{
stringBuilder.append(results.item(i).getNodeValue() + "\n");
count++;
}
stringBuilder.append("\n Found " + count + " occurences of element " + elementName);
return stringBuilder.toString();
}
public String getCustomQuery(String customQuery) throws XPathExpressionException, SAXException, ParserConfigurationException, IOException {
File xmlFile = new File("productOrder.xml");
StringBuilder stringBuilder = new StringBuilder();
String xPathQuery = customQuery;
DocumentBuilderFactory builderFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder builder = builderFactory.newDocumentBuilder();
Document doc = builder.parse(xmlFile);
XPathFactory xFactory = XPathFactory.newInstance();
XPath xpath = xFactory.newXPath();
XPathExpression expr1, expr2;
expr1 = (XPathExpression) xpath.compile(xPathQuery);
Object result = expr1.evaluate(doc, XPathConstants.NODESET);
NodeList results = (NodeList) result;
for (int i=0; i<results.getLength(); i++)
{
stringBuilder.append(results.item(i).getNodeValue() + "\n");
}
return stringBuilder.toString();
}
}
<file_sep>
package validation;
import java.io.File;
import javax.xml.XMLConstants;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.validation.Schema;
import javax.xml.validation.SchemaFactory;
import javax.xml.validation.Validator;
import org.w3c.dom.Document;
/**
*
* @author Dora
*/
/**
*
* First create an XSD Schema instance from the XSD schema file, then create a Validator instance from the schema instance.
* This Validator instance can then be used to validate an XML file represented with the DOM interface or the SAX interface
* For schema language see http://www.w3.org/TR/xmlschema-0/
*/
public class XMLDOMValidate
{
public static void main(String[] args)
{
File myxmlfile = new File("src/XML/productOrder.xml");
File myschemafile = new File("src/XML/ProductOrder.xsd");
Schema schema = null;
// load an XML Schema into the Schema instance
try
{
String language = XMLConstants.W3C_XML_SCHEMA_NS_URI;
SchemaFactory factory = SchemaFactory.newInstance(language);
schema = factory.newSchema(myschemafile);
// Parse the XML document to DOM Document, and then validate it
DocumentBuilderFactory builderFactory = DocumentBuilderFactory.newInstance();
builderFactory.setNamespaceAware(true);
DocumentBuilder builder = builderFactory.newDocumentBuilder();
Document doc = builder.parse(myxmlfile);
DOMSource source = new DOMSource(doc);
Validator validator = schema.newValidator();
validator.validate(source);
}
catch (Exception e)
{
e.printStackTrace();
}
}
}
| 47c396e4031712f192c8ce93f372ef0170d7729b | ["Java"] | 4 | Java | nicaa/MandatoryAssignment-sysint | 6dabf4a252bb446263b59c35c540f129cc43b700 | 07ab1dea3eae1282830224c00894ccc5c7c862ec | refs/heads/master |
<file_sep># battleships
## https://dreadlocker.github.io/battleships2/
<file_sep>import Vue from "vue";
import Vuex from "vuex";
import * as types from "./types";
Vue.use(Vuex);
export default new Vuex.Store({
state: {
// game_grid can go up to 26
game_grid: 10,
alphabet: ["A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z"],
first_row_arr: [""],
rows_obj: {},
dot: ".",
battle_ships_arr: [5, 4, 4],
battle_ships_total_parts: null,
shots: 0,
shotText: ""
},
mutations: {
[types.FIRST_ROW_ARR]: (state, payload) => {
state.first_row_arr = payload;
},
[types.ROWS_OBJ]: (state, payload) => {
state.rows_obj = payload;
},
[types.BATTLE_SHIPS_ARR]: (state, payload) => {
state.battle_ships_arr = payload;
},
[types.BATTLE_SHIPS_TOTAL_PARTS]: (state, payload) => {
state.battle_ships_total_parts = payload;
},
[types.SHOT_TAKEN]: (state, payload) => {
state.shots = payload;
},
[types.SHOT_TEXT]: (state, payload) => {
state.shotText = payload;
},
},
actions: {
[types.ACTION_FIRST_ROW_ARR]({ commit }, payload) {
commit(types.FIRST_ROW_ARR, payload);
},
[types.ACTION_ROWS_OBJ]({ commit }, payload) {
commit(types.ROWS_OBJ, payload);
},
[types.ACTION_BATTLE_SHIPS_ARR]({ commit }, payload) {
commit(types.BATTLE_SHIPS_ARR, payload);
},
[types.ACTION_BATTLE_SHIPS_TOTAL_PARTS]({ commit }, payload) {
commit(types.BATTLE_SHIPS_TOTAL_PARTS, payload);
},
[types.ACTION_SHOT_TAKEN]({ commit }, payload) {
commit(types.SHOT_TAKEN, payload);
},
[types.ACTION_SHOT_TEXT]({ commit }, payload) {
commit(types.SHOT_TEXT, payload);
},
}
});
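// Usage sketch (component code, not part of this store file):
// this.$store.dispatch(types.ACTION_SHOT_TAKEN, this.$store.state.shots + 1);
// this.$store.dispatch(types.ACTION_SHOT_TEXT, "Hit!");
// Each action simply commits the matching mutation, which is the only place the state is written.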
<file_sep>export const FIRST_ROW_ARR = "store/FIRST_ROW_ARR";
export const ROWS_OBJ = "store/ROWS_OBJ";
export const BATTLE_SHIPS_ARR = "store/BATTLE_SHIPS_ARR";
export const BATTLE_SHIPS_TOTAL_PARTS = "store/BATTLE_SHIPS_TOTAL_PARTS";
export const SHOT_TAKEN = "store/SHOT_TAKEN";
export const SHOT_TEXT = "store/SHOT_TEXT";
export const ACTION_FIRST_ROW_ARR = "store/ACTION_FIRST_ROW_ARR";
export const ACTION_ROWS_OBJ = "store/ACTION_ROWS_OBJ";
export const ACTION_BATTLE_SHIPS_ARR = "store/ACTION_BATTLE_SHIPS_ARR";
export const ACTION_BATTLE_SHIPS_TOTAL_PARTS = "store/ACTION_BATTLE_SHIPS_TOTAL_PARTS";
export const ACTION_SHOT_TAKEN = "store/ACTION_SHOT_TAKEN";
export const ACTION_SHOT_TEXT = "store/ACTION_SHOT_TEXT";
|
bc5a54a85c1e670ab48f4833d401394c40cee5e1
|
[
"Markdown",
"JavaScript"
] | 3
|
Markdown
|
dreadlocker/battleships
|
e0e1722cdf103a963d44557fc330035833c3689c
|
c93070080178a2437189f40e5590ca5ef804387d
|
refs/heads/master
|
<repo_name>thareUSGS/camera_model<file_sep>/python/generate_isd.py
import json
import pvl
import spiceypy as spice
def find_in_dict(obj, key):
"""
Recursively find an entry in a dictionary
Parameters
----------
obj : dict
The dictionary to search
key : str
The key to find in the dictionary
Returns
-------
item : obj
The value from the dictionary
"""
if key in obj:
return obj[key]
for k, v in obj.items():
if isinstance(v,dict):
item = find_in_dict(v, key)
if item is not None:
return item
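# A minimal usage sketch for find_in_dict (hypothetical nested dict, not taken from the
# actual cube labels): the helper walks nested dictionaries depth-first and returns the
# first value whose key matches, or None when the key is absent.
#
# >>> example = {'IsisCube': {'Instrument': {'InstrumentId': 'MDIS-NAC'}}}
# >>> find_in_dict(example, 'InstrumentId')
# 'MDIS-NAC'
# >>> find_in_dict(example, 'MissingKey') is None
# True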
def main(kernelid=236820):
spice.furnsh("../tests/data/msgr_mdis_v160.ti")
isd = {}
# Load information from the IK kernel
isd['focal_length'] = spice.gdpool('INS-{}_FOCAL_LENGTH'.format(kernelid), 0, 1).tolist()[0]
isd['focal_length_epsilon'] = spice.gdpool('INS-{}_FL_UNCERTAINTY'.format(kernelid), 0, 1).tolist()[0]
isd['nlines'] = spice.gipool('INS-{}_PIXEL_LINES'.format(kernelid), 0, 1).tolist()[0]
isd['nsamples'] = spice.gipool('INS-{}_PIXEL_SAMPLES'.format(kernelid), 0, 1).tolist()[0]
isd['original_half_lines'] = isd['nlines'] / 2.0
isd['original_half_samples'] = isd['nsamples'] / 2.0
isd['pixel_pitch'] = spice.gdpool('INS-{}_PIXEL_PITCH'.format(kernelid), 0, 1).tolist()[0]
isd['ccd_center'] = spice.gdpool('INS-{}_CCD_CENTER'.format(kernelid), 0, 1).tolist()[0]
isd['ifov'] = spice.gdpool('INS-{}_IFOV'.format(kernelid), 0, 1).tolist()[0]
isd['boresight'] = spice.gdpool('INS-{}_BORESIGHT'.format(kernelid), 0, 3).tolist()
isd['transx'] = spice.gdpool('INS-{}_TRANSX'.format(kernelid), 0, 3).tolist()
isd['transy'] = spice.gdpool('INS-{}_TRANSY'.format(kernelid), 0, 3).tolist()
isd['itrans_sample'] = spice.gdpool('INS-{}_ITRANSS'.format(kernelid), 0, 3).tolist()[0]
isd['itrans_line'] = spice.gdpool('INS-{}_ITRANSL'.format(kernelid), 0, 3).tolist()[0]
isd['odt_x'] = spice.gdpool('INS-{}_OD_T_X'.format(kernelid), 0, 9).tolist()
isd['odt_y'] = spice.gdpool('INS-{}_OD_T_Y'.format(kernelid), 0, 9).tolist()
isd['starting_detector_sample'] = spice.gdpool('INS-{}_FPUBIN_START_SAMPLE'.format(kernelid), 0, 1).tolist()[0]
isd['starting_detector_line'] = spice.gdpool('INS-{}_FPUBIN_START_LINE'.format(kernelid), 0, 1).tolist()[0]
# Load the ISIS Cube header
header = pvl.load('../tests/data/CN0108840044M_IF_5_NAC_spiced.cub')
isd['instrument_id'] = find_in_dict(header, 'InstrumentId')
isd['spacecraft_name'] = find_in_dict(header, 'SpacecraftName')
#Time
# Load LeapSecond Kernel
spice.furnsh('../tests/data/naif0011.tls.txt')
start_ephemeris_time = spice.str2et(find_in_dict(header, 'StartTime').isoformat())
stop_ephemeris_time = spice.str2et(find_in_dict(header, 'StopTime').isoformat())
# Total hack - short on time - set the et to be the start time - this is WRONG
isd['ephemeris_time'] = start_ephemeris_time
# OPK and Position - the logic to get these is in Anne's SocetCode and the
# called ISIS functions - starts on line 418 of the cpp
isd['x_sensor_origin'] = None
isd['y_sensor_origin'] = None
isd['z_sensor_origin'] = None
isd['omega'] = None
isd['phi'] = None
isd['kappa'] = None
# ISD Search Information - totally fabricated - where do we get these?
isd['min_elevation'] = -1.0
isd['max_elevation'] = 1.0
# Write it out
with open('hardcoded.isd', 'w') as f:
json.dump(isd, f, sort_keys=True, indent=4)
if __name__ == '__main__':
main()
<file_sep>/include/transformations/transformations.h
#ifndef TRANSFORMATION_H
#define TRANSFORMATION_H
#include <Eigen/Dense>
using namespace Eigen;
Matrix3d opkToRotation(float omega, float phi, float kappa);
#endif
<file_sep>/tests/CMakeLists.txt
#ADD_EXECUTABLE(mdisset mdisset.cpp)
#TARGET_INCLUDE_DIRECTORIES(mdisset PUBLIC "${CMAKE_SOURCE_DIR}/include")
# Need line below for EXPECT_THROW to avoid:
# "warning: non-static data member initializers only
# available with -std=c++11 or -std=gnu++11"
# Find libcsmapi.so
FIND_LIBRARY(CSMAPI_LIBRARY csmapi "${CMAKE_SOURCE_DIR}/lib")
# Find the unit tests
FILE(GLOB SRC_FILES ${CMAKE_SOURCE_DIR}/tests/*Test.cpp)
ADD_EXECUTABLE(runTests runTests.cpp ${SRC_FILES} ${HEADER_FILES})
TARGET_INCLUDE_DIRECTORIES(runTests PUBLIC
${CMAKE_SOURCE_DIR}/include/mdis
${CMAKE_SOURCE_DIR}/include/json
${CMAKE_SOURCE_DIR}/include/
${CMAKE_SOURCE_DIR}/include/transformations
${EIGEN3_INCLUDE_DIR})
TARGET_LINK_LIBRARIES(runTests gtest_main
MdisPlugin
MdisNacSensorModel
IsdReader
Transformations
${CSMAPI_LIBRARY})
<file_sep>/README.md
# camera_model
## Developers:
jlaura
tjwilson271828
ihumphrey-usgs
MakaylaS
MarjorieH
CurtisRose
<file_sep>/cmake/External-Spice.cmake
message( "External project - Spice" )
IF(APPLE)
SET(SPICE_URL "http://naif.jpl.nasa.gov/pub/naif/toolkit/C/MacIntel_OSX_AppleC_64bit/packages/cspice.tar.Z")
ELSEIF(UNIX)
SET(SPICE_URL "http://naif.jpl.nasa.gov/pub/naif/toolkit//C/PC_Linux_GCC_64bit/packages/cspice.tar.Z")
ENDIF()
ExternalProject_Add( Spice
URL "${SPICE_URL}"
DOWNLOAD_NAME cspice.tar.gz
UPDATE_COMMAND ""
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
INSTALL_COMMAND
${CMAKE_COMMAND} -E copy
${CMAKE_BINARY_DIR}/Spice-prefix/src/lib/cspice.a
${CMAKE_BINARY_DIR}/Spice-prefix/src/lib/libcspice.a &&
${CMAKE_COMMAND} -E copy_directory
${CMAKE_BINARY_DIR}/Spice-prefix/src/Spice/
${INSTALL_DEPENDENCIES_DIR}/cspice
)
<file_sep>/tests/TransformationsTest.cpp
#include <math.h>
#include <transformations.h>
#include <Eigen/Dense>
#include <iostream>
#include <gtest/gtest.h>
class TransformationsTest : public ::testing::Test {
protected:
virtual void SetUp(){
tolerance = 0.0001;
}
double tolerance;
};
TEST_F(TransformationsTest, OpkToRotation){
// This is right out of Mikhail Modern Photogrammetry p.95
Eigen::Matrix3d rot;
rot << 0.9622, 0.2616, -0.0751,
-0.2578, 0.9645, 0.0562,
0.0871, -0.0348, 0.9956;
float o = (2 * M_PI) / 180;
float p = (5 * M_PI) / 180;
float k = (15 * M_PI) / 180;
ASSERT_TRUE(rot.isApprox(opkToRotation(o,p,k), tolerance));
}
<file_sep>/src/objs/MdisNacSensorModel.cpp
#include "MdisNacSensorModel.h"
#include <iomanip>
#include <iostream>
#include <sstream>
#include <csm/Error.h>
using namespace std;
// Declaration of static variables
const std::string MdisNacSensorModel::_SENSOR_MODEL_NAME
= "ISIS_MDISNAC_USGSAstro_1_Linux64_csm30.so";
const int MdisNacSensorModel::m_numParameters = 6;
const std::string MdisNacSensorModel::m_parameterName[] = {
"X Sensor Position (m)", // 0
"Y Sensor Position (m)", // 1
"Z Sensor Position (m)", // 2
"Omega (radians)", // 3
"Phi (radians)", // 4
"Kappa (radians)" // 5
};
MdisNacSensorModel::MdisNacSensorModel() {
m_transX[0] = 0.0;
m_transX[1] = 0.0;
m_transX[2] = 0.0;
m_transY[0] = 0.0;
m_transY[1] = 0.0;
m_transY[2] = 0.0;
m_iTransS[0] = 0.0;
m_iTransS[1] = 0.0;
m_iTransS[2] = 0.0;
m_iTransL[0] = 0.0;
  m_iTransL[1] = 0.0;
  m_iTransL[2] = 0.0;
m_majorAxis = 0.0;
m_minorAxis = 0.0;
m_focalLength = 0.0;
m_spacecraftVelocity[0] = 0.0;
m_spacecraftVelocity[1] = 0.0;
m_spacecraftVelocity[2] = 0.0;
m_sunPosition[0] = 0.0;
m_sunPosition[1] = 0.0;
m_sunPosition[2] = 0.0;
m_startingDetectorSample = 0.0;
m_startingDetectorLine = 0.0;
m_targetName = "";
m_ifov = 0.0;
m_instrumentID = "";
m_focalLengthEpsilon = 0.0;
m_ccdCenter[0] = 0.0;
m_ccdCenter[1] = 0.0;
m_line_pp = 0.0;
m_sample_pp = 0.0;
m_odtX[0] = 0.0;
m_odtX[1] = 0.0;
m_odtX[2] = 0.0;
m_odtX[3] = 0.0;
m_odtX[4] = 0.0;
m_odtX[5] = 0.0;
m_odtX[6] = 0.0;
m_odtX[7] = 0.0;
m_odtX[8] = 0.0;
m_odtX[9] = 0.0;
m_odtY[0] = 0.0;
m_odtY[1] = 0.0;
m_odtY[2] = 0.0;
m_odtY[3] = 0.0;
m_odtY[4] = 0.0;
m_odtY[5] = 0.0;
m_odtY[6] = 0.0;
m_odtY[7] = 0.0;
m_odtY[8] = 0.0;
m_odtY[9] = 0.0;
m_originalHalfLines = 0.0;
m_spacecraftName = "";
m_pixelPitch = 0.0;
m_iTransS[0] = 0.0;
m_iTransS[1] = 0.0;
m_iTransS[2] = 0.0;
m_iTransL[0] = 0.0;
m_iTransL[1] = 0.0;
m_iTransL[2] = 0.0;
m_ephemerisTime = 0.0;
m_originalHalfSamples = 0.0;
m_boresight[0] = 0.0;
m_boresight[1] = 0.0;
m_boresight[2] = 0.0;
m_nLines = 0;
m_nSamples = 0;
// Initialize parameter values
m_currentParameterValue.assign(m_numParameters, 0.0);
m_currentParameterCovariance.assign(m_numParameters*m_numParameters,0.0);
m_noAdjustments.assign(m_numParameters,0.0);
}
MdisNacSensorModel::~MdisNacSensorModel() {}
/**
* @brief MdisNacSensorModel::groundToImage
* @param groundPt
* @param desiredPrecision
* @param achievedPrecision
* @param warnings
* @return Returns <line, sample> coordinate in the image corresponding to the ground point
* without bundle adjustment correction.
*/
csm::ImageCoord MdisNacSensorModel::groundToImage(const csm::EcefCoord &groundPt,
double desiredPrecision,
double *achievedPrecision,
csm::WarningList *warnings) const {
return groundToImage(groundPt,m_noAdjustments,desiredPrecision,achievedPrecision,warnings);
}
/**
* @brief MdisNacSensorModel::groundToImage
* @param groundPt
* @param adjustments
* @param desired_precision
* @param achieved_precision
* @param warnings
* @return Returns <line,sample> coordinate in the image corresponding to the ground point.
* This function applies bundle adjustments to the final value.
*/
csm::ImageCoord MdisNacSensorModel::groundToImage(
const csm::EcefCoord& groundPt,
const std::vector<double>& adjustments,
double desired_precision,
double* achieved_precision,
csm::WarningList* warnings ) const {
double xl, yl, zl;
xl = m_currentParameterValue[0];
yl = m_currentParameterValue[1];
zl = m_currentParameterValue[2];
double x, y, z;
x = groundPt.x;
y = groundPt.y;
z = groundPt.z;
double xo, yo, zo;
xo = xl - x - getValue(0,adjustments);
yo = yl - y - getValue(1,adjustments);
zo = zl - z - getValue(2,adjustments);
double f;
f = m_focalLength;
// Camera rotation matrix
double m[3][3];
calcRotationMatrix(m,adjustments);
// Sensor position
double undistortedx, undistortedy, denom;
denom = m[0][2] * xo + m[1][2] * yo + m[2][2] * zo;
undistortedx = (f * (m[0][0] * xo + m[1][0] * yo + m[2][0] * zo)/denom) + m_sample_pp; //m_sample_pp like this assumes mm
undistortedy = (f * (m[0][1] * xo + m[1][1] * yo + m[2][1] * zo)/denom) + m_line_pp;
// Apply the distortion to the line/sample location and then convert back to line/sample
double distortedx, distortedy;
distortionFunction(undistortedx, undistortedy, distortedx, distortedy);
//Convert distorted mm into line/sample
double sample, line;
sample = m_iTransS[0] + m_iTransS[1] * distortedx + m_iTransS[2] * distortedx + m_ccdCenter[0] - 0.5;
line = m_iTransL[0] + m_iTransL[1] * distortedy + m_iTransL[2] * distortedy + m_ccdCenter[0] - 0.5;
return csm::ImageCoord(line, sample);
}
csm::ImageCoordCovar MdisNacSensorModel::groundToImage(const csm::EcefCoordCovar &groundPt,
double desiredPrecision,
double *achievedPrecision,
csm::WarningList *warnings) const {
throw csm::Error(csm::Error::UNSUPPORTED_FUNCTION,
"Unsupported function",
"MdisNacSensorModel::groundToImage");
}
csm::EcefCoord MdisNacSensorModel::imageToGround(const csm::ImageCoord &imagePt,
double height,
double desiredPrecision,
double *achievedPrecision,
csm::WarningList *warnings) const {
double sample = imagePt.samp;
double line = imagePt.line;
//Here is where we should be able to apply an adjustment to opk
double m[3][3];
calcRotationMatrix(m);
//Apply the principal point offset, assuming the pp is given in pixels
double xl, yl, zl, lo, so;
lo = line - m_line_pp;
so = sample - m_sample_pp;
//Convert from the pixel space into the metric space
double optical_center_x, optical_center_y, x_camera, y_camera;
optical_center_x = m_ccdCenter[0] - 0.5;
optical_center_y = m_ccdCenter[1] - 0.5;
y_camera = m_transY[0] + m_transY[1] * (lo - optical_center_y) + m_transY[2] * (lo - optical_center_y);
x_camera = m_transX[0] + m_transX[1] * (so - optical_center_x) + m_transX[2] * (so - optical_center_x);
// Apply the distortion model (remove distortion)
  double undistorted_cameraX = 0.0, undistorted_cameraY = 0.0;
setFocalPlane(x_camera, y_camera, undistorted_cameraX, undistorted_cameraY);
//Now back from distorted mm to pixels
  double udx, udy; // undistorted focal plane x and y (mm)
udx = undistorted_cameraX;
udy = undistorted_cameraY;
xl = m[0][0] * udx + m[0][1] * udy - m[0][2] * -m_focalLength;
yl = m[1][0] * udx + m[1][1] * udy - m[1][2] * -m_focalLength;
zl = m[2][0] * udx + m[2][1] * udy - m[2][2] * -m_focalLength;
double x, y, z;
double xc, yc, zc;
xc = m_currentParameterValue[0];
yc = m_currentParameterValue[1];
zc = m_currentParameterValue[2];
// Intersect with some height about the ellipsoid.
losEllipsoidIntersect(height, xc, yc, zc, xl, yl, zl, x, y, z);
return csm::EcefCoord(x, y, z);
}
csm::EcefCoordCovar MdisNacSensorModel::imageToGround(const csm::ImageCoordCovar &imagePt, double height,
double heightVariance, double desiredPrecision,
double *achievedPrecision,
csm::WarningList *warnings) const {
throw csm::Error(csm::Error::UNSUPPORTED_FUNCTION,
"Unsupported function",
"MdisNacSensorModel::imageToGround");
}
csm::EcefLocus MdisNacSensorModel::imageToProximateImagingLocus(const csm::ImageCoord &imagePt,
const csm::EcefCoord &groundPt,
double desiredPrecision,
double *achievedPrecision,
csm::WarningList *warnings) const {
// Ignore the ground point?
return imageToRemoteImagingLocus(imagePt);
}
csm::EcefLocus MdisNacSensorModel::imageToRemoteImagingLocus(const csm::ImageCoord &imagePt,
double desiredPrecision,
double *achievedPrecision,
csm::WarningList *warnings) const {
// Find the line,sample on the focal plane (mm)
// CSM center = 0.5, MDIS IK center = 1.0
double col = imagePt.samp - (m_ccdCenter[0] - 0.5);
double row = imagePt.line - (m_ccdCenter[1] - 0.5);
double focalPlaneX = m_transX[0] + m_transX[1] * col + m_transX[2] * col;
double focalPlaneY = m_transY[0] + m_transY[1] * row + m_transY[2] * row;
// Distort
double undistortedFocalPlaneX = focalPlaneX;
double undistortedFocalPlaneY = focalPlaneY;
setFocalPlane(focalPlaneX, focalPlaneY, undistortedFocalPlaneX, undistortedFocalPlaneY);
// Get rotation matrix and transform to a body-fixed frame
double m[3][3];
calcRotationMatrix(m);
std::vector<double> lookC { undistortedFocalPlaneX, undistortedFocalPlaneY, m_focalLength };
std::vector<double> lookB {
m[0][0] * lookC[0] + m[0][1] * lookC[1] + m[0][2] * lookC[2],
m[1][0] * lookC[0] + m[1][1] * lookC[1] + m[1][2] * lookC[2],
m[2][0] * lookC[0] + m[2][1] * lookC[1] + m[2][2] * lookC[2]
};
// Get unit vector
double mag = sqrt(lookB[0] * lookB[0] + lookB[1] * lookB[1] + lookB[2] * lookB[2]);
std::vector<double> lookBUnit {
lookB[0] / mag,
lookB[1] / mag,
lookB[2] / mag
};
return csm::EcefLocus(m_currentParameterValue[0], m_currentParameterValue[1], m_currentParameterValue[2],
lookBUnit[0], lookBUnit[1], lookBUnit[2]);
}
csm::ImageCoord MdisNacSensorModel::getImageStart() const {
csm::ImageCoord start;
start.samp = m_startingDetectorSample;
start.line = m_startingDetectorLine;
return start;
}
csm::ImageVector MdisNacSensorModel::getImageSize() const {
csm::ImageVector size;
size.line = m_nLines;
size.samp = m_nSamples;
return size;
}
std::pair<csm::ImageCoord, csm::ImageCoord> MdisNacSensorModel::getValidImageRange() const {
throw csm::Error(csm::Error::UNSUPPORTED_FUNCTION,
"Unsupported function",
"MdisNacSensorModel::getValidImageRange");
}
std::pair<double, double> MdisNacSensorModel::getValidHeightRange() const {
throw csm::Error(csm::Error::UNSUPPORTED_FUNCTION,
"Unsupported function",
"MdisNacSensorModel::getValidHeightRange");
}
csm::EcefVector MdisNacSensorModel::getIlluminationDirection(const csm::EcefCoord &groundPt) const {
// ground (body-fixed) - sun (body-fixed) gives us the illumination direction.
return csm::EcefVector {
groundPt.x - m_sunPosition[0],
groundPt.y - m_sunPosition[1],
groundPt.z - m_sunPosition[2]
};
}
double MdisNacSensorModel::getImageTime(const csm::ImageCoord &imagePt) const {
// check if the image point is in range
if (imagePt.samp >= m_startingDetectorSample &&
imagePt.samp <= (m_startingDetectorSample + m_nSamples) &&
      imagePt.line >= m_startingDetectorLine &&
imagePt.line <= (m_startingDetectorLine + m_nLines)) {
return m_ephemerisTime;
}
else {
throw csm::Error(csm::Error::BOUNDS,
"Image Coordinate out of Bounds",
"MdisNacSensorModel::getImageTime");
}
}
csm::EcefCoord MdisNacSensorModel::getSensorPosition(const csm::ImageCoord &imagePt) const {
// check if the image point is in range
if (imagePt.samp >= m_startingDetectorSample &&
imagePt.samp <= (m_startingDetectorSample + m_nSamples) &&
      imagePt.line >= m_startingDetectorLine &&
imagePt.line <= (m_startingDetectorLine + m_nLines)) {
csm::EcefCoord sensorPosition;
sensorPosition.x = m_currentParameterValue[0];
sensorPosition.y = m_currentParameterValue[1];
sensorPosition.z = m_currentParameterValue[2];
return sensorPosition;
}
else {
throw csm::Error(csm::Error::BOUNDS,
"Image Coordinate out of Bounds",
"MdisNacSensorModel::getSensorPosition");
}
}
csm::EcefCoord MdisNacSensorModel::getSensorPosition(double time) const {
throw csm::Error(csm::Error::UNSUPPORTED_FUNCTION,
"Unsupported function",
"MdisNacSensorModel::getSensorPosition");
}
csm::EcefVector MdisNacSensorModel::getSensorVelocity(const csm::ImageCoord &imagePt) const {
// Make sure the passed coordinate is with the image dimensions.
if (imagePt.samp < 0.0 || imagePt.samp > m_nSamples ||
imagePt.line < 0.0 || imagePt.line > m_nLines) {
throw csm::Error(csm::Error::BOUNDS, "Image coordinate out of bounds.",
"MdisNacSensorModel::getSensorVelocity");
}
// Since this is a frame, just return the sensor velocity the ISD gave us.
return csm::EcefVector {
m_spacecraftVelocity[0],
m_spacecraftVelocity[1],
m_spacecraftVelocity[2]
};
}
csm::EcefVector MdisNacSensorModel::getSensorVelocity(double time) const {
throw csm::Error(csm::Error::UNSUPPORTED_FUNCTION,
"Unsupported function",
"MdisNacSensorModel::getSensorVelocity");
}
csm::RasterGM::SensorPartials MdisNacSensorModel::computeSensorPartials(int index, const csm::EcefCoord &groundPt,
double desiredPrecision,
double *achievedPrecision,
csm::WarningList *warnings) const {
throw csm::Error(csm::Error::UNSUPPORTED_FUNCTION,
"Unsupported function",
"MdisNacSensorModel::computeSensorPartials");
}
/**
* @brief MdisNacSensorModel::computeSensorPartials
* @param index
* @param imagePt
* @param groundPt
* @param desiredPrecision
* @param achievedPrecision
* @param warnings
* @return The partial derivatives in the line,sample directions.
*
* Research: We should investigate using a central difference scheme to approximate
* the partials. It is more accurate, but it might be costlier calculation-wise.
*
*/
csm::RasterGM::SensorPartials MdisNacSensorModel::computeSensorPartials(int index,
const csm::ImageCoord &imagePt,
const csm::EcefCoord &groundPt,
double desiredPrecision,
double *achievedPrecision,
csm::WarningList *warnings) const {
const double delta = 1.0;
std::vector<double> adjustments(m_numParameters, 0.0);
adjustments[index] = delta;
csm::ImageCoord imagePt1 = groundToImage(groundPt,adjustments,desiredPrecision,achievedPrecision);
cout << "Img1 line: " << imagePt1.line << " ,Img1 sample: " << imagePt1.samp << endl;
csm::RasterGM::SensorPartials partials;
partials.first = (imagePt1.line - imagePt.line)/delta;
partials.second = (imagePt1.samp - imagePt.samp)/delta;
return partials;
}
std::vector<double> MdisNacSensorModel::computeGroundPartials(const csm::EcefCoord &groundPt) const {
throw csm::Error(csm::Error::UNSUPPORTED_FUNCTION,
"Unsupported function",
"MdisNacSensorModel::computeGroundPartials");
}
const csm::CorrelationModel& MdisNacSensorModel::getCorrelationModel() const {
throw csm::Error(csm::Error::UNSUPPORTED_FUNCTION,
"Unsupported function",
"MdisNacSensorModel::getCorrelationModel");
}
std::vector<double> MdisNacSensorModel::getUnmodeledCrossCovariance(const csm::ImageCoord &pt1,
const csm::ImageCoord &pt2) const {
throw csm::Error(csm::Error::UNSUPPORTED_FUNCTION,
"Unsupported function",
"MdisNacSensorModel::getUnmodeledCrossCovariance");
}
csm::Version MdisNacSensorModel::getVersion() const {
throw csm::Error(csm::Error::UNSUPPORTED_FUNCTION,
"Unsupported function",
"MdisNacSensorModel::getVersion");
}
std::string MdisNacSensorModel::getModelName() const {
throw csm::Error(csm::Error::UNSUPPORTED_FUNCTION,
"Unsupported function",
"MdisNacSensorModel::getModelName");
}
std::string MdisNacSensorModel::getPedigree() const {
throw csm::Error(csm::Error::UNSUPPORTED_FUNCTION,
"Unsupported function",
"MdisNacSensorModel::getPedigree");
}
std::string MdisNacSensorModel::getImageIdentifier() const {
throw csm::Error(csm::Error::UNSUPPORTED_FUNCTION,
"Unsupported function",
"MdisNacSensorModel::getImageIdentifier");
}
void MdisNacSensorModel::setImageIdentifier(const std::string& imageId,
csm::WarningList* warnings) {
throw csm::Error(csm::Error::UNSUPPORTED_FUNCTION,
"Unsupported function",
"MdisNacSensorModel::setImageIdentifier");
}
std::string MdisNacSensorModel::getSensorIdentifier() const {
throw csm::Error(csm::Error::UNSUPPORTED_FUNCTION,
"Unsupported function",
"MdisNacSensorModel::getSensorIdentifier");
}
std::string MdisNacSensorModel::getPlatformIdentifier() const {
throw csm::Error(csm::Error::UNSUPPORTED_FUNCTION,
"Unsupported function",
"MdisNacSensorModel::getPlatformIdentifier");
}
std::string MdisNacSensorModel::getCollectionIdentifier() const {
throw csm::Error(csm::Error::UNSUPPORTED_FUNCTION,
"Unsupported function",
"MdisNacSensorModel::getCollectionIdentifier");
}
std::string MdisNacSensorModel::getTrajectoryIdentifier() const {
throw csm::Error(csm::Error::UNSUPPORTED_FUNCTION,
"Unsupported function",
"MdisNacSensorModel::getTrajectoryIdentifier");
}
std::string MdisNacSensorModel::getSensorType() const {
throw csm::Error(csm::Error::UNSUPPORTED_FUNCTION,
"Unsupported function",
"MdisNacSensorModel::getSensorType");
}
std::string MdisNacSensorModel::getSensorMode() const {
throw csm::Error(csm::Error::UNSUPPORTED_FUNCTION,
"Unsupported function",
"MdisNacSensorModel::getSensorMode");
}
std::string MdisNacSensorModel::getReferenceDateAndTime() const {
throw csm::Error(csm::Error::UNSUPPORTED_FUNCTION,
"Unsupported function",
"MdisNacSensorModel::getReferenceDateAndTime");
}
std::string MdisNacSensorModel::getModelState() const {
// TEMPORARY
/* commented out for testing the gtest framework
throw csm::Error(csm::Error::UNSUPPORTED_FUNCTION,
"Unsupported function",
"MdisNacSensorModel::getModelState");
*/
return "";
}
void MdisNacSensorModel::replaceModelState(const std::string& argState) {
throw csm::Error(csm::Error::UNSUPPORTED_FUNCTION,
"Unsupported function",
"MdisNacSensorModel::replaceModelState");
}
csm::EcefCoord MdisNacSensorModel::getReferencePoint() const {
throw csm::Error(csm::Error::UNSUPPORTED_FUNCTION,
"Unsupported function",
"MdisNacSensorModel::getReferencePoint");
}
void MdisNacSensorModel::setReferencePoint(const csm::EcefCoord &groundPt) {
throw csm::Error(csm::Error::UNSUPPORTED_FUNCTION,
"Unsupported function",
"MdisNacSensorModel::setReferencePoint");
}
int MdisNacSensorModel::getNumParameters() const {
return m_numParameters;
}
std::string MdisNacSensorModel::getParameterName(int index) const {
return m_parameterName[index];
}
std::string MdisNacSensorModel::getParameterUnits(int index) const {
if (index < 3) {
return "m";
}
else {
return "radians";
}
}
bool MdisNacSensorModel::hasShareableParameters() const {
throw csm::Error(csm::Error::UNSUPPORTED_FUNCTION,
"Unsupported function",
"MdisNacSensorModel::hasShareableParameters");
}
bool MdisNacSensorModel::isParameterShareable(int index) const {
throw csm::Error(csm::Error::UNSUPPORTED_FUNCTION,
"Unsupported function",
"MdisNacSensorModel::isParameterShareable");
}
csm::SharingCriteria MdisNacSensorModel::getParameterSharingCriteria(int index) const {
throw csm::Error(csm::Error::UNSUPPORTED_FUNCTION,
"Unsupported function",
"MdisNacSensorModel::getParameterSharingCriteria");
}
double MdisNacSensorModel::getParameterValue(int index) const {
return m_currentParameterValue[index];
}
void MdisNacSensorModel::setParameterValue(int index, double value) {
m_currentParameterValue[index] = value;
}
csm::param::Type MdisNacSensorModel::getParameterType(int index) const {
throw csm::Error(csm::Error::UNSUPPORTED_FUNCTION,
"Unsupported function",
"MdisNacSensorModel::getParameterType");
}
void MdisNacSensorModel::setParameterType(int index, csm::param::Type pType) {
throw csm::Error(csm::Error::UNSUPPORTED_FUNCTION,
"Unsupported function",
"MdisNacSensorModel::setParameterType");
}
double MdisNacSensorModel::getParameterCovariance(int index1, int index2) const {
throw csm::Error(csm::Error::UNSUPPORTED_FUNCTION,
"Unsupported function",
"MdisNacSensorModel::getParameterCovariance");
}
void MdisNacSensorModel::setParameterCovariance(int index1, int index2, double covariance) {
throw csm::Error(csm::Error::UNSUPPORTED_FUNCTION,
"Unsupported function",
"MdisNacSensorModel::setParameterCovariance");
}
int MdisNacSensorModel::getNumGeometricCorrectionSwitches() const {
throw csm::Error(csm::Error::UNSUPPORTED_FUNCTION,
"Unsupported function",
"MdisNacSensorModel::getNumGeometricCorrectionSwitches");
}
std::string MdisNacSensorModel::getGeometricCorrectionName(int index) const {
throw csm::Error(csm::Error::UNSUPPORTED_FUNCTION,
"Unsupported function",
"MdisNacSensorModel::getGeometricCorrectionName");
}
void MdisNacSensorModel::setGeometricCorrectionSwitch(int index,
bool value,
csm::param::Type pType) {
throw csm::Error(csm::Error::UNSUPPORTED_FUNCTION,
"Unsupported function",
"MdisNacSensorModel::setGeometricCorrectionSwitch");
}
bool MdisNacSensorModel::getGeometricCorrectionSwitch(int index) const {
throw csm::Error(csm::Error::UNSUPPORTED_FUNCTION,
"Unsupported function",
"MdisNacSensorModel::getGeometricCorrectionSwitch");
}
std::vector<double> MdisNacSensorModel::getCrossCovarianceMatrix(
const GeometricModel &comparisonModel,
csm::param::Set pSet,
const GeometricModelList &otherModels) const {
throw csm::Error(csm::Error::UNSUPPORTED_FUNCTION,
"Unsupported function",
"MdisNacSensorModel::getCrossCovarianceMatrix");
}
void MdisNacSensorModel::calcRotationMatrix(
double m[3][3]) const {
// Trigonometric functions for rotation matrix
double sinw = std::sin(m_currentParameterValue[3]);
double cosw = std::cos(m_currentParameterValue[3]);
double sinp = std::sin(m_currentParameterValue[4]);
double cosp = std::cos(m_currentParameterValue[4]);
double sink = std::sin(m_currentParameterValue[5]);
double cosk = std::cos(m_currentParameterValue[5]);
  // Rotation matrix taken from Introduction to Modern Photogrammetry by
  // <NAME>, et al., p. 373
m[0][0] = cosp * cosk;
m[0][1] = cosw * sink + sinw * sinp * cosk;
m[0][2] = sinw * sink - cosw * sinp * cosk;
m[1][0] = -1 * cosp * sink;
m[1][1] = cosw * cosk - sinw * sinp * sink;
m[1][2] = sinw * cosk + cosw * sinp * sink;
m[2][0] = sinp;
m[2][1] = -1 * sinw * cosp;
m[2][2] = cosw * cosp;
}
void MdisNacSensorModel::calcRotationMatrix(
double m[3][3], const std::vector<double> &adjustments) const {
// Trigonometric functions for rotation matrix
double sinw = std::sin(getValue(3,adjustments));
double cosw = std::cos(getValue(3,adjustments));
double sinp = std::sin(getValue(4,adjustments));
double cosp = std::cos(getValue(4,adjustments));
double sink = std::sin(getValue(5,adjustments));
double cosk = std::cos(getValue(5,adjustments));
m[0][0] = cosp * cosk;
m[0][1] = cosw * sink + sinw * sinp * cosk;
m[0][2] = sinw * sink - cosw * sinp * cosk;
m[1][0] = -1 * cosp * sink;
m[1][1] = cosw * cosk - sinw * sinp * sink;
m[1][2] = sinw * cosk + cosw * sinp * sink;
m[2][0] = sinp;
m[2][1] = -1 * sinw * cosp;
m[2][2] = cosw * cosp;
}
void MdisNacSensorModel::losEllipsoidIntersect(
const double& height,
const double& xc,
const double& yc,
const double& zc,
const double& xl,
const double& yl,
const double& zl,
double& x,
double& y,
double& z ) const
{
// Helper function which computes the intersection of the image ray
// with an expanded ellipsoid. All vectors are in earth-centered-fixed
// coordinate system with origin at the center of the earth.
double ap, bp, k;
ap = m_majorAxis + height;
bp = m_minorAxis + height;
k = ap * ap / (bp * bp);
// Solve quadratic equation for scale factor
// applied to image ray to compute ground point
double at, bt, ct, quadTerm;
at = xl * xl + yl * yl + k * zl * zl;
bt = 2.0 * (xl * xc + yl * yc + k * zl * zc);
ct = xc * xc + yc * yc + k * zc * zc - ap * ap;
quadTerm = bt * bt - 4.0 * at * ct;
// If quadTerm is negative, the image ray does not
// intersect the ellipsoid. Setting the quadTerm to
// zero means solving for a point on the ray nearest
   // the surface of the ellipsoid.
if ( 0.0 > quadTerm )
{
quadTerm = 0.0;
}
double scale;
scale = (-bt - sqrt (quadTerm)) / (2.0 * at);
// Compute ground point vector
x = xc + scale * xl;
y = yc + scale * yl;
z = zc + scale * zl;
}
/**
* @brief Compute undistorted focal plane x/y.
*
* Computes undistorted focal plane (x,y) coordinates given a distorted focal plane (x,y)
* coordinate. The undistorted coordinates are solved for using the Newton-Raphson
* method for root-finding if the distortionFunction method is invoked.
*
* @param dx distorted focal plane x in millimeters
* @param dy distorted focal plane y in millimeters
* @param undistortedX The undistorted x coordinate, in millimeters.
* @param undistortedY The undistorted y coordinate, in millimeters.
*
* @return if the conversion was successful
* @todo Review the tolerance and maximum iterations of the root-
* finding algorithm.
* @todo Review the handling of non-convergence of the root-finding
* algorithm.
* @todo Add error handling for near-zero determinant.
*/
bool MdisNacSensorModel::setFocalPlane(double dx,double dy,
double &undistortedX,
double &undistortedY ) const {
// Solve the distortion equation using the Newton-Raphson method.
// Set the error tolerance to about one millionth of a NAC pixel.
const double tol = 1.4E-5;
// The maximum number of iterations of the Newton-Raphson method.
const int maxTries = 60;
double x;
double y;
double fx;
double fy;
double Jxx;
double Jxy;
double Jyx;
double Jyy;
// Initial guess at the root
x = dx;
y = dy;
distortionFunction(x, y, fx, fy);
for (int count = 1; ((fabs(fx) + fabs(fy)) > tol) && (count < maxTries); count++) {
this->distortionFunction(x, y, fx, fy);
fx = dx - fx;
fy = dy - fy;
distortionJacobian(x, y, Jxx, Jxy, Jyx, Jyy);
double determinant = Jxx * Jyy - Jxy * Jyx;
    if (fabs(determinant) < 1E-6) {
//
// Near-zero determinant. Add error handling here.
//
//-- Just break out and return with no convergence
break;
}
x = x + (Jyy * fx - Jxy * fy) / determinant;
y = y + (Jxx * fy - Jyx * fx) / determinant;
}
if ( (fabs(fx) + fabs(fy)) <= tol) {
// The method converged to a root.
undistortedX = x;
undistortedY = y;
}
else {
// The method did not converge to a root within the maximum
// number of iterations. Return with no distortion.
undistortedX = dx;
undistortedY = dy;
}
return true;
}
/**
* @description Jacobian of the distortion function. The Jacobian was computed
* algebraically from the function described in the distortionFunction
* method.
*
* @param x
* @param y
* @param Jxx Partial_xx
* @param Jxy Partial_xy
* @param Jyx Partial_yx
* @param Jyy Partial_yy
*/
void MdisNacSensorModel::distortionJacobian(double x, double y, double &Jxx, double &Jxy,
double &Jyx, double &Jyy) const {
double d_dx[10];
d_dx[0] = 0;
d_dx[1] = 1;
d_dx[2] = 0;
d_dx[3] = 2 * x;
d_dx[4] = y;
d_dx[5] = 0;
d_dx[6] = 3 * x * x;
d_dx[7] = 2 * x * y;
d_dx[8] = y * y;
d_dx[9] = 0;
double d_dy[10];
d_dy[0] = 0;
d_dy[1] = 0;
d_dy[2] = 1;
d_dy[3] = 0;
d_dy[4] = x;
d_dy[5] = 2 * y;
d_dy[6] = 0;
d_dy[7] = x * x;
d_dy[8] = 2 * x * y;
d_dy[9] = 3 * y * y;
Jxx = 0.0;
Jxy = 0.0;
Jyx = 0.0;
Jyy = 0.0;
for (int i = 0; i < 10; i++) {
Jxx = Jxx + d_dx[i] * m_odtX[i];
Jxy = Jxy + d_dy[i] * m_odtX[i];
Jyx = Jyx + d_dx[i] * m_odtY[i];
Jyy = Jyy + d_dy[i] * m_odtY[i];
}
}
/**
* @description Compute distorted focal plane (dx,dy) coordinate given an undistorted focal
* plane (ux,uy) coordinate. This describes the third order Taylor approximation to the
* distortion model.
*
 * @param ux Undistorted x
 * @param uy Undistorted y
* @param dx Result distorted x
* @param dy Result distorted y
*/
void MdisNacSensorModel::distortionFunction(double ux, double uy, double &dx, double &dy) const {
double f[10];
f[0] = 1;
f[1] = ux;
f[2] = uy;
f[3] = ux * ux;
f[4] = ux * uy;
f[5] = uy * uy;
f[6] = ux * ux * ux;
f[7] = ux * ux * uy;
f[8] = ux * uy * uy;
f[9] = uy * uy * uy;
dx = 0.0;
dy = 0.0;
for (int i = 0; i < 10; i++) {
dx = dx + f[i] * m_odtX[i];
dy = dy + f[i] * m_odtY[i];
}
}
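// Worked example (hypothetical coefficients, for illustration only): if m_odtX were
// {0, 1, 0, 0, 0, 0, 0, 0, 0, 0} and m_odtY were {0, 0, 1, 0, 0, 0, 0, 0, 0, 0}, the
// polynomial above reduces to dx = ux and dy = uy, i.e. a distortion-free model.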
/***** Helper Functions *****/
double MdisNacSensorModel::getValue(
int index,
const std::vector<double> &adjustments) const
{
return m_currentParameterValue[index] + adjustments[index];
}
<file_sep>/src/transformations/transformations.cpp
#include <math.h>
#include <transformations.h>
#include <iostream>
Matrix3d opkToRotation(float omega, float phi, float kappa) {
Matrix3d o;
o << 1, 0, 0,
0, cos(omega), sin(omega),
0, -sin(omega), cos(omega);
Matrix3d p;
p << cos(phi), 0, -sin(phi),
0, 1, 0,
sin(phi), 0, cos(phi);
Matrix3d k;
k << cos(kappa), sin(kappa), 0,
-sin(kappa), cos(kappa), 0,
0, 0, 1;
  //Chain multiplication: rotation = k*p*o
k *= p;
k *= o;
return k;
}
<file_sep>/src/apps/spice2isd/CMakeLists.txt
INCLUDE_DIRECTORIES("${CMAKE_SOURCE_DIR}/include/mdis")
INCLUDE_DIRECTORIES("${SPICE_INCLUDE_DIR}")
LINK_DIRECTORIES("${SPICE_LIBRARIES_DIR}")
LINK_DIRECTORIES("/usr/lib64")
ADD_LIBRARY(CSpiceIsd SHARED CSpiceIsd.cpp)
ADD_LIBRARY(SpiceController SHARED SpiceController.cpp)
TARGET_LINK_LIBRARIES(SpiceController libcspice.a)
TARGET_LINK_LIBRARIES(CSpiceIsd SpiceController)
ADD_EXECUTABLE(spice2isd spice2isd.cpp)
TARGET_LINK_LIBRARIES(spice2isd CSpiceIsd)
TARGET_LINK_LIBRARIES(spice2isd libgdal.so)
<file_sep>/src/apps/set/set.cpp
#include <cmath>
#include <dlfcn.h>
#include <fstream>
#include <iostream>
#include <string>
#include <vector>
#include <csm.h>
#include <Isd.h>
#include <Plugin.h>
#include <gdal/gdal.h>
#include <gdal/gdal_priv.h>
#include <gdal/cpl_conv.h>
#include <gdal/cpl_string.h>
#include <IsdReader.h>
#include <MdisPlugin.h>
#include <MdisNacSensorModel.h>
using namespace std;
const double PI = 3.14159265358979323846;
void cubeArray(vector<vector<float> > *cube, GDALRasterBand *poBand);
double computeAngle(const std::vector<double> &v1, const std::vector<double> &v2);
double emissionAngle(const csm::EcefCoord &groundPt,
const csm::EcefVector &sensorLook);
double incidenceAngle(const csm::EcefCoord &groundPt,
const csm::EcefVector &illuminationDirection);
void writeCSV(const string &csvFile,
const vector<string> &csvHeaders,
const vector< vector<float> > &cubeData,
const vector< vector<csm::EcefCoord> > &groundPoints);
void writeCSV(const string &csvFilename,
const vector<string> &csvHeaders,
const vector< vector<float> > &cubeData,
const vector< vector< vector<float> > > &latLonPoints,
const vector< vector<double> > &emissionAngles,
const vector< vector<double> > &incidenceAngles);
int main(int argc, char *argv[]) {
// Default cube and isd for now so this will run if user doesn't input these files.
string isdFile("../../../tests/data/EN1007907102M.json");
string cubeFile("../../../tests/data/EN1007907102M.cub");
// User can provide ISD and cube if desired.
if (argc == 3) {
isdFile = argv[1];
cubeFile = argv[2];
}
else {
cout << "Usage: set <ISD.json> <cube.cub>\n";
cout << "Provide an ISD .json file and its associated cube .cub file.\n";
}
csm::Isd *isd = readISD(isdFile);
if (isd == nullptr) {
return 1;
}
// Find plugins TODO: probably need a dedicated area for plugins to load
void *pluginFile = dlopen("../../../src/objs/libMdisPlugin.so", RTLD_LAZY);
if (pluginFile == nullptr) {
cout << "Could not load plugin." << endl;
return 1;
}
// Choose the correct plugin (e.g. framing v. line-scan?)
//const csm::PluginList &plugins = csm::Plugin::getList();
const csm::Plugin *plugin = csm::Plugin::findPlugin("UsgsAstroFrameMdisPluginCSM");
if (plugin == nullptr) {
cout << "Could not find plugin: " << "UsgsAstroFrameMdisPluginCSM" << endl;
return 1;
}
  MdisNacSensorModel *model = nullptr;
// Initialize the MdisNacSensorModel from the ISD using the plugin
try {
model = dynamic_cast<MdisNacSensorModel*>
(plugin->constructModelFromISD(*isd, "ISIS_MDISNAC_USGSAstro_1_Linux64_csm30.so"));
}
catch (csm::Error &e) {
cout << e.what() << endl;
delete isd;
if (model != nullptr) {
delete model;
}
exit (EXIT_FAILURE);
}
if (model == nullptr) {
cout << "Could not construct the sensor model from the plugin." << endl;
delete isd;
    return 1;
}
// Test the model's accuracy by visually comparing the results of the function calls
// csm::EcefCoord groundPoint;
// csm::ImageCoord imagePoint;
//
// for (int i = 0; i < 10; i++) {
// groundPoint = model->imageToGround(imagePoint, 0);
//
// cout << "Image Point (s, l) : (" << imagePoint.samp << ", " << imagePoint.line << "); "
// << "Ground Point (x, y, z) : (" << groundPoint.x << ", " << groundPoint.y << ", "
// << groundPoint.z << ")" << endl;
//
// //imagePoint = model->groundToImage(groundPoint);
// }
//Read from a cube using the GDAL API, and outputs the DN values to a 2D vector matrix
//string cubePath("../../../tests/data/CN0108840044M_IF_5_NAC_spiced.cub");
string cubePath(cubeFile);
GDALDataset *poDataset;
GDALRasterBand *poBand;
int nBlockXSize, nBlockYSize;
cout << endl;
cout << "Testing GDAL" << endl;
cout << endl;
GDALAllRegister();
poDataset = (GDALDataset *)GDALOpen(cubePath.c_str(),GA_ReadOnly);
if (poDataset == NULL) {
cout << "Could not open the: " + cubePath << endl;
delete isd;
delete model;
return 1;
}
else {
poBand = poDataset->GetRasterBand(1);
poBand->GetBlockSize(&nBlockXSize, &nBlockYSize);
cout << "Num samples = " << nBlockXSize << endl;
cout << "Num lines = " << nBlockYSize << endl;
//Read a band of data
vector< vector<float> > cubeMatrix;
cubeArray(&cubeMatrix,poBand);
// Get ground X,Y,Z for each pixel in image
vector< vector<csm::EcefCoord> > groundPoints;
// Get the emission and incidence angles for these XYZ's
vector< vector<double> > emissionAngles, incidenceAngles;
for (int line = 0; line < cubeMatrix.size(); line++) {
vector<csm::EcefCoord> groundLine;
vector<double> emaLine, inaLine;
for (int sample = 0; sample < cubeMatrix[line].size(); sample++) {
csm::ImageCoord imagePoint(line + 1, sample + 1);
csm::EcefCoord groundPoint(model->imageToGround(imagePoint, 0.0));
groundLine.push_back(groundPoint);
// Calculate the emission angle using CSM's getSensorPosition
csm::EcefLocus sensorLook(model->imageToRemoteImagingLocus(imagePoint));
emaLine.push_back(emissionAngle(groundPoint, sensorLook.direction));
// Calculate the incidence angle using CSM's illumination vector.
csm::EcefVector illumination(model->getIlluminationDirection(groundPoint));
inaLine.push_back(incidenceAngle(groundPoint, illumination));
}
groundPoints.push_back(groundLine);
emissionAngles.push_back(emaLine);
incidenceAngles.push_back(inaLine);
}
// calculate the radius
float r = sqrt(groundPoints[0][0].x * groundPoints[0][0].x +
groundPoints[0][0].y * groundPoints[0][0].y +
groundPoints[0][0].z * groundPoints[0][0].z);
// calculate the lat & lon for each ground coordinate
vector< vector < vector<float> > > latLonPoints;
for (int i = 0; i < cubeMatrix.size(); i++) {
vector< vector<float> > latLonLine;
for (int j = 0; j < cubeMatrix[0].size(); j++) {
// calculate the latitude & longitude
vector<float> latLon;
float theta = acos(groundPoints[i][j].z / r); // lat in radians
float phi = atan2(groundPoints[i][j].y, groundPoints[i][j].x); // lon in radians
latLon.push_back(theta * 180 / M_PI);
latLon.push_back(phi * 180 / M_PI);
latLonLine.push_back(latLon);
}
latLonPoints.push_back(latLonLine);
}
// Write to csv file
string csvFilename("ground.csv");
vector<string> csvHeaders {
"Line", "Sample", "DN", "Latitude", "Longitude", "Emission", "Incidence"
};
writeCSV(csvFilename, csvHeaders, cubeMatrix, latLonPoints, emissionAngles, incidenceAngles);
} //end else
delete isd;
delete model;
dlclose(pluginFile);
exit (EXIT_SUCCESS);
}
void cubeArray(vector <vector<float> > *cube,GDALRasterBand *poBand) {
vector<float> tempVector;
float *pafScanline;
int nsamps = poBand->GetXSize();
int nlines = poBand->GetYSize();
for (int j = 0;j<nlines;j++) {
pafScanline = (float *)CPLMalloc(sizeof(float)*nsamps);
poBand->RasterIO(GF_Read,0,j,nsamps,1,pafScanline,nsamps,1,GDT_Float32,0,0);
for (int i = 0;i < nsamps;i++) {
tempVector.push_back(pafScanline[i]);
}
cube->push_back(tempVector);
tempVector.clear();
CPLFree(pafScanline);
}
}
/**
* Computes an angle between two vectors by using the definition of the dot product.
*
* @param v1 First vector.
* @param v2 Second vector.
*
* @return @b double Returns the angle between two vectors (in degrees).
*/
double computeAngle(const std::vector<double> &v1, const std::vector<double> &v2) {
// Recall: dot(v1, v2) = ||v1|| * ||v2|| * cos(theta)
double dotProduct = v1[0] * v2[0] + v1[1] * v2[1] + v1[2] * v2[2];
double magnitude = sqrt(v1[0] * v1[0] + v1[1] * v1[1] + v1[2] * v1[2]) *
sqrt(v2[0] * v2[0] + v2[1] * v2[1] + v2[2] * v2[2]);
double theta = acos(dotProduct / magnitude);
// Convert to degrees.
return theta * (180 / PI);
}
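// Worked example (hypothetical vectors, for illustration only): for the orthogonal unit
// vectors v1 = (1, 0, 0) and v2 = (0, 1, 0), the dot product is 0 and both magnitudes
// are 1, so theta = acos(0) = pi/2 and the function returns 90 degrees.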
/**
* Calculates the emission angle (degrees) between a ground point's normal and the sensor's look
* direction.
*
* @param groundPt Body-fixed ground point to use in calculation.
* @param sensorLook Body-fixed sensor look direction.
*
* @return @b double Returns the emission angle (in degrees) between ground point normal and the
* spacecraft position.
*/
double emissionAngle(const csm::EcefCoord &groundPt,
const csm::EcefVector &sensorLook) {
// Reverse the sensor look direction to get vector from ground to sensor.
std::vector<double> sensor {
-1 * sensorLook.x,
-1 * sensorLook.y,
-1 * sensorLook.z
};
std::vector<double> ground { groundPt.x, groundPt.y, groundPt.z };
return computeAngle(ground, sensor);
}
/**
* Calculates the incidence angle (degrees) between a ground point's normal and the illumination
* vector from the sun.
*
* @param groundPt Body-fixed ground point to use in calculation.
* @param illuminationDirection Body-fixed illumination vector of the sun.
*
* @return @b double Returns the incidence angle (in degrees) between ground point normal and the
* illumination vector.
*/
double incidenceAngle(const csm::EcefCoord &groundPt,
const csm::EcefVector &illuminationDirection) {
// Reverse the illumination to get vector from ground point to sun.
std::vector<double> sun {
-1 * illuminationDirection.x,
-1 * illuminationDirection.y,
-1 * illuminationDirection.z
};
// Solve for the angle between the vector from surface to sun and the ground position.
std::vector<double> ground { groundPt.x, groundPt.y, groundPt.z };
return computeAngle(ground, sun);
}
/**
* Writes a CSV file to the given destination.
*
* This CSV will contain a DN, X, Y, and Z for each Line,Sample of the input cube data.
*
* @param csvFilename Name of the output CSV file to write to.
* @param csvHeaders Vector containing the header elements to write to the CSV file.
* @param cubeData Matrix of cube DNs.
* @param groundPoints Matrix of ground points for the image pixels.
*/
void writeCSV(const string &csvFilename,
const vector<string> &csvHeaders,
const vector< vector<float> > &cubeData,
const vector< vector<csm::EcefCoord> > &groundPoints) {
ofstream csvFile(csvFilename);
if (csvFile.is_open()) {
// Write the csv header
for (int str = 0; str < csvHeaders.size() - 1; str++) {
csvFile << csvHeaders[str] << ", ";
}
// Write the last header element
csvFile << csvHeaders[csvHeaders.size() - 1] << "\n";
// Write the csv records
for (int line = 0; line < cubeData.size(); line++) {
for (int sample = 0; sample < cubeData[line].size(); sample++) {
csm::EcefCoord ground = groundPoints[line][sample];
csvFile << line + 1 << ", " << sample + 1 << ", " << cubeData[line][sample] << ", "
<< ground.x/1000 << ", " << ground.y/1000 << ", " << ground.z/1000 << "\n";
}
}
}
else {
    cout << "\nUnable to open file \"" << csvFilename << "\" for writing." << endl;
return;
}
}
/**
* Writes a CSV file to the given destination.
*
* This CSV will contain a DN, Latitude, and Longitude for each Line,Sample of the input cube data.
*
* @param csvFilename Name of the output CSV file to write to.
* @param csvHeaders Vector containing the header elements to write to the CSV file.
* @param cubeData Matrix of cube DNs.
* @param latLonPoints Matrix of latitude/longitude points for the image pixels.
*/
void writeCSV(const string &csvFilename,
const vector<string> &csvHeaders,
const vector< vector<float> > &cubeData,
const vector< vector< vector<float> > > &latLonPoints,
const vector< vector<double> > &emissionAngles,
const vector< vector<double> > &incidenceAngles) {
ofstream csvFile(csvFilename);
if (csvFile.is_open()) {
// Write the csv header
for (int str = 0; str < csvHeaders.size() - 1; str++) {
csvFile << csvHeaders[str] << ", ";
}
// Write the last header element
csvFile << csvHeaders[csvHeaders.size() - 1] << "\n";
// Write the csv records
for (int line = 0; line < cubeData.size(); line++) {
for (int sample = 0; sample < cubeData[line].size(); sample++) {
vector<float> latLon = latLonPoints[line][sample];
csvFile << line + 1 << ", "
<< sample + 1 << ", "
<< cubeData[line][sample] << ", "
<< latLon[0] << ", " << latLon[1] << ", "
<< emissionAngles[line][sample] << ", "
<< incidenceAngles[line][sample] << "\n";
}
}
}
else {
    cout << "\nUnable to open file \"" << csvFilename << "\" for writing." << endl;
return;
}
}
<file_sep>/src/objs/IsdReader.cpp
#include <IsdReader.h>
#include <fstream>
#include <iostream>
#include <map>
#include <sstream>
#include <string>
#include <csm/Isd.h>
#include <json/json.hpp>
using namespace std;
using json = nlohmann::json;
/**
* @internal
* @todo This should be converted to a C++ class.
*/
/**
* Reads in a JSON formatted file, parses it, and creates a new csm::Isd in memory.
*
* @param filename JSON file to read.
*
* @return @b csm::Isd* Returns a pointer to the dynamically allocated csm::Isd.
* Returns a null pointer if unsuccessful.
*/
csm::Isd *readISD(string filename) {
json jsonFile;
int prec = 15;
csm::Isd *isd = NULL;
//Read the ISD file
ifstream file(filename);
if (!file.is_open()) {
perror(("error while opening file " + filename).c_str());
return NULL;
}
else if (file.bad()) {
perror(("error while reading file " + filename).c_str());
return NULL;
}
// File successfully opened
else {
isd = new csm::Isd();
file >> jsonFile;
isd->setFilename(filename);
// Parse the JSON and populate the ISD
for (json::iterator i = jsonFile.begin(); i != jsonFile.end(); i++) {
if (i.value().is_array()){
DataType arrayType = checkType(i.value()[0]);
addParam(*isd, i,arrayType,prec);
}
else {
DataType dt = checkType(i.value());
addParam(*isd,i,dt,prec);
}
}//end for
} //end outer-else
//printISD(*isd);
file.close();
return isd;
}
/**
* Checks the type of the currently parsed JSON token.
*
* @param obj The json::value_type currently being parsed.
*
* @return An enum DataType value indicating what the primitive data type of obj is.
*
* @author <NAME>
*/
DataType checkType(json::value_type obj){
if (obj.is_number()) {
if (obj.is_number_float())
return FLOAT;
else if (obj.is_number_integer())
return INT;
else if(obj.is_number_unsigned())
return UINT;
else
return UNKNOWN;
}
else if(obj.is_null())
return NULL8;
else if(obj.is_string())
return STRING;
else if(obj.is_boolean())
return BOOL;
else
return UNKNOWN;
}
/**
* Adds a parameter and its value to the ISD object being created.
*
* @param isd A reference to the ISD object being created.
* @param it The iterator to the json file which iterates over the keywords.
* @param dt The enum DataType value of the value.
* @param prec The # of decimal places to be written to the ISD (if the value is a float)
* @author <NAME>
*/
void addParam(csm::Isd &isd, json::iterator it, DataType dt, int prec) {
ostringstream key;
//output the key to the ISD
key << it.key();
if (it.value().is_array()) {
if (dt==FLOAT) {
vector<double> v = it.value();
for (int j=0;j < v.size(); j++) {
ostringstream val;
val << setprecision(prec) << v[j];
isd.addParam(key.str(),val.str());
}
}
else if(dt==INT){
vector<double> v = it.value();
for (int j=0;j < v.size(); j++) {
ostringstream val;
val << v[j];
isd.addParam(key.str(),val.str());
}
}
else if(dt==UINT) {
vector<double> v = it.value();
for (int j=0;j < v.size(); j++) {
ostringstream val;
val << v[j];
isd.addParam(key.str(),val.str());
}
}
else if(dt ==BOOL) {
vector<bool> v = it.value();
for (int j=0;j < v.size(); j++) {
ostringstream val;
val << v[j];
isd.addParam(key.str(),val.str());
}
}
else if (dt ==STRING) {
vector<string> v = it.value();
for (int j=0;j < v.size(); j++) {
ostringstream val;
val << v[j];
isd.addParam(key.str(),val.str());
}
}
}
else {
if(dt==FLOAT) {
double v = it.value();
ostringstream val;
val << setprecision(prec) << v;
isd.addParam(key.str(),val.str());
}
else if(dt==INT){
int v = it.value();
ostringstream val;
val << v;
isd.addParam(key.str(),val.str());
}
else if(dt==UINT) {
unsigned int v = it.value();
ostringstream val;
val << v;
isd.addParam(key.str(),val.str());
}
else if(dt ==BOOL) {
bool v = it.value();
ostringstream val;
val << v;
isd.addParam(key.str(),val.str());
}
else if (dt ==STRING) {
string v = it.value();
ostringstream val;
val << v;
isd.addParam(key.str(),val.str());
}
else if (dt ==NULL8) {
ostringstream val;
val << "null";
isd.addParam(key.str(),val.str());
}
}//end outer else
}//end addParam
/**
* Prints the ISD to standard output.
*
* @brief printISD Display the keyword:value pairs of a CSM::ISD object
* @param isd Reference to the ISD to output.
* @author <NAME>
*/
void printISD(const csm::Isd &isd){
cout.precision(15);
const multimap<string,string> isdMap = isd.parameters();
for (auto &i: isdMap)
cout << i.first << " : " << i.second << endl;
}
<file_sep>/tests/MdisNacSensorModelTest.h
#ifndef MdisNacSensorModelTest_h
#define MdisNacSensorModelTest_h
#include <sstream>
#include <string>
#include <csm/csm.h>
#include <csm/Isd.h>
#include <csm/Model.h>
#include <MdisNacSensorModel.h>
#include <MdisPlugin.h>
// runTest.cpp defines this global string to a data directory
extern std::string g_dataPath;
/**
 * Sub-class MdisNacSensorModel to test its protected linear algebra methods.
*
* We should be testing the protected methods of MdisNacSensorModel since imageToGround
* depends on intersect, which depends on project, etc.
*/
class TestableMdisNacSensorModel : public MdisNacSensorModel {
  // Give linear algebra methods public access when using instances of this class.
public:
using MdisNacSensorModel::setFocalPlane;
};
// Set up a fixture (i.e. objects we can use throughout test)
class MdisNacSensorModelTest : public ::testing::Test {
protected:
// Per test-case setup and teardown (e.g. once for this MdisNacSensorModelTest)
static void SetUpTestCase() {
if (g_dataPath != "") {
std::cout << g_dataPath << std::endl;
dataFile = g_dataPath + "/EN1007907102M.json";
std::cout << "dataFile: " << dataFile << std::endl;
}
isd = readISD(dataFile);
// Make sure the isd was read correctly.
if (isd == nullptr) {
setupFixtureFailed = true;
std::stringstream ss;
ss << "Could not create isd from file: " << dataFile << "\nError: " << strerror(errno);
setupFixtureError = ss.str();
return;
}
// printISD(*isd);
// Create a model from the ISD so we can test a valid image.
std::string modelName = MdisNacSensorModel::_SENSOR_MODEL_NAME;
csm::Model *validModel = mdisPlugin.constructModelFromISD(*isd, modelName);
// We could static_cast, but may be hard to debug if it doesn't correctly cast.
mdisModel = dynamic_cast<MdisNacSensorModel *>(validModel);
std::cout << "Construction model: " << mdisModel << "\n";
// Fatal failure if the downcast doesn't work
if (mdisModel == nullptr) {
setupFixtureFailed = true;
setupFixtureError = "Could not downcast Model* to MdisNacSensorModel*.";
return;
}
}
static void TearDownTestCase() {
delete isd;
delete mdisModel;
}
static bool setupFixtureFailed; // Work-around gtest issue #247.
static std::string setupFixtureError; // ^
static csm::Isd *isd; // ISD converted from JSON to use for creating model.
static std::string dataFile; // JSON data file to be converted to ISD for testing.
static MdisPlugin mdisPlugin; // Plugin used to create a model from ISD.
static MdisNacSensorModel *mdisModel; // MDIS-NAC sensor model created with ISD.
// Per test setup and teardown (e.g. each TEST_F)
virtual void SetUp() {
tolerance = 0.00001;
}
virtual void TearDown() {}
double tolerance; // Tolerance to be used for double comparison.
MdisNacSensorModel defaultMdisNac; // A default constructed MdisNacSensorModel.
TestableMdisNacSensorModel testMath; // Subclassed MdisNacSensorModel for protected methods.
};
#endif
<file_sep>/cmake/FindSpice.cmake
# Copyright (c) 2010-2014, Delft University of Technology
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are
# permitted provided that the following conditions are met:
# - Redistributions of source code must retain the above copyright notice, this list of
# conditions and the following disclaimer.
# - Redistributions in binary form must reproduce the above copyright notice, this list of
# conditions and the following disclaimer in the documentation and/or other materials
# provided with the distribution.
# - Neither the name of the Delft University of Technology nor the names of its contributors
# may be used to endorse or promote products derived from this software without specific
# prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
# GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
# OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Changelog
# YYMMDD Author Comment
# 12xxxx <NAME> File created based on FindEigen3.cmake.
# 12xxxx <NAME>
# 12xxxx D. Dirkx Adapted to detect the SPICE library.
# 140127 <NAME> Adapted for custom Spice kernel folder.
# 150206 <NAME> Automatic library find (/w/wo lib prefix)
#
#
# References
# FindEigen3.cmake.
#
# Notes
# This script tries to find SPICE library.
#
# Original copyright statements (from FindEigen3.cmake:
# Copyright (c) 2006, 2007 <NAME>, <<EMAIL>>
# Copyright (c) 2008, 2009 <NAME>, <<EMAIL>>
# Copyright (c) 2009 <NAME> <<EMAIL>>
#
# FindEigen3.cmake states that redistribution and use is allowed according to the terms of
# the 2-clause BSD license.
# If the path has not been set previously or manually, try to autodetect the path
if(NOT SPICE_BASE_PATH)
find_path(SPICE_BASE_PATH NAMES SpiceUsr.h
PATHS
${INSTALL_DEPENDENCIES_DIR}
PATH_SUFFIXES cspice/include
)
endif(NOT SPICE_BASE_PATH)
# If the path is still not set, then autodetection went wrong
if(NOT SPICE_BASE_PATH)
# Throw a warning and disable SPICE
message(WARNING "WARNING: SPICE not found! USE_CSPICE flag has been disabled.")
SET(USE_CSPICE false)
else(NOT SPICE_BASE_PATH)
MESSAGE("BASE: ${SPICE_BASE_PATH}")
# Good, path has been set/found, now set important variables and find libraries.
set(SPICE_BASE_PATH ${SPICE_BASE_PATH}/..)
MESSAGE("BASE2: ${SPICE_BASE_PATH}")
set(SPICE_INCLUDE_DIR ${SPICE_BASE_PATH}/include)
set(SPICE_LIBRARIES_DIR ${SPICE_BASE_PATH}/lib)
find_library(SPICE_LIBRARIES
NAMES libcspice.a libcspice.lib cspice.a cspice.lib
PATHS ${SPICE_LIBRARIES_DIR})
# Force SPICE libraries, to be used when spice and other libraries are simultaneously compiled.
if(NOT SPICE_LIBRARIES)
set(SPICE_LIBRARIES "cspice")
endif( )
# Let user know which SPICE library was found.
message(STATUS "SPICE_LIBRARIES: ${SPICE_LIBRARIES}")
link_directories(${SPICE_LIBRARIES_DIR})
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(SPICE DEFAULT_MSG SPICE_INCLUDE_DIR)
mark_as_advanced(SPICE_INCLUDE_DIR)
endif(NOT SPICE_BASE_PATH)
<file_sep>/src/objs/CMakeLists.txt
INCLUDE_DIRECTORIES("${CMAKE_SOURCE_DIR}/include/mdis")
INCLUDE_DIRECTORIES("${CMAKE_SOURCE_DIR}/include/")
ADD_LIBRARY(MdisNacSensorModel SHARED MdisNacSensorModel.cpp)
ADD_LIBRARY(MdisPlugin SHARED MdisPlugin.cpp)
ADD_LIBRARY(IsdReader SHARED IsdReader.cpp)
<file_sep>/src/apps/mdis2isd/mdis2isd.cpp
#include "mdis2isd.h"
#include <fstream>
#include <iomanip>
#include <QString>
#include "Camera.h"
#include "CameraFocalPlaneMap.h"
#include "CameraPointInfo.h"
#include "Distance.h"
#include "FileName.h"
#include "IException.h"
#include "iTime.h"
#include "NaifStatus.h"
#include "PvlGroup.h"
#include "PvlKeyword.h"
#include "PvlObject.h"
#include "Spice.h"
using namespace std;
namespace Isis {
mdis2isd::mdis2isd(QString cubeFileName) {
m_cubeFileString = cubeFileName;
m_validCube = true;
try {
m_campt.SetCube(m_cubeFileString);
}
catch (IException &e) {
//QString msg = "Unable to call CameraPointInfo::SetCube";
cout << e.toString() << endl;
m_validCube = false;
//throw IException(e, IException::Unknown, msg, _FILEINFO_);
}
}
mdis2isd::~mdis2isd(){
}
/**
* @brief mdis2isd::isdJSON This function outputs the ISD file created by this
* application in JSON format. It is very simple, and will need to be modified.
 * The key-value pairs assume <string,double>, which might not be the case, as
* the second value could be anything from a primitive type to an array of primitive
* types.
* @param isdList A ptr to a vector of key-value pairs storing the ISD values
* @param sensorModel The name of the sensor model.
 * @param filePath The output path and name for the JSON file.
*/
void mdis2isd::isdJSON(std::vector<std::pair<std::string,double> > * isdList,std::string sensorModel,
std::string filePath){
ofstream os;
os.open(filePath.c_str(), ofstream::out);
os << "{" << endl;
os << "\"" <<"ISD_SENSOR_MODEL_NAME"<<"\":" << sensorModel << ",";
os << setprecision(prec);
unsigned int nparams = isdList->size();
for (unsigned int i =0;i < nparams-1;i++) {
pair<string,double> isdNode=isdList->at(i);
os << "\"" << isdNode.first << "\":" << isdNode.second << ",";
}
pair<string,double> lastNode=isdList->at(nparams-1);
os << "\"" << lastNode.first << "\":" << lastNode.second << "}";
os.close();
}
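// Illustrative shape of the JSON emitted by isdJSON() above (the keys are real ISD keys
// written by writeISD(); the numeric values here are placeholders, not real output):
// {
// "ISD_SENSOR_MODEL_NAME":"MDIS_SENSOR_MODEL","ISD_FOCAL_LENGTH_PIXELS":549.11,...,"ISD_CURRENT_PARAMETER_COVARIANCE":0}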
/**
 * @brief mdis2isd::writeISD This function grabs the necessary Spice data from ISIS
* and outputs a simple ISD file.
*/
void mdis2isd::writeISD(){
if (m_validCube == false) {
cout << "Invalid cube" << endl;
return;
}
PvlGroup * caminfo = m_campt.SetCenter(false,true);
std::vector<std::pair<string,double> > isdList;
double spacecraftPosition[3] = {0.0,0.0,0.0};
double instrumentPosition[3] = {0.0,0.0,0.0};
double omegaPhiKappa[3] = {0.0,0.0,0.0};
Distance dRadii[3];
double isisFocalPlane2SocetPlate[3][3] = {{0.0, 0.0, 0.0}, {0.0, 0.0, 0.0}, {0.0, 0.0, 0.0}};
FileName inFile(m_cubeFileString);
Cube icube(inFile);
if (icube.isProjected()) {
QString msg = QString("You can only create a CSM from a level 1 image. "
"The input image [%1] is a map projected, level "
"2, cube.").arg(inFile.expanded());
cout << msg << endl;
return;
}
// Make sure the image contains the SPICE blobs/tables
PvlGroup test = icube.label()->findGroup("Kernels", Pvl::Traverse);
QString instrumentPointing = (QString) test["InstrumentPointing"];
if (instrumentPointing != "Table") {
QString msg = QString("Input image [%1] does not contain needed SPICE blobs. Please run "
"spiceinit on the image with attach=yes.").arg(inFile.expanded());
cout << msg << endl;
return;
}
PvlObject naifKeywords = icube.label()->findObject("NaifKeywords");
double boresightLine =0.0;
double boresightSample=0.0;
Camera *cam = icube.camera();
CameraFocalPlaneMap *focalMap = cam->FocalPlaneMap();
boresightLine = focalMap->DetectorLineOrigin();
boresightSample = focalMap->DetectorSampleOrigin();
double et = cam->time().Et();
Spice spice(icube);
spice.setTime(et);
//Retrieve instrument position and target body radii in meters
spice.radii(dRadii);
double radii[3] = {0.0, 0.0, 0.0};
radii[0] = dRadii[0].meters();
radii[1] = dRadii[1].meters();
radii[2] = dRadii[2].meters();
//Retrieve Spacecraft position
spacecraftPosition[0]=((*caminfo)["SpacecraftPosition"][0]).toDouble();
spacecraftPosition[1]=((*caminfo)["SpacecraftPosition"][1]).toDouble();
spacecraftPosition[2]=((*caminfo)["SpacecraftPosition"][2]).toDouble();
spice.instrumentPosition(instrumentPosition);
for (int i = 0; i < 3; i++) {
instrumentPosition[i] *= 1000.0;
spacecraftPosition[i] *= 1000.0;
}
// Fetch the J2000 -> body-fixed and J2000 -> camera rotation matrices via the ISIS Spice object
vector<double> j2000ToBodyFixedMatrixVector = spice.bodyRotation()->Matrix();
vector<double> j2000ToCameraMatrixVector = spice.instrumentRotation()->Matrix();
// Reformat vector-matrices to 3x3 rotation matrices
double j2000ToBodyFixedRotationMatrix[3][3] = {{0.0, 0.0, 0.0},
{0.0, 0.0, 0.0},
{0.0, 0.0, 0.0}};
double j2000ToCameraRotationMatrix[3][3] = {{0.0, 0.0, 0.0},
{0.0, 0.0, 0.0},
{0.0, 0.0, 0.0}};
for (int j = 0; j < 3; j++) {
for (int k = 0; k < 3; k++) {
j2000ToBodyFixedRotationMatrix[j][k] = j2000ToBodyFixedMatrixVector[3 * j + k];
j2000ToCameraRotationMatrix[j][k] = j2000ToCameraMatrixVector[3 * j + k];
}
}
// Compute Camera to Body Fixed rotation matrix
double cameraToBodyFixedRotationMatrix[3][3] = {{0.0, 0.0, 0.0},
{0.0, 0.0, 0.0},
{0.0, 0.0, 0.0}};
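// mxmt_c computes m1 * transpose(m2); here that is
// (body-fixed <- J2000) * (camera <- J2000)^T = (body-fixed <- camera),
// i.e. a rotation taking camera coordinates into body-fixed coordinates.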
mxmt_c(j2000ToBodyFixedRotationMatrix, j2000ToCameraRotationMatrix,
cameraToBodyFixedRotationMatrix);
PvlGroup inst = icube.label()->findGroup("Instrument", Pvl::Traverse);
QString instrumentId = (QString) inst["InstrumentId"];
QString spacecraftName = (QString) inst["SpacecraftName"];
if (spacecraftName == "Messenger") {
isisFocalPlane2SocetPlate[0][0] = 1.0;
isisFocalPlane2SocetPlate[1][1] = -1.0;
isisFocalPlane2SocetPlate[2][2] = -1.0;
}
//Calculate ographic coordinates of spacecraft position vector
double xyzLength = instrumentPosition[0] * instrumentPosition[0] +
instrumentPosition[1] * instrumentPosition[1];
double xyLength = sqrt(xyzLength);
xyzLength = sqrt (xyzLength + instrumentPosition[2] * instrumentPosition[2]);
double flattening = (radii[0] - radii[2]) / radii[0];
double lon = 0.0;
double lat = 0.0;
double height = 0.0;
recgeo_c (instrumentPosition, radii[0], flattening, &lon, &lat, &height);
// Calculate rotation matrix from Socet Set plate to ocentric ground coordinates
double socetPlateToOcentricGroundRotationMatrix[3][3] = {{0.0, 0.0, 0.0},
{0.0, 0.0, 0.0},
{0.0, 0.0, 0.0}};
mxmt_c (isisFocalPlane2SocetPlate, cameraToBodyFixedRotationMatrix,
socetPlateToOcentricGroundRotationMatrix);
// Populate the ocentric to ographic rotation matrix
double ocentricToOgraphicRotationMatrix[3][3] = {{0.0, 0.0, 0.0},
{0.0, 0.0, 0.0},
{0.0, 0.0, 0.0}};
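// The columns of this matrix are the local East, North and Up unit vectors at the
// spacecraft's ocentric longitude/latitude, expressed in body-fixed coordinates:
//   [ -sinLon  -sinLat*cosLon  cosLat*cosLon ]
//   [  cosLon  -sinLat*sinLon  cosLat*sinLon ]
//   [    0          cosLat         sinLat    ]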
double sinLon = instrumentPosition[1] / xyLength;
double cosLon = instrumentPosition[0] / xyLength;
double sinLat = instrumentPosition[2] / xyzLength;
double cosLat = xyLength / xyzLength;
ocentricToOgraphicRotationMatrix[0][0] = -sinLon;
ocentricToOgraphicRotationMatrix[1][0] = cosLon;
ocentricToOgraphicRotationMatrix[2][0] = 0.0;
ocentricToOgraphicRotationMatrix[0][1] = -sinLat * cosLon;
ocentricToOgraphicRotationMatrix[1][1] = -sinLat * sinLon;
ocentricToOgraphicRotationMatrix[2][1] = cosLat;
ocentricToOgraphicRotationMatrix[0][2] = cosLat * cosLon;
ocentricToOgraphicRotationMatrix[1][2] = cosLat * sinLon;
ocentricToOgraphicRotationMatrix[2][2] = sinLat;
// Compute the rotation matrix from Socet Set plate to ographic ground coordinates
// and extract the Euler angles to get the omega-phi-kappa attitude angles
double socetPlateToOgraphicGroundRotationMatrix[3][3] = {{0.0, 0.0, 0.0},
{0.0, 0.0, 0.0},
{0.0, 0.0, 0.0}};
mxm_c (socetPlateToOcentricGroundRotationMatrix, ocentricToOgraphicRotationMatrix,
socetPlateToOgraphicGroundRotationMatrix);
double omega = 0.0;
double phi = 0.0;
double kappa = 0.0;
m2eul_c (socetPlateToOgraphicGroundRotationMatrix, 3, 2, 1, &kappa, &phi, &omega);
// Return resulting geographic lat, lon, omega, phi, kappa in decimal degrees
// height in meters
//ographicCamPos[0] = lat * RAD2DEG;
//ographicCamPos[1] = lon * RAD2DEG;
//ographicCamPos[2] = height;
omegaPhiKappa[0] = omega * RAD2DEG;
omegaPhiKappa[1] = phi * RAD2DEG;
omegaPhiKappa[2] = kappa * RAD2DEG;
ofstream os;
QString isdFile = inFile.expanded().split(".",QString::SkipEmptyParts).at(0)+".isd";
os.open(isdFile.toLatin1().data(), ios::out);
QString modelName("MDIS_SENSOR_MODEL");
isdList.push_back(pair<string,double>("ISD_LINE_PRINCIPAL_POINT_PIXELS",boresightLine));
isdList.push_back(pair<string,double>("ISD_SAMPLE_PRINCIPAL_POINT_PIXELS",boresightSample));
isdList.push_back(pair<string,double>("ISD_FOCAL_LENGTH_PIXELS",double(naifKeywords["TempDependentFocalLength"])));
isdList.push_back(pair<string,double>("ISD_NUMBER_OF_LINES",(double)(icube.lineCount())));
isdList.push_back(pair<string,double>("ISD_NUMBER_OF_SAMPLES",(double)(icube.sampleCount())));
isdList.push_back(pair<string,double>("ISD_SEMI_MAJOR_AXIS_METERS",radii[0]));
isdList.push_back(pair<string,double>("ISD_SEMI_MINOR_AXIS_METERS",radii[1]));
isdList.push_back(pair<string,double>("ISD_MIN_ELEVATION_METERS",0.0));
isdList.push_back(pair<string,double>("ISD_MAX_ELEVATION_METERS",0.0));
isdList.push_back(pair<string,double>("ISD_X_SENSOR_ORIG_METERS",instrumentPosition[0]));
isdList.push_back(pair<string,double>("ISD_X_SENSOR_CURR_METERS",instrumentPosition[0]));
isdList.push_back(pair<string,double>("ISD_Y_SENSOR_ORIG_METERS",instrumentPosition[1]));
isdList.push_back(pair<string,double>("ISD_Y_SENSOR_CURR_METERS",instrumentPosition[1]));
isdList.push_back(pair<string,double>("ISD_Z_SENSOR_ORIG_METERS",instrumentPosition[2]));
isdList.push_back(pair<string,double>("ISD_Z_SENSOR_CURR_METERS",instrumentPosition[2]));
isdList.push_back(pair<string,double>("ISD_OMEGA_ORIG_RADIANS",omegaPhiKappa[0]));
isdList.push_back(pair<string,double>("ISD_OMEGA_CURR_RADIANS",omegaPhiKappa[0]));
isdList.push_back(pair<string,double>("ISD_PHI_ORIG_RADIANS",omegaPhiKappa[1]));
isdList.push_back(pair<string,double>("ISD_PHI_CURR_RADIANS",omegaPhiKappa[1]));
isdList.push_back(pair<string,double>("ISD_KAPPA_ORIG_RADIANS",omegaPhiKappa[2]));
isdList.push_back(pair<string,double>("ISD_KAPPA_CURR_RADIANS",omegaPhiKappa[2]));
isdList.push_back(pair<string,double>("ISD_ORIGINAL_PARAMETER_COVARIANCE",0.0));
isdList.push_back(pair<string,double>("ISD_CURRENT_PARAMETER_COVARIANCE",0.0));
os << "ISD_SENSOR_MODEL_NAME\t";
os << modelName << endl;
os << setprecision(prec);
for (unsigned int i =0;i < isdList.size();i++) {
pair<string,double> isdNode=isdList[i];
os << isdNode.first << "\t\t" << isdNode.second << endl;
}
os.close();
isdJSON(&isdList,modelName.toStdString(),"json.isd");
}
}
<file_sep>/src/apps/set/CMakeLists.txt
INCLUDE_DIRECTORIES("${CMAKE_SOURCE_DIR}/include/mdis")
INCLUDE_DIRECTORIES("${CMAKE_SOURCE_DIR}/include/csm")
INCLUDE_DIRECTORIES("${CMAKE_SOURCE_DIR}/include/")
ADD_EXECUTABLE(set set.cpp)
# Find libcsmapi.so
FIND_LIBRARY(CSMAPI_LIBRARY csmapi "${CMAKE_SOURCE_DIR}/lib" "${CMAKE_SOURCE_DIR}/_build/INSTALL/lib")
MESSAGE(STATUS "CSMAPI_LIBRARY: " ${CSMAPI_LIBRARY})
TARGET_LINK_LIBRARIES(set dl ${CSMAPI_LIBRARY} IsdReader MdisNacSensorModel MdisPlugin gdal)
<file_sep>/tests/MdisWacSensorModelTest.cpp
#include <string>
#include <csm/Isd.h>
#include <gtest/gtest.h>
#include <MdisPlugin.h>
#include <MdisNacSensorModel.h>
#include <IsdReader.h>
#include "MdisWacSensorModelTest.h"
bool MdisWacSensorModelTest::setupFixtureFailed = false;
std::string MdisWacSensorModelTest::setupFixtureError;
csm::Isd *MdisWacSensorModelTest::isd = nullptr;
std::string MdisWacSensorModelTest::dataFile;
MdisPlugin MdisWacSensorModelTest::mdisPlugin;
MdisNacSensorModel *MdisWacSensorModelTest::mdisModel = nullptr;
/*
* Test imageToGround - truth extracted as follows:
* setisis isis3
* qview /work/projects/IAA_camera/data/EN100790102M.cub
* F (selects "Find Tool")
* On top toolbar, select "Find Point"
* Type in 513, 513 for Sample/Line (ISIS3 pixel center = 1,1)
* Click "Record Point"
* Check "XYZ" -> { 1132.18, -1597.75, 1455.66 }
*/
TEST_F(MdisWacSensorModelTest, imageToGroundCenter) {
// gtest #247 work-around
if (setupFixtureFailed) {
FAIL() << setupFixtureError;
}
csm::ImageCoord point(512, 512);
double height = 0.0;
csm::EcefCoord xyz = mdisModel->imageToGround(point, height);
//std::cout << std::cout.precision(15) << "\n(512,512) XYZ: "
// << xyz.x << ", " << xyz.y << ", " << xyz.z << "\n\n";
double truth[] = {-73589.5516508502, 562548.342040933, 2372508.44060771};
EXPECT_NEAR(truth[0], xyz.x, 0.01);
EXPECT_NEAR(truth[1], xyz.y, 0.01);
EXPECT_NEAR(truth[2], xyz.z, 0.01);
}
TEST_F(MdisWacSensorModelTest, imageToGroundOffCenter){
if (setupFixtureFailed) {
FAIL() << setupFixtureError;
}
csm::ImageCoord point(100, 100);
double height = 0.0;
csm::EcefCoord xyz = mdisModel->imageToGround(point, height);
//std::cout << std::cout.precision(15) << "\n(100,100) XYZ: "
// << xyz.x << ", " << xyz.y << ", " << xyz.z << "\n\n";
double truth[] = {-48020.2164819883, 539322.805489926, 2378549.41724731};
EXPECT_NEAR(truth[0], xyz.x, 0.01);
EXPECT_NEAR(truth[1], xyz.y, 0.01);
EXPECT_NEAR(truth[2], xyz.z, 0.01);
}
// Test groundToImage
TEST_F(MdisWacSensorModelTest, groundToImageCenter) {
// gtest #247 work-around
if (setupFixtureFailed) {
FAIL() << setupFixtureError;
}
csm::EcefCoord xyz {-73589.5516508502, 562548.342040933, 2372508.44060771};
csm::ImageCoord pt = mdisModel->groundToImage(xyz);
// Use 1/2 pixel as tolerance
EXPECT_NEAR(512, pt.line, 0.1);
EXPECT_NEAR(512, pt.samp, 0.1);
}
TEST_F(MdisWacSensorModelTest, groundToImageOffCenter) {
if (setupFixtureFailed) {
FAIL() << setupFixtureError;
}
csm::EcefCoord xyz {-48020.2164819883, 539322.805489926, 2378549.41724731};
xyz = mdisModel->imageToGround(csm::ImageCoord(100,100),0);
csm::ImageCoord pt = mdisModel->groundToImage(xyz);
EXPECT_NEAR(100, pt.line, 0.1);
EXPECT_NEAR(100, pt.samp, 0.1);
}
<file_sep>/CMakeLists.txt
CMAKE_MINIMUM_REQUIRED (VERSION 3.0)
include_directories(include)
include_directories(INSTALL/cspice/include)
include(CheckCXXCompilerFlag)
# Define the version and project name
SET(CameraModel_Version_Major 0)
SET(CameraModel_Version_Minor 1)
PROJECT(CameraModel)
# Set additional module search paths
SET(CMAKE_MODULE_PATH ${CMAKE_SOURCE_DIR}/cmake ${CMAKE_MODULE_PATH})
#Setup to use CPP11
ADD_COMPILE_OPTIONS(-std=c++11)
# Setup for external dependencies (either use system or install)
if( NOT INSTALL_DEPENDENCIES_DIR )
SET( INSTALL_DEPENDENCIES_DIR ${CMAKE_BINARY_DIR}/INSTALL CACHE STRING "Install directory for dependencies")
endif()
# Add external project capability
INCLUDE(ExternalProject)
# Provide the ability to download dependencies and build (default: download)
OPTION (USE_SYSTEM_EIGEN "Use system libraries for Eigen" OFF)
if (${USE_SYSTEM_EIGEN} MATCHES "OFF")
include("${CMAKE_MODULE_PATH}/External-Eigen.cmake")
endif()
FIND_PACKAGE(Eigen3 3.4 REQUIRED)
# Find Spice
OPTION (USE_SYSTEM_SPICE "Use system libraries for the NAIFSpice Toolkit" OFF)
if (${USE_SYSTEM_SPICE} MATCHES "OFF")
INCLUDE("${CMAKE_MODULE_PATH}/External-Spice.cmake")
endif()
FIND_PACKAGE(Spice)
# Find GDAL
FIND_PACKAGE(GDAL REQUIRED)
# whether or not tests should be built
OPTION (ENABLE_TESTS "Build the tests?" OFF)
# Add the subdirs that are being built
ADD_SUBDIRECTORY(src)
# To enable tests pass -DENABLE_TESTS=true to the cmake command
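# For example, from a build directory (a typical invocation; other flags are optional):
#   cmake -DENABLE_TESTS=true .. && make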
IF (ENABLE_TESTS)
# Download and unpack googletest at configure time
CONFIGURE_FILE(tests/CMakeGTEST.txt.in
googletest-download/CMakeLists.txt)
EXECUTE_PROCESS(COMMAND ${CMAKE_COMMAND} -G "${CMAKE_GENERATOR}" .
WORKING_DIRECTORY ${CMAKE_BINARY_DIR}/googletest-download )
EXECUTE_PROCESS(COMMAND ${CMAKE_COMMAND} --build .
WORKING_DIRECTORY ${CMAKE_BINARY_DIR}/googletest-download )
# Add googletest directly to our build. This adds
# the following targets: gtest, gtest_main, gmock
# and gmock_main
ADD_SUBDIRECTORY(${CMAKE_BINARY_DIR}/googletest-src
${CMAKE_BINARY_DIR}/googletest-build)
# Now simply link your own targets against gtest, gmock,
# etc. as appropriate
ADD_SUBDIRECTORY(tests)
ENDIF(ENABLE_TESTS)
# Define the test data directory (for gtesting)
SET(TESTDATADIR ${CMAKE_SOURCE_DIR}/tests/data)<file_sep>/src/apps/spice2isd/spice2isd.cpp
#include "SpiceController.h"
#include "CSpiceIsd.h"
#include <gdal/gdal.h>
#include <SpiceUsr.h>
#include <iostream>
#include <iomanip>
#include <string>
#include <utility>
#include <vector>
#include <gdal/gdal.h>
#include <gdal/gdal_priv.h>
#include <gdal/cpl_conv.h>
#include <gdal/cpl_string.h>
using namespace std;
void cubeArray(vector<vector<float> > *cube, GDALRasterBand *poBand);
int main(int argc,char *argv[]) {
int ikid = 236820;
vector<pair<string,SpiceDouble> > isdList;
CSpiceIsd cspice("blah.cub");
SpiceController sc;
string kernel1("data/msgr_v231.tf");
string kernel2("data/msgr_v231.tf");
string kernel3("data/pck00010_msgr_v23.tpc");
string kernel4("data/msgr_dyn_v600.tf");
string kernel5("data/msgr_v231.tf");
string kernel6("data/msgr_de405_de423s.bsp");
string kernel7("data/msgr_mdis_sc050727_100302_sub_v1.bc");
string kernel8("data/msgr_mdis_gm040819_150430v1.bc");
string kernel9("data/naif0011.tls");
string kernel10("data/messenger_2548.tsc");
furnsh_c("data/msgr_mdis_v160.ti");
furnsh_c("data/msgr_v231.tf");
furnsh_c("data/pck00010_msgr_v23.tpc");
furnsh_c("data/msgr_dyn_v600.tf");
furnsh_c("data/msgr_v231.tf");
furnsh_c("data/msgr_de405_de423s.bsp");
furnsh_c("data/msgr_mdis_sc050727_100302_sub_v1.bc");
furnsh_c("data/msgr_mdis_gm040819_150430v1.bc");
furnsh_c("data/naif0011.tls");
furnsh_c("data/messenger_2548.tsc");
SpiceInt code;
SpiceInt found;
bodn2c_c("MESSENGER",&code,&found);
SpiceDouble focalLength;
SpiceInt numLines;
SpiceInt numSamples;
SpiceInt n;
SpiceDouble odt_x[9];
SpiceDouble odt_y[9];
SpiceDouble flEpsilon;
SpiceDouble pixelPitch;
SpiceDouble ccdCenter;
SpiceDouble ifov;
SpiceDouble boresight[3];
SpiceDouble transX[3];
SpiceDouble transY[3];
SpiceDouble itranss[3];
SpiceDouble itransl[3];
SpiceDouble startSample;
SpiceDouble startLine;
int prec = 10;
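// gdpool_c / gipool_c look up double / integer kernel-pool variables:
// (variable name, start index, max values to return, &n values actually returned,
//  output array, &found flag).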
gdpool_c ("INS-236820_FOCAL_LENGTH", 0,1, &n, &focalLength, &found );
gdpool_c ("INS-236820_FL_UNCERTAINTY", 0,1, &n, &flEpsilon, &found );
gipool_c ("INS-236820_PIXEL_LINES", 0,1, &n, &numLines, &found );
gipool_c ("INS-236820_PIXEL_SAMPLES", 0,1, &n, &numSamples, &found );
gdpool_c ("INS-236820_PIXEL_PITCH", 0,1, &n, &pixelPitch, &found );
gdpool_c ("INS-236820_CCD_CENTER", 0,1, &n, &ccdCenter, &found );
gdpool_c ("INS-236820_IFOV", 0,1, &n, &ifov, &found );
gdpool_c ("INS-236820_BORESIGHT", 0,3, &n, boresight, &found );
gdpool_c ("INS-236820_TRANSX", 0,3, &n, transX, &found );
gdpool_c ("INS-236820_TRANSY", 0,3, &n, transY, &found );
gdpool_c ("INS-236820_ITRANSS", 0,3, &n, itranss, &found );
gdpool_c ("INS-236820_ITRANSL", 0,3, &n, itransl, &found );
gdpool_c ("INS-236820_OD_T_X", 0,9, &n, odt_x, &found );
gdpool_c ("INS-236820_OD_T_Y", 0,9, &n, odt_y, &found );
gdpool_c ("INS-236820_FPUBIN_START_SAMPLE", 0,1, &n, &startSample, &found );
gdpool_c ("INS-236820_FPUBIN_START_LINE", 0,1, &n, &startLine, &found );
string cubePath("CN0108840044M_IF_5_NAC_spiced.cub");
GDALDataset *poDataset;
GDALRasterBand *poBand;
int nBlockXSize,nBlockYSize;
GDALAllRegister();
poDataset = (GDALDataset *)GDALOpen(cubePath.c_str(),GA_ReadOnly);
if(poDataset == NULL) {
cout << "Could not open the:" + cubePath << endl;
}
else {
poBand = poDataset->GetRasterBand(1);
poBand->GetBlockSize(&nBlockXSize,&nBlockYSize);
//Read a band of data
vector<vector<float> > cubeMatrix;
cubeArray(&cubeMatrix,poBand);
for (int i =0; i < cubeMatrix.size(); i++ ) {
vector<float> v = cubeMatrix[i];
for (int j = 0; j < v.size(); j++) {
cout << v[j] << endl;
}//end inner-for
}//end outer-for
} //end else
}
/**
* @brief cubeArray: Translates a GDALRasterBand into a 2D vector matrix
* @param cube: The 2D vector matrix which is output by this function.
* @param poBand: The GDALRasterBand obtained from the ISIS3 cube.
*/
void cubeArray(vector <vector<float> > *cube,GDALRasterBand *poBand) {
vector<float> tempVector;
float *pafScanline;
int nsamps = poBand->GetXSize();
int nlines = poBand->GetYSize();
for (int j = 0;j<nlines;j++) {
pafScanline = (float *)CPLMalloc(sizeof(float)*nsamps);
poBand->RasterIO(GF_Read,0,j,nsamps,1,pafScanline,nsamps,1,GDT_Float32,0,0);
for (int i = 0;i < nsamps;i++) {
tempVector.push_back(pafScanline[i]);
}
cube->push_back(tempVector);
tempVector.clear();
//free the memory allocated to store the current scanline
CPLFree(pafScanline);
}
}
<file_sep>/tests/runTests.cpp
#include <string>
#include <gtest/gtest.h>
// Set up a global path to a test data directory
std::string g_dataPath;
// Run the tests (see CMakeLists.txt)
int main(int argc, char **argv) {
::testing::InitGoogleTest(&argc, argv);
// If we have an argument, this is for the test data directory
// Root CMakeLists.txt defines the test data directory
if (argc == 2) {
g_dataPath = argv[1];
}
return RUN_ALL_TESTS();
}
<file_sep>/src/CMakeLists.txt
ADD_SUBDIRECTORY(apps)
ADD_SUBDIRECTORY(objs)
ADD_SUBDIRECTORY(transformations)
<file_sep>/src/apps/CMakeLists.txt
#ADD_SUBDIRECTORY(mdis2isd)
ADD_SUBDIRECTORY(spice2isd)
ADD_SUBDIRECTORY(set)
<file_sep>/src/apps/spice2isd/SpiceController.h
#ifndef SpiceController_h
#define SpiceController_h
#include <iomanip>
#include <iostream>
#include <numeric>
#include <sstream>
#include <string>
#include <vector>
using namespace std;
class SpiceController {
public:
//Default constructor
SpiceController():m_kernlist(),m_furnish(true){ }
/** Returns the number of kernels found and/or loaded */
int size() const {
return (m_kernlist.size());
}
void load();
virtual ~SpiceController() {
unload();
}
void loadKernel(string &kernelFile);
private:
vector<string> m_kernlist; //!< The list of kernels
bool m_furnish; //!< Load the kernels found?
void unload();
};
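// Minimal usage sketch (the kernel path below is illustrative):
//   SpiceController sc;
//   std::string lsk("data/naif0011.tls");
//   sc.loadKernel(lsk);  // furnishes the kernel and records it so the destructor unloads it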
#endif
<file_sep>/src/apps/spice2isd/CSpiceIsd.h
#ifndef CSPICEISD_H
#define CSPICEISD_H
#include <utility>
#include <vector>
#include <string>
using namespace std;
class CSpiceIsd
{
public:
CSpiceIsd(string cubeFile);
~CSpiceIsd();
void isdJSON(vector<pair<string,double> > * isdData,string sensorModel,
string filePath);
void writeISD();
private:
string m_cubeFileString;
bool m_validCube;
static const int prec =16;
};
#endif
<file_sep>/tests/MdisNacSensorModelTest.cpp
#include <string>
#include <csm/Isd.h>
#include <gtest/gtest.h>
#include <MdisPlugin.h>
#include <MdisNacSensorModel.h>
#include <IsdReader.h>
#include "MdisNacSensorModelTest.h"
bool MdisNacSensorModelTest::setupFixtureFailed = false;
std::string MdisNacSensorModelTest::setupFixtureError;
csm::Isd *MdisNacSensorModelTest::isd = nullptr;
std::string MdisNacSensorModelTest::dataFile;
MdisPlugin MdisNacSensorModelTest::mdisPlugin;
MdisNacSensorModel *MdisNacSensorModelTest::mdisModel = nullptr;
/*
* Test imageToGround - truth extracted as follows:
* setisis isis3
* qview /work/projects/IAA_camera/data/EN100790102M.cub
* F (selects "Find Tool")
* On top toolbar, select "Find Point"
* Type in 513, 513 for Sample/Line (ISIS3 pixel center = 1,1)
* Click "Record Point"
* Check "XYZ" -> { 1132.18, -1597.75, 1455.66 }
*/
TEST_F(MdisNacSensorModelTest, imageToGroundCenter) {
// gtest #247 work-around
if (setupFixtureFailed) {
FAIL() << setupFixtureError;
}
csm::ImageCoord point(512., 512.);
double height = 0.0;
csm::EcefCoord xyz = mdisModel->imageToGround(point, height);
double truth[] = { 1129.25*1000, -1599.26*1000, 1455.28*1000 };
EXPECT_EQ(truth[0], xyz.x);
EXPECT_EQ(truth[1], xyz.y);
EXPECT_EQ(truth[2], xyz.z);
}
TEST_F(MdisNacSensorModelTest, imageToGroundOffCenter){
if (setupFixtureFailed) {
FAIL() << setupFixtureError;
}
csm::ImageCoord point(100, 100);
double height = 0.0;
csm::EcefCoord xyz = mdisModel->imageToGround(point, height);
double truth[] = { 1115.95*1000, -1603.44*1000, 1460.93*1000 };
EXPECT_EQ(truth[0], xyz.x);
EXPECT_EQ(truth[1], xyz.y);
EXPECT_EQ(truth[2], xyz.z);
}
// Test getIlluminationDirection
TEST_F(MdisNacSensorModelTest, getIlluminationDirection1) {
// gtest #247 work-around
if (setupFixtureFailed) {
FAIL() << setupFixtureError;
}
// sun position based on EN1007907102M.json
// -31648725087.588726, -60633907522.72863, -38729485.77334732
csm::EcefCoord northPole { 0., 0., 2439.4 * 1000 };
csm::EcefVector illuminationDirection = mdisModel->getIlluminationDirection(northPole);
EXPECT_NEAR(31648725087.588726, illuminationDirection.x, 0.1);
EXPECT_NEAR(60633907522.72863, illuminationDirection.y, 0.1);
EXPECT_NEAR(2439.4*1000 - -38729485.77334732, illuminationDirection.z, 0.1);
}
// Test getSensorPosition(ImageCoord)
TEST_F(MdisNacSensorModelTest, getSensorPositionCoord) {
// gtest #247 work-around
if (setupFixtureFailed) {
FAIL() << setupFixtureError;
}
csm::EcefCoord sensorPos = mdisModel->getSensorPosition(csm::ImageCoord(512.0, 512.0));
EXPECT_NEAR(1728181.03, sensorPos.x, 0.01);
EXPECT_NEAR(-2088202.59, sensorPos.y, 0.01);
EXPECT_NEAR(2082707.61, sensorPos.z, 0.01);
}
TEST_F(MdisNacSensorModelTest, getSensorPositionCoordOutOfBounds) {
// gtest #247 work-around
if (setupFixtureFailed) {
FAIL() << setupFixtureError;
}
// Test all possibilites of logical condition, image size = 1024x1024
EXPECT_THROW({
// Line is negative
csm::EcefCoord sensorPos = mdisModel->getSensorPosition(csm::ImageCoord(-1.0, 1.0));
},
csm::Error);
EXPECT_THROW({
// Sample is negative
csm::EcefCoord sensorPos = mdisModel->getSensorPosition(csm::ImageCoord(1.0, -1.0));
},
csm::Error);
EXPECT_THROW({
// Line > 1024
csm::EcefCoord sensorPos = mdisModel->getSensorPosition(csm::ImageCoord(1100.0, 1.0));
},
csm::Error);
EXPECT_THROW({
// Sample > 1024
csm::EcefCoord sensorPos = mdisModel->getSensorPosition(csm::ImageCoord(1.0, 1100.0));
},
csm::Error);
}
// Test imageToProximateImagingLocus
TEST_F(MdisNacSensorModelTest, imageToProximateImagingLocus1) {
// gtest #247 work-around
if (setupFixtureFailed) {
FAIL() << setupFixtureError;
}
csm::ImageCoord point(512.0, 512.0);
csm::EcefCoord ground(0,0,0);
csm::EcefLocus proximateLocus = mdisModel->imageToProximateImagingLocus(point, ground);
double spacecraftX = atof(isd->param("x_sensor_origin").c_str());
double spacecraftY = atof(isd->param("y_sensor_origin").c_str());
double spacecraftZ = atof(isd->param("z_sensor_origin").c_str());
EXPECT_EQ(spacecraftX, proximateLocus.point.x);
EXPECT_EQ(spacecraftY, proximateLocus.point.y);
EXPECT_EQ(spacecraftZ, proximateLocus.point.z);
EXPECT_NEAR(-0.6015027, proximateLocus.direction.x, tolerance);
EXPECT_NEAR(0.4910591, proximateLocus.direction.y, tolerance);
EXPECT_NEAR(-0.630123, proximateLocus.direction.z, tolerance);
}
TEST_F(MdisNacSensorModelTest, distortionModel2) {
double dx = -6.30;
double dy = 6.40;
double udx = 0.0;
double udy = 0.0;
double isis3_udx = -6.3036234000160273893698104075156152248383;
double isis3_udy = 6.3445144408882310216313271666876971721649;
testMath.setFocalPlane(dx,dy,udx,udy);
EXPECT_NEAR(udx,isis3_udx,tolerance);
EXPECT_NEAR(udy,isis3_udy,tolerance);
}
TEST_F(MdisNacSensorModelTest, getImageStart) {
csm::ImageCoord start = mdisModel->getImageStart();
EXPECT_EQ(start.line, 1.0);
EXPECT_EQ(start.samp, 9.0);
}
TEST_F(MdisNacSensorModelTest, getImageSize) {
csm::ImageVector size = mdisModel->getImageSize();
EXPECT_EQ(size.line, 1024);
EXPECT_EQ(size.samp, 1024);
}
TEST_F(MdisNacSensorModelTest, getImageTime) {
csm::ImageCoord point;
point.samp = 500;
point.line = 500;
double time = mdisModel->getImageTime(point);
EXPECT_NEAR(time, 418855170.49299997, tolerance);
}
TEST_F(MdisNacSensorModelTest, getSensorPosition) {
csm::ImageCoord point;
point.samp = 500;
point.line = 500;
csm::EcefCoord position = mdisModel->getSensorPosition(point);
EXPECT_NEAR(position.x, 1728357.7031238307, tolerance);
EXPECT_NEAR(position.y, -2088409.0061042644, tolerance);
EXPECT_NEAR(position.z, 2082873.9280557402, tolerance);
}
TEST_F(MdisNacSensorModelTest, getNumParameters) {
EXPECT_EQ(mdisModel->getNumParameters(), 6);
}
TEST_F(MdisNacSensorModelTest, getParametersName) {
EXPECT_EQ(mdisModel->getParameterName(0), "X Sensor Position (m)");
EXPECT_EQ(mdisModel->getParameterName(3), "Omega (radians)");
}
TEST_F(MdisNacSensorModelTest, getParameterUnits) {
EXPECT_EQ(mdisModel->getParameterUnits(0), "m");
EXPECT_EQ(mdisModel->getParameterUnits(3), "radians");
}
TEST_F(MdisNacSensorModelTest, testSetGetParameterValue) {
mdisModel->setParameterValue(3, 7.5);
EXPECT_NEAR(mdisModel->getParameterValue(3), 7.5, tolerance);
mdisModel->setParameterValue(3, 2.25613869898165);
EXPECT_NEAR(mdisModel->getParameterValue(3), 2.25613869898165, tolerance);
}
<file_sep>/src/apps/spice2isd/SpiceController.cpp
#include <fstream>
#include <iomanip>
#include <iostream>
#include <numeric>
#include <sstream>
#include <string>
#include <vector>
#include <SpiceUsr.h>
#include "SpiceController.h"
//Compiling class
//g++ *.cpp -c -Wall -Wextra -I../../../include -L../../../lib/
using namespace std;
void SpiceController::unload() {
for (unsigned int i = 0; i < m_kernlist.size();i++) {
unload_c(m_kernlist[i].c_str() );
}
}
void SpiceController::load() {
cout <<"Loading Spice kernels" << endl;
}
void SpiceController::loadKernel(string &kernelFile) {
fstream inFile(kernelFile.c_str());
if(!inFile.good() ) {
cout << "Could not load: " << kernelFile << endl;
return;
}
if(m_furnish) {
furnsh_c(kernelFile.c_str() );
m_kernlist.push_back(kernelFile);
cout << kernelFile << " loaded." <<endl;
}
}
<file_sep>/src/transformations/CMakeLists.txt
INCLUDE_DIRECTORIES("${EIGEN3_INCLUDE_DIR}")
INCLUDE_DIRECTORIES("${CMAKE_SOURCE_DIR}/include/transformations")
ADD_LIBRARY(Transformations SHARED transformations.cpp)
<file_sep>/include/mdis/IsdReader.h
#ifndef IsdReader_h
#define IsdReader_h
#include <csm/Isd.h>
#include <json/json.hpp>
using namespace std;
using json = nlohmann::json;
// These are the different data types supported by the json library (with the exception of
// UNKNOWN, which is used to handle extraneous input)
enum DataType {
INT,
UINT,
FLOAT,
STRING,
BOOL,
NULL8,
UNKNOWN
};
void addParam(csm::Isd &isd, json::iterator, DataType dt, int prec=12);
DataType checkType(json::value_type obj);
csm::Isd *readISD(string filename);
void printISD(const csm::Isd &isd);
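// Typical usage sketch (the ISD file name below is illustrative):
//   csm::Isd *isd = readISD("EN1007907102M.json");
//   if (isd) { printISD(*isd); delete isd; }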
#endif<file_sep>/src/apps/mdis2isd/mdis2isd.h
#ifndef MDIS2ISD_H
#define MDIS2ISD_H
#include "Cube.h"
#include "CameraPointInfo.h"
#include "FileName.h"
class QString;
namespace Isis{
class mdis2isd
{
public:
mdis2isd(QString cubeFile);
~mdis2isd();
void isdJSON(std::vector<std::pair<std::string,double> > * isdData,std::string sensorModel,
std::string filePath);
void writeISD();
private:
QString m_cubeFileString;
CameraPointInfo m_campt;
bool m_validCube;
static const int prec =16;
};
}
#endif // MDIS2ISD_H
<file_sep>/tests/MdisPluginTest.cpp
#include <MdisPlugin.h>
#include <MdisNacSensorModel.h>
#include <csm/Error.h>
#include <csm/Isd.h>
#include <csm/Model.h>
#include <csm/Version.h>
#include <gtest/gtest.h>
class MdisPluginTest : public ::testing::Test {
protected:
virtual void SetUp() {
mdisNacName = MdisNacSensorModel::_SENSOR_MODEL_NAME;
}
MdisPlugin defaultMdisPlugin;
std::string mdisNacName;
};
TEST_F(MdisPluginTest, getPluginName) {
EXPECT_EQ(defaultMdisPlugin.getPluginName(), "UsgsAstroFrameMdisPluginCSM");
}
TEST_F(MdisPluginTest, getManufacturer) {
EXPECT_EQ(defaultMdisPlugin.getManufacturer(), "UsgsAstrogeology");
}
TEST_F(MdisPluginTest, getReleaseDate) {
EXPECT_EQ(defaultMdisPlugin.getReleaseDate(), "TBA");
}
TEST_F(MdisPluginTest, getCsmVersion) {
const csm::Version v(3, 1, 0);
// const discard qualifier if we don't use the version() method on our csm::Version's
EXPECT_EQ(defaultMdisPlugin.getCsmVersion().version(), v.version());
}
TEST_F(MdisPluginTest, getNumModels) {
EXPECT_EQ(defaultMdisPlugin.getNumModels(), 1);
}
TEST_F(MdisPluginTest, getModelName) {
EXPECT_EQ(defaultMdisPlugin.getModelName(0), mdisNacName);
}
TEST_F(MdisPluginTest, getModelFamily) {
EXPECT_EQ(defaultMdisPlugin.getModelFamily(0), "Raster");
}
TEST_F(MdisPluginTest, getModelVersion) {
const csm::Version v(1, 0, 0);
// const discard qualifier if we don't use the version() method on our csm::Version's
EXPECT_EQ(defaultMdisPlugin.getModelVersion("blah").version(), v.version());
}
TEST_F(MdisPluginTest, constructModelFromISD) {
// Empty (i.e. invalid) ISD
EXPECT_THROW({
csm::Isd emptyIsd;
csm::Model *model = defaultMdisPlugin.constructModelFromISD(emptyIsd, mdisNacName);
},
csm::Error);
// ISD is not supported (wrong sensor model name)
EXPECT_THROW({
csm::Isd catSensor;
csm::Model *model = defaultMdisPlugin.constructModelFromISD(catSensor, "catCamera");
},
csm::Error);
}
<file_sep>/src/apps/mdis2isd/mdis2isdapp.cpp
#include <QString>
#include "mdis2isd.h"
#include "IException.h"
#include "Preference.h"
#include <iostream>
using namespace Isis;
using namespace std;
int main(int argc,char *argv[]) {
Preference::Preferences(true);
if (argc != 2) {
cout <<"Please enter the path to a Messenger cube" << endl;
return 0;
}
else {
QString cubePath(argv[1]);
cout << cubePath << endl;
mdis2isd m1(cubePath);
m1.writeISD();
return 0;
}
}
<file_sep>/src/objs/MdisPlugin.cpp
#include "MdisPlugin.h"
#include <cstdlib>
#include <string>
#include <csm/csm.h>
#include <csm/Error.h>
#include <csm/Plugin.h>
#include <csm/Warning.h>
#include "MdisNacSensorModel.h"
// Create static instance of self for plugin registration to work with csm::Plugin
const MdisPlugin MdisPlugin::m_registeredPlugin;
MdisPlugin::MdisPlugin() {
}
MdisPlugin::~MdisPlugin() {
}
std::string MdisPlugin::getPluginName() const {
return "UsgsAstroFrameMdisPluginCSM";
}
std::string MdisPlugin::getManufacturer() const {
return "UsgsAstrogeology";
}
std::string MdisPlugin::getReleaseDate() const {
return "TBA";
}
csm::Version MdisPlugin::getCsmVersion() const {
return csm::Version(3, 1, 0);
}
size_t MdisPlugin::getNumModels() const {
return 1;
}
std::string MdisPlugin::getModelName(size_t modelIndex) const {
return MdisNacSensorModel::_SENSOR_MODEL_NAME;
}
std::string MdisPlugin::getModelFamily(size_t modelIndex) const {
return "Raster";
}
csm::Version MdisPlugin::getModelVersion(const std::string &modelName) const {
return csm::Version(1, 0, 0);
}
bool MdisPlugin::canModelBeConstructedFromState(const std::string &modelName,
const std::string &modelState,
csm::WarningList *warnings) const {
return false;
}
bool MdisPlugin::canModelBeConstructedFromISD(const csm::Isd &imageSupportData,
const std::string &modelName,
csm::WarningList *warnings) const {
if (modelName != MdisNacSensorModel::_SENSOR_MODEL_NAME) {
return false;
}
return true;
}
csm::Model *MdisPlugin::constructModelFromState(const std::string&modelState,
csm::WarningList *warnings) const {
return NULL;
}
csm::Model *MdisPlugin::constructModelFromISD(const csm::Isd &imageSupportData,
const std::string &modelName,
csm::WarningList *warnings) const {
// Check if the sensor model can be constructed from ISD given the model name
if (!canModelBeConstructedFromISD(imageSupportData, modelName)) {
throw csm::Error(csm::Error::ISD_NOT_SUPPORTED,
"Sensor model support data provided is not supported by this plugin",
"MdisPlugin::constructModelFromISD");
}
MdisNacSensorModel *sensorModel = new MdisNacSensorModel();
// Keep track of necessary keywords that are missing from the ISD.
std::vector<std::string> missingKeywords;
sensorModel->m_startingDetectorSample =
atof(imageSupportData.param("starting_detector_sample").c_str());
sensorModel->m_startingDetectorLine =
atof(imageSupportData.param("starting_detector_line").c_str());
sensorModel->m_targetName = imageSupportData.param("target_name");
sensorModel->m_ifov = atof(imageSupportData.param("ifov").c_str());
sensorModel->m_instrumentID = imageSupportData.param("instrument_id");
if (imageSupportData.param("instrument_id") == "") {
missingKeywords.push_back("instrument_id");
}
sensorModel->m_focalLength = atof(imageSupportData.param("focal_length").c_str());
if (imageSupportData.param("focal_length") == "") {
missingKeywords.push_back("focal_length");
}
sensorModel->m_focalLengthEpsilon =
atof(imageSupportData.param("focal_length_epsilon").c_str());
sensorModel->m_currentParameterValue[0] =
atof(imageSupportData.param("x_sensor_origin").c_str());
sensorModel->m_currentParameterValue[1] =
atof(imageSupportData.param("y_sensor_origin").c_str());
sensorModel->m_currentParameterValue[2] =
atof(imageSupportData.param("z_sensor_origin").c_str());
if (imageSupportData.param("x_sensor_origin") == "") {
missingKeywords.push_back("x_sensor_origin");
}
if (imageSupportData.param("y_sensor_origin") == "") {
missingKeywords.push_back("y_sensor_origin");
}
if (imageSupportData.param("z_sensor_origin") == "") {
missingKeywords.push_back("z_sensor_origin");
}
sensorModel->m_spacecraftVelocity[0] =
atof(imageSupportData.param("x_sensor_velocity").c_str());
sensorModel->m_spacecraftVelocity[1] =
atof(imageSupportData.param("y_sensor_velocity").c_str());
sensorModel->m_spacecraftVelocity[2] =
atof(imageSupportData.param("z_sensor_velocity").c_str());
// sensor velocity not strictly necessary?
sensorModel->m_sunPosition[0] =
atof(imageSupportData.param("x_sun_position").c_str());
sensorModel->m_sunPosition[1] =
atof(imageSupportData.param("y_sun_position").c_str());
sensorModel->m_sunPosition[2] =
atof(imageSupportData.param("z_sun_position").c_str());
// sun position is not strictly necessary, but is required for getIlluminationDirection.
sensorModel->m_currentParameterValue[3] = atof(imageSupportData.param("omega").c_str());
sensorModel->m_currentParameterValue[4] = atof(imageSupportData.param("phi").c_str());
sensorModel->m_currentParameterValue[5] = atof(imageSupportData.param("kappa").c_str());
if (imageSupportData.param("omega") == "") {
missingKeywords.push_back("omega");
}
if (imageSupportData.param("phi") == "") {
missingKeywords.push_back("phi");
}
if (imageSupportData.param("kappa") == "") {
missingKeywords.push_back("kappa");
}
sensorModel->m_odtX[0] = atof(imageSupportData.param("odt_x", 0).c_str());
sensorModel->m_odtX[1] = atof(imageSupportData.param("odt_x", 1).c_str());
sensorModel->m_odtX[2] = atof(imageSupportData.param("odt_x", 2).c_str());
sensorModel->m_odtX[3] = atof(imageSupportData.param("odt_x", 3).c_str());
sensorModel->m_odtX[4] = atof(imageSupportData.param("odt_x", 4).c_str());
sensorModel->m_odtX[5] = atof(imageSupportData.param("odt_x", 5).c_str());
sensorModel->m_odtX[6] = atof(imageSupportData.param("odt_x", 6).c_str());
sensorModel->m_odtX[7] = atof(imageSupportData.param("odt_x", 7).c_str());
sensorModel->m_odtX[8] = atof(imageSupportData.param("odt_x", 8).c_str());
sensorModel->m_odtX[9] = atof(imageSupportData.param("odt_x", 9).c_str());
sensorModel->m_odtY[0] = atof(imageSupportData.param("odt_y", 0).c_str());
sensorModel->m_odtY[1] = atof(imageSupportData.param("odt_y", 1).c_str());
sensorModel->m_odtY[2] = atof(imageSupportData.param("odt_y", 2).c_str());
sensorModel->m_odtY[3] = atof(imageSupportData.param("odt_y", 3).c_str());
sensorModel->m_odtY[4] = atof(imageSupportData.param("odt_y", 4).c_str());
sensorModel->m_odtY[5] = atof(imageSupportData.param("odt_y", 5).c_str());
sensorModel->m_odtY[6] = atof(imageSupportData.param("odt_y", 6).c_str());
sensorModel->m_odtY[7] = atof(imageSupportData.param("odt_y", 7).c_str());
sensorModel->m_odtY[8] = atof(imageSupportData.param("odt_y", 8).c_str());
sensorModel->m_odtY[9] = atof(imageSupportData.param("odt_y", 9).c_str());
sensorModel->m_ccdCenter[0] = atof(imageSupportData.param("ccd_center", 0).c_str());
sensorModel->m_ccdCenter[1] = atof(imageSupportData.param("ccd_center", 1).c_str());
sensorModel->m_originalHalfLines = atof(imageSupportData.param("original_half_lines").c_str());
sensorModel->m_spacecraftName = imageSupportData.param("spacecraft_name");
sensorModel->m_pixelPitch = atof(imageSupportData.param("pixel_pitch").c_str());
sensorModel->m_iTransS[0] = atof(imageSupportData.param("itrans_sample", 0).c_str());
sensorModel->m_iTransS[1] = atof(imageSupportData.param("itrans_sample", 1).c_str());
sensorModel->m_iTransS[2] = atof(imageSupportData.param("itrans_sample", 2).c_str());
if (imageSupportData.param("itrans_sample", 0) == "") {
missingKeywords.push_back("itrans_sample needs 3 elements");
}
else if (imageSupportData.param("itrans_sample", 1) == "") {
missingKeywords.push_back("itrans_sample needs 3 elements");
}
else if (imageSupportData.param("itrans_sample", 2) == "") {
missingKeywords.push_back("itrans_sample needs 3 elements");
}
sensorModel->m_ephemerisTime = atof(imageSupportData.param("ephemeris_time").c_str());
if (imageSupportData.param("ephemeris_time") == "") {
missingKeywords.push_back("ephemeris_time");
}
sensorModel->m_originalHalfSamples =
atof(imageSupportData.param("original_half_samples").c_str());
sensorModel->m_boresight[0] = atof(imageSupportData.param("boresight", 0).c_str());
sensorModel->m_boresight[1] = atof(imageSupportData.param("boresight", 1).c_str());
sensorModel->m_boresight[2] = atof(imageSupportData.param("boresight", 2).c_str());
sensorModel->m_iTransL[0] = atof(imageSupportData.param("itrans_line", 0).c_str());
sensorModel->m_iTransL[1] = atof(imageSupportData.param("itrans_line", 1).c_str());
sensorModel->m_iTransL[2] = atof(imageSupportData.param("itrans_line", 2).c_str());
if (imageSupportData.param("itrans_line", 0) == "") {
missingKeywords.push_back("itrans_line needs 3 elements");
}
else if (imageSupportData.param("itrans_line", 1) == "") {
missingKeywords.push_back("itrans_line needs 3 elements");
}
else if (imageSupportData.param("itrans_line", 2) == "") {
missingKeywords.push_back("itrans_line needs 3 elements");
}
sensorModel->m_nLines = atoi(imageSupportData.param("nlines").c_str());
sensorModel->m_nSamples = atoi(imageSupportData.param("nsamples").c_str());
if (imageSupportData.param("nlines") == "") {
missingKeywords.push_back("nlines");
}
if (imageSupportData.param("nsamples") == "") {
missingKeywords.push_back("nsamples");
}
sensorModel->m_transY[0] = atof(imageSupportData.param("transy", 0).c_str());
sensorModel->m_transY[1] = atof(imageSupportData.param("transy", 1).c_str());
sensorModel->m_transY[2] = atof(imageSupportData.param("transy", 2).c_str());
if (imageSupportData.param("transy", 0) == "") {
missingKeywords.push_back("transy");
}
else if (imageSupportData.param("transy", 1) == "") {
missingKeywords.push_back("transy");
}
else if (imageSupportData.param("transy", 2) == "") {
missingKeywords.push_back("transy");
}
sensorModel->m_transX[0] = atof(imageSupportData.param("transx", 0).c_str());
sensorModel->m_transX[1] = atof(imageSupportData.param("transx", 1).c_str());
sensorModel->m_transX[2] = atof(imageSupportData.param("transx", 2).c_str());
if (imageSupportData.param("transx", 0) == "") {
missingKeywords.push_back("transx");
}
else if (imageSupportData.param("transx", 1) == "") {
missingKeywords.push_back("transx");
}
else if (imageSupportData.param("transx", 2) == "") {
missingKeywords.push_back("transx");
}
sensorModel->m_majorAxis = 1000 * atof(imageSupportData.param("semi_major_axis").c_str());
if (imageSupportData.param("semi_major_axis") == "") {
missingKeywords.push_back("semi_major_axis");
}
// Do we assume that if we do not have a semi-minor axis, then the body is a sphere?
if (imageSupportData.param("semi_minor_axis") == "") {
sensorModel->m_minorAxis = sensorModel->m_majorAxis;
}
else {
sensorModel->m_minorAxis = 1000 * atof(imageSupportData.param("semi_minor_axis").c_str());
}
// If we are missing necessary keywords from ISD, we cannot create a valid sensor model.
if (missingKeywords.size() != 0) {
std::string errorMessage = "ISD is missing the necessary keywords: [";
for (int i = 0; i < missingKeywords.size(); i++) {
if (i == missingKeywords.size() - 1) {
errorMessage += missingKeywords[i] + "]";
}
else {
errorMessage += missingKeywords[i] + ", ";
}
}
throw csm::Error(csm::Error::SENSOR_MODEL_NOT_CONSTRUCTIBLE,
errorMessage,
"MdisPlugin::constructModelFromISD");
}
return sensorModel;
}
std::string MdisPlugin::getModelNameFromModelState(const std::string &modelState,
csm::WarningList *warnings) const {
return "state";
}
bool MdisPlugin::canISDBeConvertedToModelState(const csm::Isd &imageSupportData,
const std::string &modelName,
csm::WarningList *warnings) const {
return false;
}
std::string MdisPlugin::convertISDToModelState(const csm::Isd &imageSupportData,
const std::string &modelName,
csm::WarningList *warnings) const {
return "state";
}
| 79219512c960adbf4283c14d150cce02d678161a | ["Markdown", "Python", "CMake", "C++"] | 32 | Python | thareUSGS/camera_model | d69bd6be1656f9a17b28a41a196165ef94fc3c03 | 19e77b73030135a8f2dcb9fd2efeae427ed949d6 | refs/heads/master |
<repo_name>xatiamacharadze/MT<file_sep>/src/js/main.js
$(".main-container").mousemove(function(e) {
parallaxIt(e, ".name-container", -80);
parallaxIt(e, ".mt-container", -30);
});
function parallaxIt(e, target, movement) {
let $this = $("#container");
let relX = e.pageX - $this.offset().left;
let relY = e.pageY - $this.offset().top;
// console.log(e.pageX, e.pageY, $this.offset().left, $this.offset().top);
TweenMax.to(target, 1, {
x: (relX - $this.width() / 2) / $this.width() * movement,
y: (relY - $this.height() / 2) / $this.height() * movement
});
}
|
b9659bc05bf279753a8f24020c7e13e1fa3ae4da
|
[
"JavaScript"
] | 1
|
JavaScript
|
xatiamacharadze/MT
|
19071271e2ffc431da9f2c7bf03f9755a0c66af3
|
8916035d072a5c253b50a974442ae7e4eee66ae5
|